WebHost: fix accidental robots.txt capture (#3502)
This commit is contained in:
parent
4f514e5944
commit
ce37bed7c6
|
@ -8,7 +8,8 @@ from . import cache
|
||||||
def robots():
    """Serve the crawler policy for this WebHost.

    The official host (``ASSET_RIGHTS`` set) answers 404 so no robots
    file is served there; any unofficial mirror gets a file that blocks
    search-engine crawling.
    """
    # Official WebHost: the host has affirmed asset rights, so send a 404.
    if app.config["ASSET_RIGHTS"]:
        abort(404)
    # Unofficial host: do not allow search engine crawling. The static file
    # is named robots_file.txt (not robots.txt) in case the /robots.txt path
    # is intercepted and served by an outside service.
    return app.send_static_file('robots_file.txt')
|
||||||
|
|
Loading…
Reference in New Issue