WebHost: fix accidental robots.txt capture (#3502)

Fabian Dill 2024-06-21 14:54:19 +02:00 committed by GitHub
parent 4f514e5944
commit ce37bed7c6
2 changed files with 2 additions and 1 deletion


@@ -8,7 +8,8 @@ from . import cache
 def robots():
     # If this host is not official, do not allow search engine crawling
     if not app.config["ASSET_RIGHTS"]:
-        return app.send_static_file('robots.txt')
+        # filename changed in case the path is intercepted and served by an outside service
+        return app.send_static_file('robots_file.txt')
     # Send 404 if the host has affirmed this to be the official WebHost
     abort(404)
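
For context, a minimal self-contained sketch of how this route might look after the change. The `@app.route` path, the `ASSET_RIGHTS` default, and the standalone Flask app setup are assumptions for illustration; the real module uses a relative `from . import cache` and the surrounding WebHost application, which are omitted here.

    from flask import Flask, abort

    app = Flask(__name__)
    # Assumed default: only the official WebHost sets this to True
    app.config.setdefault("ASSET_RIGHTS", False)

    @app.route('/robots.txt')  # assumed route; not shown in this hunk
    def robots():
        # If this host is not official, do not allow search engine crawling
        if not app.config["ASSET_RIGHTS"]:
            # serve the renamed static file so an outside service intercepting
            # the /robots.txt path does not accidentally capture it
            return app.send_static_file('robots_file.txt')
        # Send 404 if the host has affirmed this to be the official WebHost
        abort(404)

The rename from robots.txt to robots_file.txt matters because some hosting setups serve static files matching the request path directly, bypassing Flask; with a different filename, only this route can serve the crawl-blocking file.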