feat: Disallow all web crawlers (#111)
* feat: Disallow all web crawlers

* [pre-commit.ci] auto fixes from pre-commit.com hooks

  for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
api/robots.py (new file, 10 lines added)

@@ -0,0 +1,10 @@
+from sanic import Blueprint
+from sanic.response import text
+
+
+robots: Blueprint = Blueprint("robots")
+
+
+@robots.get("/robots.txt")
+async def robots_txt(request):
+    return text("User-agent: *\nDisallow: /", content_type="text/plain")
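
The blueprint on its own is not reachable until it is registered on the Sanic application, and that wiring is not part of this diff. A minimal sketch of how it might be attached, assuming a module layout and application name that are not shown in this commit:

# Hypothetical wiring (not part of this commit): register the robots
# blueprint so the app serves GET /robots.txt with the disallow-all body.
from sanic import Sanic

from api.robots import robots

app = Sanic("revanced-api")  # application name is an assumption
app.blueprint(robots)        # mounts the /robots.txt route on the app

if __name__ == "__main__":
    # Host and port are placeholders; the real deployment settings are not in this diff.
    app.run(host="0.0.0.0", port=8000)

Once registered, GET /robots.txt returns "User-agent: *" followed by "Disallow: /", which tells compliant crawlers not to index any path on the API.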