Mirror of https://github.com/codex-storage/deluge.git, synced 2025-02-25 17:45:26 +00:00
webui: robots.txt, prevent search-engine indexing

parent 07939d46b2
commit 596e8bbf21
@@ -91,7 +91,8 @@ urls = (
     #"/downloads/(.*)","downloads" disabled until it can handle large downloads
     #default-pages
     "/", "home",
-    "", "home"
+    "", "home",
+    "/robots.txt","robots"
 )
 #/routing
 
@@ -345,5 +346,12 @@ class downloads(static_handler):
         if not ws.config.get('share_downloads'):
             raise Exception('Access to downloads is forbidden.')
         return static_handler.GET(self, name)
+
+class robots:
+    def GET(self):
+        "no robots/prevent searchengines from indexing"
+        web.header("Content-Type", "text/plain")
+        print "User-agent: *\nDisallow:\n"
+
 #/pages
 
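For context, the change does two things: it adds a "/robots.txt" entry to the web.py urls tuple and a robots handler class whose GET method serves a plain-text body. The sketch below shows the same routing-plus-handler pattern as a self-contained script, assuming web.py 0.3+ where a handler returns the body instead of printing it (the committed code is older, Python 2 style web.py). The home class, the app bootstrap, and the "Disallow: /" value are illustrative additions, not part of this commit; note that an empty "Disallow:" line, as in the committed handler, tells crawlers that nothing is disallowed.

# Minimal standalone sketch of the same pattern (assumed web.py 0.3+ API).
import web

# URL routing: pairs of (path pattern, handler class name), as in the webui's urls tuple.
urls = (
    "/", "home",
    "/robots.txt", "robots",
)

class home:
    def GET(self):
        # Placeholder page, only here to make the example self-contained.
        return "hello from the webui"

class robots:
    def GET(self):
        # Serve robots.txt as plain text. "Disallow: /" blocks crawling of all paths;
        # an empty "Disallow:" value would instead permit everything.
        web.header("Content-Type", "text/plain")
        return "User-agent: *\nDisallow: /\n"

if __name__ == "__main__":
    app = web.application(urls, globals())
    app.run()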