webui: robots.txt, prevent search-engine indexing

This commit is contained in:
Martijn Voncken 2008-02-16 14:29:35 +00:00
parent 07939d46b2
commit 596e8bbf21

View File

@ -91,7 +91,8 @@ urls = (
#"/downloads/(.*)","downloads" disabled until it can handle large downloads
#default-pages
"/", "home",
"", "home"
"", "home",
"/robots.txt","robots"
)
#/routing
@ -345,5 +346,12 @@ class downloads(static_handler):
if not ws.config.get('share_downloads'):
raise Exception('Access to downloads is forbidden.')
return static_handler.GET(self, name)
class robots:
def GET(self):
"no robots/prevent searchengines from indexing"
web.header("Content-Type", "text/plain")
print "User-agent: *\nDisallow:\n"
#/pages