Create config variable supybot.directories.data.web, and move robots.txt to this directory.

Valentin Lorentz 2013-03-09 20:52:35 +01:00
parent d714909576
commit 83dd3a2fe8
2 changed files with 32 additions and 18 deletions
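In practice, this commit replaces the supybot.servers.http.robots registry string with a plain file in a new per-bot directory for web assets. A minimal sketch of where that file ends up after the change, assuming default directory settings (the exact path depends on supybot.directories.data):

    import supybot.conf as conf

    # dirize() is the helper already used in this diff: it joins the configured
    # directory with a filename, so with default settings this typically
    # resolves to something like data/web/robots.txt.
    robots_path = conf.supybot.directories.data.web.dirize('robots.txt')

    # Editing that file is now how the served robots.txt is customized; the
    # policy below is only an example, not a shipped default (the shipped
    # template is empty).
    with open(robots_path, 'w') as fd:
        fd.write('User-agent: *\nDisallow: /\n')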

File 1 of 2 — the configuration registry:

@@ -789,6 +789,9 @@ registerGlobalValue(supybot.directories, 'backup',
 registerGlobalValue(supybot.directories.data, 'tmp',
     DataFilenameDirectory('tmp', _("""Determines what directory temporary files
     are put into.""")))
+registerGlobalValue(supybot.directories.data, 'web',
+    DataFilenameDirectory('web', _("""Determines what directory files of the
+    web server (templates, custom images, ...) are put into.""")))
 
 utils.file.AtomicFile.default.tmpDir = supybot.directories.data.tmp
 utils.file.AtomicFile.default.backupDir = supybot.directories.backup
@@ -1113,9 +1116,6 @@ registerGlobalValue(supybot.servers.http, 'keepAlive',
     registry.Boolean(False, _("""Determines whether the server will stay
     alive if no plugin is using it. This also means that the server will
     start even if it is not used.""")))
-registerGlobalValue(supybot.servers.http, 'robots',
-    registry.String('', _("""Determines the content of the robots.txt file,
-    served on the server to search engine.""")))
 registerGlobalValue(supybot.servers.http, 'favicon',
     registry.String('', _("""Determines the path of the file served as
     favicon to browsers.""")))
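The new variable uses the same DataFilenameDirectory mechanism as the existing 'tmp' entry, so it resolves relative to supybot.directories.data. Note also that supybot.servers.http.robots is removed outright, so a value previously set there has to be copied into the new robots.txt file by hand. A hedged sketch of how the two directory values compare (the returned paths depend on the configured data directory):

    import supybot.conf as conf

    # Both values are DataFilenameDirectory instances registered under
    # supybot.directories.data; calling them yields the resolved path.
    print(conf.supybot.directories.data.tmp())   # e.g. 'data/tmp'
    print(conf.supybot.directories.data.web())   # e.g. 'data/web'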

File 2 of 2 — the built-in HTTP server module:

@@ -52,6 +52,32 @@ configGroup = conf.supybot.servers.http
 class RequestNotHandled(Exception):
     pass
 
+TEMPLATE_DEFAULTS = {
+    'index.html': """\
+<html>
+<head>
+<title>""" + _('Supybot Web server index') + """</title>
+</head>
+<body>
+<p>""" + _('Here is a list of the plugins that have a Web interface:') +\
+"""
+</p>
+%(list)s
+</body>
+</html>""",
+    'robots.txt': """""",
+}
+
+for filename, content in TEMPLATE_DEFAULTS.items():
+    path = conf.supybot.directories.data.web.dirize(filename)
+    if not os.path.isfile(path):
+        with open(path, 'a') as fd:
+            fd.write(content)
+
+def get_template(filename):
+    path = conf.supybot.directories.data.web.dirize(filename)
+    return open(path, 'r').read()
+
 class RealSupyHTTPServer(HTTPServer):
     # TODO: make this configurable
     timeout = 0.5
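Two details of the new block are worth noting: the seeding loop writes a default only when the file does not already exist (the os.path.isfile() guard skips existing files, and the 'a' append mode never truncates), so an owner's edits survive restarts; and get_template() re-reads the file on every call, so edits take effect without restarting the bot. The index template also switches from a positional %s to a named %(list)s placeholder, which is why doGet below formats with a dict. A small usage sketch (the list markup here is only an example value):

    # get_template() reads data/web/index.html at request time; the mapping
    # keys must match the named placeholders in the template.
    page = get_template('index.html') % {
        'list': '<ul class="plugins"><li><a href="/foo/">Foo</a></li></ul>',
    }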
@@ -184,26 +210,14 @@ class SupyIndex(SupyHTTPServerCallback):
     """Displays the index of available plugins."""
     name = "index"
     defaultResponse = _("Request not handled.")
-    template = """
-<html>
-<head>
-<title>""" + _('Supybot Web server index') + """</title>
-</head>
-<body>
-<p>""" + _('Here is a list of the plugins that have a Web interface:') +\
-"""
-</p>
-%s
-</body>
-</html>"""
     def doGet(self, handler, path):
         plugins = [x for x in handler.server.callbacks.items()]
         if plugins == []:
             plugins = _('No plugins available.')
         else:
-            plugins = '<ul><li>%s</li></ul>' % '</li><li>'.join(
+            plugins = '<ul class="plugins"><li>%s</li></ul>' % '</li><li>'.join(
                 ['<a href="/%s/">%s</a>' % (x,y.name) for x,y in plugins])
-        response = self.template % plugins
+        response = get_template('index.html') % {'list': plugins}
         handler.send_response(200)
         self.send_header('Content_type', 'text/html')
         self.send_header('Content-Length', len(response))
@@ -215,7 +229,7 @@ class RobotsTxt(SupyHTTPServerCallback):
     name = 'robotstxt'
     defaultResponse = _('Request not handled')
     def doGet(self, handler, path):
-        response = conf.supybot.servers.http.robots().replace('\\n', '\n')
+        response = get_template('robots.txt')
         handler.send_response(200)
         self.send_header('Content-type', 'text/plain')
         self.send_header('Content-Length', len(response))
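With this last hunk, RobotsTxt serves the contents of data/web/robots.txt verbatim; the old trick of storing literal \n escapes in the registry value and expanding them at request time is gone. A quick check sketch, assuming the built-in HTTP server is enabled and listening on its configured port (8080 here is only an example):

    try:
        from urllib.request import urlopen   # Python 3
    except ImportError:
        from urllib2 import urlopen          # Python 2
    # robots.txt is served at the server root, as crawlers expect.
    print(urlopen('http://localhost:8080/robots.txt').read())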