Web: add 'timeout' config for web fetch, default 5 sec.

Previously, when a site took a long time to respond, the thread would hang for quite a while.

Also needed to modify src/utils/web.py to accept the timeout argument.
This commit is contained in:
Daniel Folkinshteyn 2011-06-13 16:42:57 -04:00
parent 1b74b8ddf6
commit 2b708f034b
3 changed files with 11 additions and 3 deletions

View File

@@ -58,4 +58,9 @@ conf.registerGlobalValue(Web.fetch, 'maximum',
registry.NonNegativeInteger(0, """Determines the maximum number of
bytes the bot will download via the 'fetch' command in this plugin."""))
conf.registerGlobalValue(Web.fetch, 'timeout',
registry.NonNegativeInteger(5, """Determines the maximum number of
seconds the bot will wait for the site to respond, when using the 'fetch'
command in this plugin. If 0, will use socket.defaulttimeout"""))
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:

View File

@@ -236,7 +236,10 @@ class Web(callbacks.PluginRegexp):
irc.error('This command is disabled '
'(supybot.plugins.Web.fetch.maximum is set to 0).',
Raise=True)
fd = utils.web.getUrlFd(url)
timeout = self.registryValue('fetch.timeout')
if timeout == 0:
timeout = None
fd = utils.web.getUrlFd(url, timeout=timeout)
irc.reply(fd.read(max))
fetch = wrap(fetch, ['url'])

View File

@@ -96,7 +96,7 @@ defaultHeaders = {
# application-specific function. Feel free to use a callable here.
proxy = None
def getUrlFd(url, headers=None, data=None):
def getUrlFd(url, headers=None, data=None, timeout=None):
"""getUrlFd(url, headers=None, data=None)
Opens the given url and returns a file object. Headers and data are
@@ -114,7 +114,7 @@ def getUrlFd(url, headers=None, data=None):
httpProxy = force(proxy)
if httpProxy:
request.set_proxy(httpProxy, 'http')
fd = urllib2.urlopen(request)
fd = urllib2.urlopen(request, timeout=timeout)
return fd
except socket.timeout, e:
raise Error, TIMED_OUT