Mirror of https://github.com/Mikaela/Limnoria.git
Made maxSize a config variable, supybot.httpPeekSize.
parent b2f5fb630c
commit 7f107ff9c2
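The idea of the change: instead of every plugin carrying its own hard-coded `maxSize = 4096` class attribute, the peek limit becomes one registry value, `supybot.httpPeekSize`, that every caller reads at call time. A minimal stand-in sketch of that mechanism (it models the behaviour of `registry.PositiveInteger` rather than importing Supybot's actual `registry` module):

class PositiveIntegerValue:
    """Stand-in for registry.PositiveInteger: holds a validated default,
    is callable to read the current value, and setValue() replaces it."""
    def __init__(self, default, help):
        self.help = help
        self._value = None
        self.setValue(default)

    def __call__(self):
        return self._value

    def setValue(self, v):
        if not isinstance(v, int) or v <= 0:
            raise ValueError('httpPeekSize must be a positive integer')
        self._value = v

httpPeekSize = PositiveIntegerValue(4096, "How many bytes to peek at a URL.")

print(httpPeekSize())        # 4096 -- the registered default
httpPeekSize.setValue(8192)  # one change is seen by every caller
print(httpPeekSize())        # 8192

The diff below then swaps every `self.maxSize` read for a `conf.supybot.httpPeekSize()` call.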
@@ -50,6 +50,7 @@ import conf
 import utils
 import webutils
 import privmsgs
+import registry
 import callbacks
 
 class FreshmeatException(Exception):
@@ -57,7 +58,6 @@ class FreshmeatException(Exception):
 
 class Http(callbacks.Privmsg):
     threaded = True
-    maxSize = 4096
     _titleRe = re.compile(r'<title>(.*?)</title>', re.I | re.S)
     def callCommand(self, method, irc, msg, *L):
         try:
@@ -90,7 +90,7 @@ class Http(callbacks.Privmsg):
         if not url.startswith('http://'):
             irc.error('Only HTTP urls are valid.')
             return
-        s = webutils.getUrl(url, size=self.maxSize)
+        s = webutils.getUrl(url, size=conf.supybot.httpPeekSize())
         m = self._doctypeRe.search(s)
         if m:
             s = utils.normalizeWhitespace(m.group(0))
@@ -113,13 +113,13 @@ class Http(callbacks.Privmsg):
             size = fd.headers['Content-Length']
             irc.reply('%s is %s bytes long.' % (url, size))
         except KeyError:
-            s = fd.read(self.maxSize)
-            if len(s) != self.maxSize:
+            s = fd.read(conf.supybot.httpPeekSize())
+            if len(s) != conf.supybot.httpPeekSize():
                 irc.reply('%s is %s bytes long.' % (url, len(s)))
             else:
                 irc.reply('The server didn\'t tell me how long %s is '
                           'but it\'s longer than %s bytes.' %
-                          (url,self.maxSize))
+                          (url,conf.supybot.httpPeekSize()))
 
     def title(self, irc, msg, args):
         """<url>
@@ -129,13 +129,13 @@ class Http(callbacks.Privmsg):
         url = privmsgs.getArgs(args)
         if '://' not in url:
             url = 'http://%s' % url
-        text = webutils.getUrl(url, size=self.maxSize)
+        text = webutils.getUrl(url, size=conf.supybot.httpPeekSize())
        m = self._titleRe.search(text)
         if m is not None:
             irc.reply(utils.htmlToText(m.group(1).strip()))
         else:
             irc.reply('That URL appears to have no HTML title '
-                      'within the first %s bytes.' % self.maxSize)
+                      'within the first %s bytes.'%conf.supybot.httpPeekSize())
 
     def freshmeat(self, irc, msg, args):
         """<project name>
@@ -102,7 +102,6 @@ class URL(callbacks.PrivmsgCommandAndRegexp,
           plugins.ChannelDBHandler):
     regexps = ['tinyurlSnarfer', 'titleSnarfer']
     _titleRe = re.compile('<title>(.*?)</title>', re.I)
-    maxSize = 4096
     def __init__(self):
         self.nextMsgs = {}
         callbacks.PrivmsgCommandAndRegexp.__init__(self)
@@ -200,7 +199,7 @@ class URL(callbacks.PrivmsgCommandAndRegexp,
         channel = msg.args[0]
         if self.registryValue('titleSnarfer', channel):
             url = match.group(0)
-            text = webutils.getUrl(url, size=self.maxSize)
+            text = webutils.getUrl(url, size=conf.supybot.httpPeekSize())
             m = self._titleRe.search(text)
             if m is not None:
                 s = 'Title: %s' % utils.htmlToText(m.group(1).strip())
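In both plugins the limit is handed to `webutils.getUrl(url, size=...)`, which caps how much of the page is downloaded before the bot looks for a doctype or `<title>`. As an illustration only (this is not the project's `webutils` implementation, and `get_url_peek` is a made-up name), a size-capped fetch amounts to:

import urllib.request

def get_url_peek(url, size=4096):
    """Read at most `size` bytes of the response body and give up on the rest."""
    with urllib.request.urlopen(url) as fd:
        data = fd.read(size)            # stop after `size` bytes
    return data.decode('utf-8', 'replace')

# text = get_url_peek('http://example.com/', size=4096)  # uncomment to try it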
@@ -168,6 +168,11 @@ whether the bot will automatically thread all commands. At this point this
 option exists almost exclusively for debugging purposes; it can do very little
 except to take up more CPU."""))
 
+supybot.register('httpPeekSize', registry.PositiveInteger(4096, """Determines
+how many bytes the bot will 'peek' at when looking through a URL for a
+doctype or title or something similar. It'll give up after it reads this many
+bytes."""))
+
 ###
 # Reply/error tweaking.
 ###
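After the registration above, the setting behaves like any other Supybot registry value. A hedged usage sketch, assuming the bot's modules are importable and the usual registry semantics (calling a value returns it, setValue() validates it):

import conf

print(conf.supybot.httpPeekSize())        # 4096, the default registered above

conf.supybot.httpPeekSize.setValue(8192)  # widen the peek window at runtime
print(conf.supybot.httpPeekSize())        # 8192

# conf.supybot.httpPeekSize.setValue(0)   # rejected: PositiveInteger requires > 0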