Threaded RSS's announcements and made reloading/flushing work properly.
commit 0c2de453f2 (parent 0098784ffd)
@@ -39,12 +39,14 @@ import plugins
 
 import sets
 import time
+import threading
 from itertools import imap
 
 import rssparser
 
 import conf
 import utils
+import world
 import ircmsgs
 import ircutils
 import privmsgs
@@ -81,6 +83,8 @@ conf.registerGlobalValue(conf.supybot.plugins.RSS, 'waitPeriod',
     wait between retrieving RSS feeds; requests made within this period will
     return cached results."""))
 conf.registerGroup(conf.supybot.plugins.RSS, 'feeds')
+conf.supybot.plugins.RSS.feeds.help = utils.normalizeWhitespace("""These are
+the registered feeds for the RSS plugin.""")
 
 def registerFeed(name, url):
     conf.supybot.plugins.RSS.feeds.register(name, registry.String(url, ''))
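
Note: registerFeed, visible unchanged in the context lines above, simply records a feed under the supybot.plugins.RSS.feeds registry group that the new feeds.help text documents. A hypothetical call (the feed name and URL are made up for illustration):

    registerFeed('advogato', 'http://advogato.org/rss/articles.xml')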
@@ -92,7 +96,6 @@ class RSS(callbacks.Privmsg):
         self.feedNames = sets.Set()
         self.lastRequest = {}
         self.cachedFeeds = {}
-        L = conf.supybot.plugins.RSS.feeds.getValues(fullNames=False)
         for (name, url) in registry._cache.iteritems():
             name = name.lower()
             if name.startswith('supybot.plugins.rss.feeds.'):
@@ -108,46 +111,60 @@ class RSS(callbacks.Privmsg):
         L = conf.supybot.plugins.RSS.announce.getValues(fullNames=False)
         for (channel, v) in L:
             feeds = v()
-            bold = self.registryValue('bold', channel)
-            sep = self.registryValue('headlineSeparator', channel)
-            prefix = self.registryValue('announcementPrefix', channel)
             for name in feeds:
                 if self.isCommand(callbacks.canonicalName(name)):
                     url = self.getCommand(name).url
                 else:
                     url = name
-                try:
-                    oldresults = self.cachedFeeds[url]
-                    oldheadlines = self.getHeadlines(oldresults)
-                except KeyError:
-                    oldheadlines = []
-                # TODO: This should be threaded.
-                newresults = self.getFeed(url)
-                newheadlines = self.getHeadlines(newresults)
-                for headline in oldheadlines:
-                    try:
-                        newheadlines.remove(headline)
-                    except ValueError:
-                        pass
-                if newheadlines:
-                    pre = '%s%s: ' % (prefix, name)
-                    if bold:
-                        pre = ircutils.bold(pre)
-                    irc.replies(newheadlines, prefixer=pre, joiner=sep,
-                                to=channel, prefixName=False)
+                if self.willGetNewFeed(url):
+                    t = threading.Thread(target=self._newHeadlines,
+                                         name='Fetching <%s>' % url,
+                                         args=(irc, channel, name, url))
+                    self.log.info('Spawning thread to fetch <%s>', url)
+                    world.threadsSpawned += 1
+                    t.start()
+
+
+    def _newHeadlines(self, irc, channel, name, url):
+        try:
+            oldresults = self.cachedFeeds[url]
+            oldheadlines = self.getHeadlines(oldresults)
+        except KeyError:
+            oldheadlines = []
+        newresults = self.getFeed(url)
+        newheadlines = self.getHeadlines(newresults)
+        for headline in oldheadlines:
+            try:
+                newheadlines.remove(headline)
+            except ValueError:
+                pass
+        bold = self.registryValue('bold', channel)
+        sep = self.registryValue('headlineSeparator', channel)
+        prefix = self.registryValue('announcementPrefix', channel)
+        if newheadlines:
+            pre = '%s%s: ' % (prefix, name)
+            if bold:
+                pre = ircutils.bold(pre)
+            irc.replies(newheadlines, prefixer=pre, joiner=sep,
+                        to=channel, prefixName=False)
 
-    def getFeed(self, url):
+    def willGetNewFeed(self, url):
         now = time.time()
         wait = self.registryValue('waitPeriod')
         if url not in self.lastRequest or now - self.lastRequest[url] > wait:
+            return True
+        else:
+            return False
+    def getFeed(self, url):
+        if self.willGetNewFeed(url):
             try:
-                self.log.info('Downloading new feed from %s', url)
+                self.log.info('Downloading new feed from <%s>', url)
                 results = rssparser.parse(url)
             except sgmllib.SGMLParseError:
                 self.log.exception('Uncaught exception from rssparser:')
                 raise callbacks.Error, 'Invalid (unparseable) RSS feed.'
             self.cachedFeeds[url] = results
-            self.lastRequest[url] = now
+            self.lastRequest[url] = time.time()
         return self.cachedFeeds[url]
 
     def getHeadlines(self, feed):
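
Note: this hunk is the threading half of the commit. The old __call__ fetched and announced every feed inline (the removed "# TODO: This should be threaded." block); the new code checks willGetNewFeed() and, only when the cached copy is stale, spawns a thread running _newHeadlines for that feed, bumping world.threadsSpawned for bookkeeping. Below is a minimal standalone sketch of the same pattern, written outside the Supybot framework; all names here (WAIT_PERIOD, FEED_CACHE, fetch_feed, announce_feed, announce_all) and the sample URL are illustrative, not part of the plugin:

    import threading
    import time

    WAIT_PERIOD = 1800      # seconds; plays the role of supybot.plugins.RSS.waitPeriod
    FEED_CACHE = {}         # url -> headlines from the last fetch
    LAST_REQUEST = {}       # url -> time of the last fetch

    def will_get_new_feed(url):
        # Same guard as RSS.willGetNewFeed: only refetch after the wait period.
        now = time.time()
        return url not in LAST_REQUEST or now - LAST_REQUEST[url] > WAIT_PERIOD

    def fetch_feed(url):
        # Stand-in for rssparser.parse(url); returns a list of headline strings.
        return ['placeholder headline from %s' % url]

    def announce_feed(channel, url):
        # Runs in a worker thread, like RSS._newHeadlines: diff old headlines
        # against new ones and announce only the fresh entries.
        old = FEED_CACHE.get(url, [])
        new = fetch_feed(url)
        FEED_CACHE[url] = new
        LAST_REQUEST[url] = time.time()
        fresh = [h for h in new if h not in old]
        if fresh:
            print('%s: %s' % (channel, ' | '.join(fresh)))

    def announce_all(channel, urls):
        # Like the rewritten __call__: one short-lived, named thread per stale feed.
        for url in urls:
            if will_get_new_feed(url):
                t = threading.Thread(target=announce_feed,
                                     name='Fetching <%s>' % url,
                                     args=(channel, url))
                t.start()

For example, announce_all('#rss', ['http://example.org/news.rss']) starts at most one fetch per feed per wait period, which is the behaviour the plugin gets from willGetNewFeed plus the thread spawn shown in the hunk above.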
@@ -198,7 +215,7 @@ class RSS(callbacks.Privmsg):
         irc.replySuccess()
 
     def announce(self, irc, msg, args):
-        """[<channel>] <name|url> [<name|url> ...]
+        """[<channel>] [<name|url> ...]
 
         Sets the current list of announced feeds in the channel to the feeds
         given. Valid feeds include the names of registered feeds as well as
@@ -207,8 +224,11 @@ class RSS(callbacks.Privmsg):
         """
         channel = privmsgs.getChannel(msg, args)
         conf.supybot.plugins.RSS.announce.get(channel).setValue(args)
-        irc.replySuccess()
+        if not args:
+            irc.replySuccess('All previous announced feeds removed.')
+        else:
+            irc.replySuccess()
 
     def rss(self, irc, msg, args):
         """<url>
 
@@ -50,6 +50,7 @@ import log
 import conf
 import drivers
 import ircutils
+import registry
 import schedule
 
 startedAt = time.time() # Just in case it doesn't get set later.
@@ -88,6 +89,9 @@ def upkeep():
     flushed = conf.supybot.flush()
     if flushed:
         flush()
+    # This is so registry._cache gets filled.
+    if registryFilename is not None:
+        registry.open(registryFilename)
     if not dying:
         log.debug('Regexp cache size: %s', len(sre._cache))
         log.debug('Pattern cache size: %s'%len(ircutils._patternCache))
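
Note: the last two hunks are the reloading/flushing half of the commit: upkeep() now re-reads the registry file right after flushing (hence the new "import registry"), so registry._cache is repopulated and code that walks it, such as the RSS plugin's feed discovery in __init__ above, still finds the registered feeds after a flush or reload. A standalone sketch of that flush-then-reopen idea follows; the helper names and the key/value file format are made up for illustration and are not the real Supybot registry API:

    _cache = {}   # stands in for registry._cache

    def flush_registry(filename):
        # Write the in-memory cache out to disk.
        with open(filename, 'w') as f:
            for key in sorted(_cache):
                f.write('%s: %s\n' % (key, _cache[key]))

    def load_registry(filename):
        # Re-read the file so the cache is filled again after a flush/reload.
        _cache.clear()
        with open(filename) as f:
            for line in f:
                key, _, value = line.partition(': ')
                _cache[key] = value.rstrip('\n')

    def upkeep(filename):
        # Mirrors the change to upkeep() above: flush, then reopen, so anything
        # that inspects the cache afterwards sees fresh data.
        if filename is not None:
            flush_registry(filename)
            load_registry(filename)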