###
# Copyright (c) 2002-2004, Jeremiah Fincher
# Copyright (c) 2008-2010, James Vega
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#   * Redistributions of source code must retain the above copyright notice,
#     this list of conditions, and the following disclaimer.
#   * Redistributions in binary form must reproduce the above copyright notice,
#     this list of conditions, and the following disclaimer in the
#     documentation and/or other materials provided with the distribution.
#   * Neither the name of the author of this software nor the name of
#     contributors to this software may be used to endorse or promote products
#     derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###

import new
import time
import socket
import sgmllib
import threading

import supybot.conf as conf
import supybot.utils as utils
import supybot.world as world
from supybot.commands import *
import supybot.ircutils as ircutils
import supybot.registry as registry
import supybot.callbacks as callbacks
from supybot.i18n import PluginInternationalization, internationalizeDocstring
_ = PluginInternationalization('RSS')

try:
    feedparser = utils.python.universalImport('feedparser', 'local.feedparser')
except ImportError:
    raise callbacks.Error, \
            'You need the feedparser module installed to use this plugin. ' \
            'Download the module at <http://feedparser.org/>.'

def getFeedName(irc, msg, args, state):
    if not registry.isValidRegistryName(args[0]):
        state.errorInvalid('feed name', args[0],
                           'Feed names must not include spaces.')
    state.args.append(callbacks.canonicalName(args.pop(0)))
addConverter('feedName', getFeedName)
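
# Note: registering 'feedName' above is what lets the command definitions
# below reference it in their wrap() specs, e.g. wrap(add, ['feedName',
# 'url']); wrap() looks converters up by the name they were registered under.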

class RSS(callbacks.Plugin):
    """This plugin is useful both for announcing updates to RSS feeds in a
    channel, and for retrieving the headlines of RSS feeds via command. Use
    the "add" command to add feeds to this plugin, and use the "announce"
    command to determine what feeds should be announced in a given channel."""
    threaded = True
    def __init__(self, irc):
        self.__parent = super(RSS, self)
        self.__parent.__init__(irc)
        # Schema is feed : [url, command]
        self.feedNames = callbacks.CanonicalNameDict()
        self.locks = {}
        self.lastRequest = {}
        self.cachedFeeds = {}
        self.gettingLockLock = threading.Lock()
        for name in self.registryValue('feeds'):
            self._registerFeed(name)
            try:
                url = self.registryValue(registry.join(['feeds', name]))
            except registry.NonExistentRegistryEntry:
                self.log.warning('%s is not a registered feed, removing.',
                                 name)
                continue
            self.makeFeedCommand(name, url)
            self.getFeed(url) # So announced feeds don't announce on startup.
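
    # Typical interaction, per the class docstring above (nick, prefix
    # character, and feed URL are all hypothetical):
    #   <user> @rss add news http://example.com/feed.rss
    #   <bot>  The operation succeeded.
    #   <user> @news 3
    # After "add", the feed also becomes a command named after it; see
    # makeFeedCommand below.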

    def isCommandMethod(self, name):
        if not self.__parent.isCommandMethod(name):
            if name in self.feedNames:
                return True
            else:
                return False
        else:
            return True

    def listCommands(self):
        return self.__parent.listCommands(self.feedNames.keys())

    def getCommandMethod(self, command):
        try:
            return self.__parent.getCommandMethod(command)
        except AttributeError:
            return self.feedNames[command[0]][1]

    def _registerFeed(self, name, url=''):
        self.registryValue('feeds').add(name)
        group = self.registryValue('feeds', value=False)
        conf.registerGlobalValue(group, name, registry.String(url, ''))
    def __call__(self, irc, msg):
        self.__parent.__call__(irc, msg)
        irc = callbacks.SimpleProxy(irc, msg)
        newFeeds = {}
        for channel in irc.state.channels:
            feeds = self.registryValue('announce', channel)
            for name in feeds:
                commandName = callbacks.canonicalName(name)
                if self.isCommandMethod(commandName):
                    url = self.feedNames[commandName][0]
                else:
                    url = name
                if self.willGetNewFeed(url):
                    newFeeds.setdefault((url, name), []).append(channel)
        for ((url, name), channels) in newFeeds.iteritems():
            # We check if we can acquire the lock right here because if we
            # don't, we'll possibly end up spawning a lot of threads to get
            # the feed, because this thread may run for a number of bytecodes
            # before it switches to a thread that'll get the lock in
            # _newHeadlines.
            if self.acquireLock(url, blocking=False):
                try:
                    t = threading.Thread(target=self._newHeadlines,
                                         name=format('Fetching %u', url),
                                         args=(irc, channels, name, url))
                    self.log.info('Checking for announcements at %u', url)
                    world.threadsSpawned += 1
                    t.setDaemon(True)
                    t.start()
                finally:
                    self.releaseLock(url)
                time.sleep(0.1) # So other threads can run.
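
    # __call__ runs on every message the bot sees, but actual fetching is
    # rate-limited: willGetNewFeed only lets a given URL through once per
    # waitPeriod, so most invocations fall through without spawning a thread.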

    def buildHeadlines(self, headlines, channel, config='announce.showLinks'):
        newheadlines = []
        if self.registryValue(config, channel):
            for headline in headlines:
                if headline[1]:
                    newheadlines.append(format('%s %u',
                                               headline[0],
                                               headline[1].encode('utf-8')))
                else:
                    newheadlines.append(format('%s', headline[0]))
        else:
            # No explicit loop needed here; the list comprehension already
            # walks every headline.
            newheadlines = [format('%s', h[0]) for h in headlines]
        return newheadlines
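
    # buildHeadlines returns plain formatted strings; whether links are
    # appended is the per-channel boolean named by config:
    # 'announce.showLinks' for announcements, plain 'showLinks' when called
    # from the rss command below.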

    def _newHeadlines(self, irc, channels, name, url):
        try:
            # We acquire the lock here so there's only one announcement thread
            # in this code at any given time. Otherwise, several announcement
            # threads will getFeed (all blocking, in turn); then they'll all
            # want to send their news messages to the appropriate channels.
            # Note that we're allowed to acquire this lock twice within the
            # same thread because it's an RLock and not just a normal Lock.
            self.acquireLock(url)
            try:
                oldresults = self.cachedFeeds[url]
                oldheadlines = self.getHeadlines(oldresults)
            except KeyError:
                oldheadlines = []
            newresults = self.getFeed(url)
            newheadlines = self.getHeadlines(newresults)
            if len(newheadlines) == 1:
                s = newheadlines[0][0]
                if s in ('Timeout downloading feed.',
                         'Unable to download feed.'):
                    self.log.debug('%s %u', s, url)
                    return
            def normalize(headline):
                return (tuple(headline[0].lower().split()), headline[1])
            oldheadlines = set(map(normalize, oldheadlines))
            for (i, headline) in enumerate(newheadlines):
                if normalize(headline) in oldheadlines:
                    newheadlines[i] = None
            newheadlines = filter(None, newheadlines) # Removes Nones.
            if newheadlines:
                def filter_whitelist(headline):
                    v = False
                    for kw in whitelist:
                        # headline[1] (the link) may be None; guard it.
                        if (kw in headline[0] or
                                (headline[1] and kw in headline[1])):
                            v = True
                            break
                    return v
                def filter_blacklist(headline):
                    v = True
                    for kw in blacklist:
                        # headline[1] (the link) may be None; guard it.
                        if (kw in headline[0] or
                                (headline[1] and kw in headline[1])):
                            v = False
                            break
                    return v
                for channel in channels:
                    if len(oldheadlines) == 0:
                        initial = self.registryValue(
                            'initialAnnounceHeadlines', channel)
                        channelnewheadlines = newheadlines[:initial]
                    else:
                        channelnewheadlines = newheadlines[:]
                    whitelist = self.registryValue('keywordWhitelist', channel)
                    blacklist = self.registryValue('keywordBlacklist', channel)
                    if len(whitelist) != 0:
                        channelnewheadlines = filter(filter_whitelist,
                                                     channelnewheadlines)
                    if len(blacklist) != 0:
                        channelnewheadlines = filter(filter_blacklist,
                                                     channelnewheadlines)
                    if len(channelnewheadlines) == 0:
                        # continue, not return: other channels may still have
                        # headlines that pass their own keyword filters.
                        continue
                    bold = self.registryValue('bold', channel)
                    sep = self.registryValue('headlineSeparator', channel)
                    prefix = self.registryValue('announcementPrefix', channel)
                    pre = format('%s%s: ', prefix, name)
                    if bold:
                        pre = ircutils.bold(pre)
                        sep = ircutils.bold(sep)
                    headlines = self.buildHeadlines(channelnewheadlines,
                                                    channel)
                    irc.replies(headlines, prefixer=pre, joiner=sep,
                                to=channel, prefixNick=False, private=True)
        finally:
            self.releaseLock(url)
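
    # Deduplication note: normalize() compares headlines case-insensitively
    # and ignores whitespace differences, so a feed that re-publishes an item
    # with trivial title tweaks won't be re-announced.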

    def willGetNewFeed(self, url):
        now = time.time()
        wait = self.registryValue('waitPeriod')
        if url not in self.lastRequest or now - self.lastRequest[url] > wait:
            return True
        else:
            return False
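
    # Backoff interaction: on a failed fetch, getFeed below sets
    # lastRequest[url] = now - .5 * wait, so willGetNewFeed allows a retry
    # after half a waitPeriod instead of a full one.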

    def acquireLock(self, url, blocking=True):
        try:
            self.gettingLockLock.acquire()
            try:
                lock = self.locks[url]
            except KeyError:
                lock = threading.RLock()
                self.locks[url] = lock
            return lock.acquire(blocking=blocking)
        finally:
            self.gettingLockLock.release()

    def releaseLock(self, url):
        self.locks[url].release()
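
    # Locking scheme: gettingLockLock serializes creation of the per-URL
    # locks so two threads can't race to install different locks for the same
    # URL; the per-URL locks themselves are RLocks because a single thread
    # legitimately acquires one twice (_newHeadlines -> getFeed).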

    def getFeed(self, url):
        def error(s):
            return {'items': [{'title': s}]}
        try:
            # This is the most obvious place to acquire the lock, because a
            # malicious user could conceivably flood the bot with rss commands
            # and DoS the website in question.
            self.acquireLock(url)
            if self.willGetNewFeed(url):
                try:
                    self.log.debug('Downloading new feed from %u', url)
                    results = feedparser.parse(url)
                    if 'bozo_exception' in results:
                        raise results['bozo_exception']
                except sgmllib.SGMLParseError:
                    self.log.exception('Uncaught exception from feedparser:')
                    raise callbacks.Error, 'Invalid (unparsable) RSS feed.'
                except socket.timeout:
                    return error('Timeout downloading feed.')
                except Exception, e:
                    # These seem mostly harmless. We'll need reports of a
                    # kind that isn't.
                    self.log.debug('Allowing bozo_exception %r through.', e)
                if results.get('feed', {}):
                    self.cachedFeeds[url] = results
                    self.lastRequest[url] = time.time()
                else:
                    self.log.debug('Not caching results; feed is empty.')
            try:
                return self.cachedFeeds[url]
            except KeyError:
                wait = self.registryValue('waitPeriod')
                # If there's a problem retrieving the feed, we should back off
                # for a little bit before retrying so that there is time for
                # the error to be resolved.
                self.lastRequest[url] = time.time() - .5 * wait
                return error('Unable to download feed.')
        finally:
            self.releaseLock(url)
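
    # error() returns a minimal feedparser-shaped dict ({'items': [{'title':
    # s}]}), so callers can treat failures like ordinary one-headline feeds;
    # _newHeadlines recognizes these sentinel titles and skips announcing.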

    def _getConverter(self, feed):
        toText = utils.web.htmlToText
        if 'encoding' in feed:
            def conv(s):
                try:
                    return toText(s).strip().encode(feed['encoding'],
                                                    'replace')
                except UnicodeEncodeError:
                    return toText(s.encode('utf-8', 'ignore')).strip()
            return conv
        else:
            return lambda s: toText(s).strip()

    def getHeadlines(self, feed):
        headlines = []
        conv = self._getConverter(feed)
        for d in feed['items']:
            if 'title' in d:
                title = conv(d['title'])
                link = d.get('link')
                if link:
                    headlines.append((title, link))
                else:
                    headlines.append((title, None))
        return headlines
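
    # Headlines are (title, link) pairs throughout this plugin, with link set
    # to None when the feed item carries no <link>; items without a title are
    # dropped entirely.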

    @internationalizeDocstring
    def makeFeedCommand(self, name, url):
        docstring = format("""[<number of headlines>]

        Reports the titles for %s at the RSS feed %u. If
        <number of headlines> is given, returns only that many headlines.
        RSS feeds are only looked up every supybot.plugins.RSS.waitPeriod
        seconds, which defaults to 1800 (30 minutes) since that's what most
        websites prefer.
        """, name, url)
        if url not in self.locks:
            self.locks[url] = threading.RLock()
        if self.isCommandMethod(name):
            s = format('I already have a command in this plugin named %s.',
                       name)
            raise callbacks.Error, s
        def f(self, irc, msg, args):
            args.insert(0, url)
            self.rss(irc, msg, args)
        f = utils.python.changeFunctionName(f, name, docstring)
        f = new.instancemethod(f, self, RSS)
        self.feedNames[name] = (url, f)
        self._registerFeed(name, url)
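
    # f is a plain function until new.instancemethod binds it to this plugin
    # instance; the bound method is what getCommandMethod returns when the
    # feed's name is invoked as a command. (The new module is Python 2 only;
    # it was removed in Python 3.)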

    @internationalizeDocstring
    def add(self, irc, msg, args, name, url):
        """<name> <url>

        Adds a command to this plugin that will look up the RSS feed at the
        given URL.
        """
        self.makeFeedCommand(name, url)
        irc.replySuccess()
    add = wrap(add, ['feedName', 'url'])

    @internationalizeDocstring
    def remove(self, irc, msg, args, name):
        """<name>

        Removes the command for looking up RSS feeds at <name> from
        this plugin.
        """
        if name not in self.feedNames:
            irc.error(_('That\'s not a valid RSS feed command name.'))
            return
        del self.feedNames[name]
        conf.supybot.plugins.RSS.feeds().remove(name)
        conf.supybot.plugins.RSS.feeds.unregister(name)
        irc.replySuccess()
    remove = wrap(remove, ['feedName'])

    class announce(callbacks.Commands):
        @internationalizeDocstring
        def list(self, irc, msg, args, channel):
            """[<channel>]

            Returns the list of feeds announced in <channel>. <channel> is
            only necessary if the message isn't sent in the channel itself.
            """
            announce = conf.supybot.plugins.RSS.announce
            feeds = format('%L', list(announce.get(channel)()))
            irc.reply(feeds or _('I am currently not announcing any feeds.'))
        list = wrap(list, ['channel',])

        @internationalizeDocstring
        def add(self, irc, msg, args, channel, feeds):
            """[<channel>] <name|url> [<name|url> ...]

            Adds the list of feeds to the current list of announced feeds in
            <channel>. Valid feeds include the names of registered feeds as
            well as URLs for RSS feeds. <channel> is only necessary if the
            message isn't sent in the channel itself.
            """
            announce = conf.supybot.plugins.RSS.announce
            S = announce.get(channel)()
            for feed in feeds:
                S.add(feed)
            announce.get(channel).setValue(S)
            irc.replySuccess()
        add = wrap(add, [('checkChannelCapability', 'op'),
                         many(first('url', 'feedName'))])

        @internationalizeDocstring
        def remove(self, irc, msg, args, channel, feeds):
            """[<channel>] <name|url> [<name|url> ...]

            Removes the list of feeds from the current list of announced feeds
            in <channel>. Valid feeds include the names of registered feeds as
            well as URLs for RSS feeds. <channel> is only necessary if the
            message isn't sent in the channel itself.
            """
            announce = conf.supybot.plugins.RSS.announce
            S = announce.get(channel)()
            for feed in feeds:
                S.discard(feed)
            announce.get(channel).setValue(S)
            irc.replySuccess()
        remove = wrap(remove, [('checkChannelCapability', 'op'),
                               many(first('url', 'feedName'))])
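
    # announce.add/remove follow a read-modify-write pattern on the registry
    # value: fetch the channel's set, mutate it, then setValue() to persist.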

    @internationalizeDocstring
    def rss(self, irc, msg, args, url, n):
        """<url> [<number of headlines>]

        Gets the title components of the given RSS feed.
        If <number of headlines> is given, return only that many headlines.
        """
        self.log.debug('Fetching %u', url)
        feed = self.getFeed(url)
        if irc.isChannel(msg.args[0]):
            channel = msg.args[0]
        else:
            channel = None
        headlines = self.getHeadlines(feed)
        if not headlines:
            irc.error(_('Couldn\'t get RSS feed.'))
            return
        headlines = self.buildHeadlines(headlines, channel, 'showLinks')
        if n:
            headlines = headlines[:n]
        else:
            headlines = headlines[:self.registryValue(
                'defaultNumberOfHeadlines')]
        sep = self.registryValue('headlineSeparator', channel)
        if self.registryValue('bold', channel):
            sep = ircutils.bold(sep)
        irc.replies(headlines, joiner=sep)
    rss = wrap(rss, ['url', additional('int')])

    @internationalizeDocstring
    def info(self, irc, msg, args, url):
        """<url|feed>

        Returns information from the given RSS feed, namely the title,
        URL, description, and last update date, if available.
        """
        try:
            url = self.registryValue('feeds.%s' % url)
        except registry.NonExistentRegistryEntry:
            pass
        feed = self.getFeed(url)
        conv = self._getConverter(feed)
        info = feed.get('feed')
        if not info:
            irc.error(_('I couldn\'t retrieve that RSS feed.'))
            return
        # Check for 'modified_parsed' (the key we actually read below) and
        # convert it to an elapsed-time string if it's there.
        if 'modified_parsed' in info:
            seconds = time.mktime(info['modified_parsed'])
            now = time.mktime(time.gmtime())
            when = utils.timeElapsed(now - seconds) + ' ago'
        else:
            when = 'time unavailable'
        title = conv(info.get('title', 'unavailable'))
        desc = conv(info.get('description', 'unavailable'))
        link = conv(info.get('link', 'unavailable'))
        # The rest of the entries are all available in the channel key.
        response = format(_('Title: %s; URL: %u; '
                            'Description: %s; Last updated: %s.'),
                          title, link, desc, when)
        irc.reply(utils.str.normalizeWhitespace(response))
    info = wrap(info, [first('url', 'feedName')])

RSS = internationalizeDocstring(RSS)

Class = RSS

# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79: