###
# Copyright (c) 2002-2004, Jeremiah Fincher
# Copyright (c) 2008-2010, James McCoy
# Copyright (c) 2014, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
#   * Redistributions of source code must retain the above copyright notice,
#     this list of conditions, and the following disclaimer.
#   * Redistributions in binary form must reproduce the above copyright notice,
#     this list of conditions, and the following disclaimer in the
#     documentation and/or other materials provided with the distribution.
#   * Neither the name of the author of this software nor the name of
#     contributors to this software may be used to endorse or promote products
#     derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###

import re
import os
import sys
import json
import time
import types
import string
import socket
import threading
import feedparser

import supybot.conf as conf
import supybot.utils as utils
import supybot.world as world
from supybot.commands import *
import supybot.utils.minisix as minisix
import supybot.ircmsgs as ircmsgs
import supybot.ircutils as ircutils
import supybot.registry as registry
import supybot.callbacks as callbacks
from supybot.i18n import PluginInternationalization, internationalizeDocstring
_ = PluginInternationalization('RSS')


def get_feedName(irc, msg, args, state):
    if ircutils.isChannel(args[0]):
        state.errorInvalid('feed name', args[0],
                           'Feed names must not be channel names.')
    if not registry.isValidRegistryName(args[0]):
        state.errorInvalid('feed name', args[0],
                           'Feed names must not include spaces.')
    state.args.append(callbacks.canonicalName(args.pop(0)))
addConverter('feedName', get_feedName)


announced_headlines_filename = \
        conf.supybot.directories.data.dirize('RSS_announced.flat')


def only_one_at_once(f):
    lock = [False]
    def newf(*args, **kwargs):
        if lock[0]:
            return
        lock[0] = True
        try:
            f(*args, **kwargs)
        finally:
            lock[0] = False
    return newf
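
# only_one_at_once() is used as a decorator: while one call to the wrapped
# function is still running, any overlapping call returns immediately instead
# of running it a second time. A minimal usage sketch (hypothetical function
# name):
#
#   @only_one_at_once
#   def refresh_everything():
#       ...  # long-running work; concurrent invocations are silently dropped
#
# Note the guard is a plain list cell rather than a threading.Lock, so this is
# a best-effort re-entrancy check, not strict mutual exclusion.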

class InvalidFeedUrl(ValueError):
    pass


class Feed:
    __slots__ = ('url', 'name', 'data', 'last_update', 'entries',
            'etag', 'modified', 'initial',
            'lock', 'announced_entries')
    def __init__(self, name, url, initial,
            plugin_is_loading=False, announced=None):
        assert name, name
        if not url:
            if not utils.web.httpUrlRe.match(name):
                raise InvalidFeedUrl(name)
            url = name
        self.name = name
        self.url = url
        self.initial = initial
        self.data = None
        # We don't want to fetch feeds right after the plugin is
        # loaded (the bot could be starting, and thus already busy)
        self.last_update = time.time() if plugin_is_loading else 0
        self.entries = []
        self.etag = None
        self.modified = None
        self.lock = threading.Lock()
        self.announced_entries = announced or \
                utils.structures.TruncatableSet()

    def __repr__(self):
        return 'Feed(%r, %r, %r, <bool>, %r)' % \
                (self.name, self.url, self.initial, self.announced_entries)

    def get_command(self, plugin):
        docstring = format(_("""[<number of headlines>]

        Reports the titles for %s at the RSS feed %u. If
        <number of headlines> is given, returns only that many headlines.
        RSS feeds are only looked up every supybot.plugins.RSS.waitPeriod
        seconds, which defaults to 1800 (30 minutes) since that's what most
        websites prefer."""), self.name, self.url)
        def f(self2, irc, msg, args):
            args.insert(0, self.name)
            self2.rss(irc, msg, args)
        f = utils.python.changeFunctionName(f, self.name, docstring)
        f = types.MethodType(f, plugin)
        return f


_sort_parameters = {
        'oldestFirst': (('published_parsed', 'updated_parsed'), False),
        'newestFirst': (('published_parsed', 'updated_parsed'), True),
        'outdatedFirst': (('updated_parsed', 'published_parsed'), False),
        'updatedFirst': (('updated_parsed', 'published_parsed'), True),
        }
def _sort_arguments(order):
    (fields, reverse) = _sort_parameters[order]
    def key(entry):
        for field in fields:
            if field in entry:
                return entry[field]
        raise KeyError('No date field in entry.')
    return (key, reverse)


def sort_feed_items(items, order):
    """Return feed items, sorted according to sortFeedItems."""
    if order == 'asInFeed':
        return items
    (key, reverse) = _sort_arguments(order)
    try:
        sitems = sorted(items, key=key, reverse=reverse)
    except KeyError:
        # feedparser normalizes required timestamp fields in ATOM and RSS
        # to the "published"/"updated" fields. Feeds missing it are
        # unsortable by date.
        return items
    return sitems


def load_announces_db(fd):
    return dict((name, utils.structures.TruncatableSet(entries))
                for (name, entries) in json.load(fd).items())
def save_announces_db(db, fd):
    json.dump(dict((name, list(entries)) for (name, entries) in db), fd)
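
# save_announces_db() persists the announced-headlines state as one JSON
# object mapping feed names to lists of already-announced entry ids; for
# example (hypothetical feed name and ids):
#
#   {"example": ["http://example.org/posts/1", "Some headline title"]}
#
# load_announces_db() reads that object back and rebuilds a TruncatableSet
# per feed.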


class RSS(callbacks.Plugin):
    """This plugin is useful both for announcing updates to RSS feeds in a
    channel, and for retrieving the headlines of RSS feeds via command. Use
    the "add" command to add feeds to this plugin, and use the "announce"
    command to determine what feeds should be announced in a given channel."""
    threaded = True
    def __init__(self, irc):
        self.__parent = super(RSS, self)
        self.__parent.__init__(irc)
        # Scheme: {name: url}
        self.feed_names = callbacks.CanonicalNameDict()
        # Scheme: {url: feed}
        self.feeds = {}
        if os.path.isfile(announced_headlines_filename):
            with open(announced_headlines_filename) as fd:
                announced = load_announces_db(fd)
        else:
            announced = {}
        for name in self.registryValue('feeds'):
            self.assert_feed_does_not_exist(name)
            self.register_feed_config(name)
            try:
                url = self.registryValue(registry.join(['feeds', name]))
            except registry.NonExistentRegistryEntry:
                self.log.warning('%s is not a registered feed, removing.', name)
                continue
            try:
                self.register_feed(name, url, True, True,
                                   announced.get(name, []))
            except InvalidFeedUrl:
                self.log.error('%s is not a valid feed, removing.', name)
                continue
        world.flushers.append(self._flush)

    def die(self):
        self._flush()
        world.flushers.remove(self._flush)
        self.__parent.die()

    def _flush(self):
        l = [(f.name, f.announced_entries) for f in self.feeds.values()]
        with utils.file.AtomicFile(announced_headlines_filename, 'w',
                                   backupDir='/dev/null') as fd:
            save_announces_db(l, fd)

    ##################
    # Feed registering

    def assert_feed_does_not_exist(self, name, url=None):
        if self.isCommandMethod(name):
            s = format(_('I already have a command in this plugin named %s.'),
                    name)
            raise callbacks.Error(s)
        if url:
            feed = self.feeds.get(url)
            if feed and feed.name != feed.url:
                s = format(_('I already have a feed with that URL named %s.'),
                        feed.name)
                raise callbacks.Error(s)

    def register_feed_config(self, name, url=''):
        self.registryValue('feeds').add(name)
        group = self.registryValue('feeds', value=False)
        conf.registerGlobalValue(group, name, registry.String(url, ''))
        feed_group = conf.registerGroup(group, name)
        conf.registerChannelValue(feed_group, 'format',
                registry.String('', _("""Feed-specific format. Defaults to
                supybot.plugins.RSS.format if empty.""")))
        conf.registerChannelValue(feed_group, 'announceFormat',
                registry.String('', _("""Feed-specific announce format.
                Defaults to supybot.plugins.RSS.announceFormat if empty.""")))
        conf.registerGlobalValue(feed_group, 'waitPeriod',
                registry.NonNegativeInteger(0, _("""If set to a non-zero
                value, overrides supybot.plugins.RSS.waitPeriod for this
                particular feed.""")))

    def register_feed(self, name, url, initial,
            plugin_is_loading, announced=[]):
        self.feed_names[name] = url
        self.feeds[url] = Feed(name, url, initial,
                plugin_is_loading, announced)

    def remove_feed(self, feed):
        del self.feed_names[feed.name]
        del self.feeds[feed.url]
        conf.supybot.plugins.RSS.feeds().remove(feed.name)
        conf.supybot.plugins.RSS.feeds.unregister(feed.name)

    ##################
    # Methods handling

    def isCommandMethod(self, name):
        if not self.__parent.isCommandMethod(name):
            return bool(self.get_feed(name))
        else:
            return True

    def listCommands(self):
        return self.__parent.listCommands(self.feed_names.keys())

    def getCommandMethod(self, command):
        try:
            return self.__parent.getCommandMethod(command)
        except AttributeError:
            return self.get_feed(command[0]).get_command(self)

    def __call__(self, irc, msg):
        self.__parent.__call__(irc, msg)
        threading.Thread(target=self.update_feeds).start()


    ##################
    # Status accessors

    def get_feed(self, name):
        return self.feeds.get(self.feed_names.get(name, name), None)

    def is_expired(self, feed):
        assert feed
        period = self.registryValue('waitPeriod')
        if feed.name != feed.url: # Named feed
            specific_period = self.registryValue(
                    'feeds.%s.waitPeriod' % feed.name)
            if specific_period:
                period = specific_period
        event_horizon = time.time() - period
        return feed.last_update < event_horizon
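
    # is_expired() lets a per-feed setting take precedence over the global
    # refresh delay: for a registered feed (name different from its URL), a
    # non-zero supybot.plugins.RSS.feeds.<name>.waitPeriod overrides
    # supybot.plugins.RSS.waitPeriod. For example (hypothetical feed name):
    #   config supybot.plugins.RSS.waitPeriod 1800
    #   config supybot.plugins.RSS.feeds.example.waitPeriod 300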

    ###############
    # Feed fetching

    def update_feed(self, feed):
        with feed.lock:
            d = feedparser.parse(feed.url, etag=feed.etag,
                    modified=feed.modified)
            if 'status' not in d or d.status != 304: # Not modified
                if 'etag' in d:
                    feed.etag = d.etag
                if 'modified' in d:
                    feed.modified = d.modified
                feed.data = d.feed
                feed.entries = d.entries
                feed.last_update = time.time()
        (initial, feed.initial) = (feed.initial, False)
        self.announce_feed(feed, initial)

    def update_feed_in_thread(self, feed):
        feed.last_update = time.time()
        t = world.SupyThread(target=self.update_feed,
                name=format('Fetching feed %u', feed.url),
                args=(feed,))
        t.setDaemon(True)
        t.start()

    def update_feed_if_needed(self, feed):
        if self.is_expired(feed):
            self.update_feed(feed)

    @only_one_at_once
    def update_feeds(self):
        announced_feeds = set()
        for irc in world.ircs:
            for channel in irc.state.channels:
                announced_feeds |= self.registryValue('announce', channel)
        for name in announced_feeds:
            feed = self.get_feed(name)
            if not feed:
                self.log.warning('Feed %s is announced but does not exist.',
                        name)
                continue
            self.update_feed_if_needed(feed)

    def get_new_entries(self, feed):
        # http://validator.w3.org/feed/docs/rss2.html#hrelementsOfLtitemgt
        get_id = lambda entry: entry.id if hasattr(entry, 'id') else (
                entry.title if hasattr(entry, 'title') else entry.description)

        with feed.lock:
            entries = feed.entries
            new_entries = [entry for entry in entries
                    if get_id(entry) not in feed.announced_entries]
            if not new_entries:
                return []
            feed.announced_entries |= set(
                    get_id(entry) for entry in new_entries)
            # We keep a little more because we don't want to re-announce
            # oldest entries if one of the newest gets removed.
            feed.announced_entries.truncate(10*len(entries))
        return new_entries

    def announce_feed(self, feed, initial):
        new_entries = self.get_new_entries(feed)

        order = self.registryValue('sortFeedItems')
        new_entries = sort_feed_items(new_entries, order)
        for irc in world.ircs:
            for channel in irc.state.channels:
                if feed.name not in self.registryValue('announce', channel):
                    continue
                if initial:
                    n = self.registryValue('initialAnnounceHeadlines', channel)
                    if n:
                        announced_entries = new_entries[-n:]
                    else:
                        announced_entries = []
                else:
                    announced_entries = new_entries
                for entry in announced_entries:
                    self.announce_entry(irc, channel, feed, entry)


    #################
    # Entry rendering

    def should_send_entry(self, channel, entry):
        whitelist = self.registryValue('keywordWhitelist', channel)
        blacklist = self.registryValue('keywordBlacklist', channel)
        if whitelist:
            if all(kw not in entry.title and kw not in entry.description
                    for kw in whitelist):
                return False
        if blacklist:
            if any(kw in entry.title or kw in entry.description
                    for kw in blacklist):
                return False
        return True

    _normalize_entry = utils.str.multipleReplacer(
            {'\r': ' ', '\n': ' ', '\x00': ''})
    def format_entry(self, channel, feed, entry, is_announce):
        key_name = 'announceFormat' if is_announce else 'format'
        if feed.name in self.registryValue('feeds'):
            specific_key_name = registry.join(['feeds', feed.name, key_name])
            template = self.registryValue(specific_key_name, channel) or \
                    self.registryValue(key_name, channel)
        else:
            template = self.registryValue(key_name, channel)
        date = entry.get('published_parsed')
        date = utils.str.timestamp(date)
        s = string.Template(template).safe_substitute(
                feed_name=feed.name,
                date=date,
                **entry)
        return self._normalize_entry(s)
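
    # format_entry() renders an entry through string.Template, so the
    # supybot.plugins.RSS.format / announceFormat values (and their per-feed
    # overrides) may reference $feed_name, $date, and any key of the
    # feedparser entry such as $title or $link. An illustrative template
    # (not a default shipped by the plugin):
    #   News from $feed_name: $title <$link>
    # safe_substitute() leaves unknown $placeholders in place instead of
    # raising, and _normalize_entry() strips newlines and NULs so the result
    # fits on a single IRC line.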

    def announce_entry(self, irc, channel, feed, entry):
        if self.should_send_entry(channel, entry):
            s = self.format_entry(channel, feed, entry, True)
            if self.registryValue('notice', channel):
                m = ircmsgs.notice(channel, s)
            else:
                m = ircmsgs.privmsg(channel, s)
            irc.queueMsg(m)


    ##########
    # Commands

    @internationalizeDocstring
    def add(self, irc, msg, args, name, url):
        """<name> <url>

        Adds a command to this plugin that will look up the RSS feed at the
        given URL.
        """
        self.assert_feed_does_not_exist(name, url)
        self.register_feed_config(name, url)
        self.register_feed(name, url, True, False)
        irc.replySuccess()
    add = wrap(add, ['feedName', 'url'])
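
    # A typical interaction, with a hypothetical feed name, URL and prefix
    # character:
    #   <user> @rss add example http://example.org/feed.xml
    #   <user> @example 3
    # The second command works because every registered feed is exposed as a
    # command of this plugin through Feed.get_command().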

    @internationalizeDocstring
    def remove(self, irc, msg, args, name):
        """<name>

        Removes the command for looking up RSS feeds at <name> from
        this plugin.
        """
        feed = self.get_feed(name)
        if not feed:
            irc.error(_('That\'s not a valid RSS feed command name.'))
            return
        self.remove_feed(feed)
        irc.replySuccess()
    remove = wrap(remove, ['feedName'])

    class announce(callbacks.Commands):
        @internationalizeDocstring
        def list(self, irc, msg, args, channel):
            """[<channel>]

            Returns the list of feeds announced in <channel>. <channel> is
            only necessary if the message isn't sent in the channel itself.
            """
            announce = conf.supybot.plugins.RSS.announce
            feeds = format('%L', list(announce.get(channel)()))
            irc.reply(feeds or _('I am currently not announcing any feeds.'))
        list = wrap(list, ['channel',])

        @internationalizeDocstring
        def add(self, irc, msg, args, channel, feeds):
            """[<channel>] <name|url> [<name|url> ...]

            Adds the list of feeds to the current list of announced feeds in
            <channel>. Valid feeds include the names of registered feeds as
            well as URLs for RSS feeds. <channel> is only necessary if the
            message isn't sent in the channel itself.
            """
            plugin = irc.getCallback('RSS')
            invalid_feeds = [x for x in feeds if not plugin.get_feed(x)
                    and not utils.web.urlRe.match(x)]
            if invalid_feeds:
                irc.error(format(_('These feeds are unknown: %L'),
                    invalid_feeds), Raise=True)
            announce = conf.supybot.plugins.RSS.announce
            S = announce.get(channel)()
            for name in feeds:
                S.add(name)
            announce.get(channel).setValue(S)
            irc.replySuccess()
            for name in feeds:
                feed = plugin.get_feed(name)
                if not feed:
                    plugin.register_feed_config(name, name)
                    plugin.register_feed(name, name, True, False)
                    feed = plugin.get_feed(name)
                plugin.announce_feed(feed, True)
        add = wrap(add, [('checkChannelCapability', 'op'),
                         many(first('url', 'feedName'))])

        @internationalizeDocstring
        def remove(self, irc, msg, args, channel, feeds):
            """[<channel>] <name|url> [<name|url> ...]

            Removes the list of feeds from the current list of announced feeds
            in <channel>. Valid feeds include the names of registered feeds as
            well as URLs for RSS feeds. <channel> is only necessary if the
            message isn't sent in the channel itself.
            """
            announce = conf.supybot.plugins.RSS.announce
            S = announce.get(channel)()
            for feed in feeds:
                S.discard(feed)
            announce.get(channel).setValue(S)
            irc.replySuccess()
        remove = wrap(remove, [('checkChannelCapability', 'op'),
                               many(first('url', 'feedName'))])

    @internationalizeDocstring
    def rss(self, irc, msg, args, url, n):
        """<name|url> [<number of headlines>]

        Gets the title components of the given RSS feed.
        If <number of headlines> is given, return only that many headlines.
        """
        self.log.debug('Fetching %u', url)
        feed = self.get_feed(url)
        if not feed:
            feed = Feed(url, url, True)
        if irc.isChannel(msg.args[0]):
            channel = msg.args[0]
        else:
            channel = None
        self.update_feed_if_needed(feed)
        entries = feed.entries
        if not entries:
            irc.error(_('Couldn\'t get RSS feed.'))
            return
        n = n or self.registryValue('defaultNumberOfHeadlines', channel)
        entries = list(filter(lambda e:self.should_send_entry(channel, e),
                              feed.entries))
        entries = entries[:n]
        headlines = map(lambda e:self.format_entry(channel, feed, e, False),
                        entries)
        sep = self.registryValue('headlineSeparator', channel)
        irc.replies(headlines, joiner=sep)
    rss = wrap(rss, [first('url', 'feedName'), additional('int')])
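
    # The rss command also accepts a URL that was never registered with
    # "add"; a temporary Feed object is then built on the fly just for this
    # lookup. For example (hypothetical URL):
    #   <user> @rss http://example.org/feed.xml 5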

    @internationalizeDocstring
    def info(self, irc, msg, args, url):
        """<url|feed>

        Returns information from the given RSS feed, namely the title,
        URL, description, and last update date, if available.
        """
        try:
            url = self.registryValue('feeds.%s' % url)
        except registry.NonExistentRegistryEntry:
            pass
        feed = self.get_feed(url)
        if not feed:
            feed = Feed(url, url, True)
        self.update_feed_if_needed(feed)
        info = feed.data
        if not info:
            irc.error(_('I couldn\'t retrieve that RSS feed.'))
            return
        # check the 'modified_parsed' key, if it's there, convert it here first
        if 'modified_parsed' in info:
            seconds = time.mktime(info['modified_parsed'])
            now = time.mktime(time.gmtime())
            when = utils.timeElapsed(now - seconds) + ' ago'
        else:
            when = _('time unavailable')
        title = info.get('title', _('unavailable'))
        desc = info.get('description', _('unavailable'))
        link = info.get('link', _('unavailable'))
        # The rest of the entries are all available in the channel key
        response = format(_('Title: %s; URL: %u; '
                            'Description: %s; Last updated: %s.'),
                          title, link, desc, when)
        irc.reply(utils.str.normalizeWhitespace(response))
    info = wrap(info, [first('url', 'feedName')])
RSS = internationalizeDocstring(RSS)


Class = RSS


# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79: