Limnoria/plugins/RSS/plugin.py

###
# Copyright (c) 2002-2004, Jeremiah Fincher
# Copyright (c) 2008-2010, James McCoy
# Copyright (c) 2010-2021, Valentin Lorentz
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import re
import os
import sys
import json
import time
import types
import string
import socket
import threading
import feedparser
import supybot.log as log
import supybot.conf as conf
import supybot.utils as utils
import supybot.world as world
from supybot.commands import *
import supybot.utils.minisix as minisix
import supybot.ircmsgs as ircmsgs
import supybot.ircutils as ircutils
import supybot.registry as registry
import supybot.callbacks as callbacks
from supybot.i18n import PluginInternationalization, internationalizeDocstring
_ = PluginInternationalization('RSS')
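# Minimum delay (in seconds) between plugin initialization and the first
# feed fetch; kept short while running the test suite.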
if world.testing:
INIT_DELAY = 1
else:
INIT_DELAY = 10
if minisix.PY2:
from urllib2 import ProxyHandler
else:
from urllib.request import ProxyHandler
from .config import register_feed_config
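# Converter for the 'feedName' argument type: it rejects names that look
# like channels, are not valid registry names (e.g. contain spaces), or
# contain dots, and canonicalizes the name it accepts.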
def get_feedName(irc, msg, args, state):
if irc.isChannel(args[0]):
state.errorInvalid('feed name', args[0], 'must not be a channel name.')
if not registry.isValidRegistryName(args[0]):
state.errorInvalid('feed name', args[0],
'Feed names must not include spaces.')
if "." in args[0]:
state.errorInvalid('feed name', args[0],
'Feed names must not include dots.')
state.args.append(callbacks.canonicalName(args.pop(0)))
addConverter('feedName', get_feedName)
announced_headlines_filename = \
conf.supybot.directories.data.dirize('RSS_announced.flat')
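# Decorator that simply drops a call when the decorated function is already
# running (e.g. a feed update triggered while the previous one is still in
# progress), instead of blocking or queueing it.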
def only_one_at_once(f):
lock = [False]
def newf(*args, **kwargs):
if lock[0]:
return
lock[0] = True
try:
f(*args, **kwargs)
finally:
lock[0] = False
return newf
def get_entry_id(entry):
# in order, try elements to use as unique identifier.
# http://validator.w3.org/feed/docs/rss2.html#hrelementsOfLtitemgt
id_elements = ('id', 'link', 'title', 'description')
for id_element in id_elements:
try:
return getattr(entry, id_element)
except AttributeError:
pass
raise ValueError('Feed entry has no id, link, title, or description')
class InvalidFeedUrl(ValueError):
pass
class Feed:
__slots__ = ('url', 'name', 'data', 'last_update', 'entries',
'etag', 'modified', 'initial',
'lock', 'announced_entries', 'last_exception')
def __init__(self, name, url, initial,
plugin_is_loading=False, announced=None):
assert name, name
if not url:
url = name
if not utils.web.httpUrlRe.match(url):
raise InvalidFeedUrl(url)
self.name = name
self.url = url
self.initial = initial
self.data = None
# We don't want to fetch feeds right after the plugin is
# loaded (the bot could be starting, and thus already busy)
self.last_update = 0
self.entries = []
self.etag = None
self.modified = None
self.lock = threading.Lock()
self.announced_entries = announced or \
utils.structures.TruncatableSet()
self.last_exception = None
def __repr__(self):
return 'Feed(%r, %r, %r, <bool>, %r)' % \
(self.name, self.url, self.initial, self.announced_entries)
def get_command(self, plugin):
docstring = format(_("""[<number of headlines>]
Reports the titles for %s at the RSS feed %u. If
<number of headlines> is given, returns only that many headlines.
RSS feeds are only looked up every supybot.plugins.RSS.waitPeriod
seconds, which defaults to 1800 (30 minutes) since that's what most
websites prefer."""), self.name, self.url)
def f(self2, irc, msg, args):
args.insert(0, self.name)
self2.rss(irc, msg, args)
f = utils.python.changeFunctionName(f, self.name, docstring)
f = types.MethodType(f, plugin)
return f
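# Maps each supported sortFeedItems value to the date fields to try, in
# order, and whether the sort is reversed.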
_sort_parameters = {
'oldestFirst': (('published_parsed', 'updated_parsed'), False),
'newestFirst': (('published_parsed', 'updated_parsed'), True),
'outdatedFirst': (('updated_parsed', 'published_parsed'), False),
'updatedFirst': (('updated_parsed', 'published_parsed'), True),
}
def _sort_arguments(order):
(fields, reverse) = _sort_parameters[order]
def key(entry):
for field in fields:
if field in entry:
return entry[field]
raise KeyError('No date field in entry.')
return (key, reverse)
def sort_feed_items(items, order):
"""Return feed items, sorted according to sortFeedItems."""
if order == 'asInFeed':
return items
(key, reverse) = _sort_arguments(order)
try:
sitems = sorted(items, key=key, reverse=reverse)
except KeyError:
# feedparser normalizes the timestamp fields of ATOM and RSS entries into
# the "published"/"updated" fields; entries without them cannot be sorted by date.
return items
return sitems
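# On-disk format of the announces DB: a JSON object mapping each feed name
# to the list of entry ids that have already been announced.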
def load_announces_db(fd):
return dict((name, utils.structures.TruncatableSet(entries))
for (name, entries) in json.load(fd).items())
def save_announces_db(db, fd):
json.dump(dict((name, list(entries)) for (name, entries) in db), fd)
class RSS(callbacks.Plugin):
"""
This plugin is useful both for announcing updates to RSS feeds in a
channel, and for retrieving the headlines of RSS feeds via command. Use
the "add" command to add feeds to this plugin, and use the "announce"
command to determine what feeds should be announced in a given channel.
Basic usage
^^^^^^^^^^^
1. Add a feed using
``@rss add limnoria https://github.com/progval/Limnoria/tags.atom``.
* This is the RSS feed of Limnoria's stable releases.
* You can now check the latest news from the feed with ``@limnoria``.
2. To have new news automatically announced on the channel, use
``@rss announce add Limnoria``.
To add another feed, simply replace ``limnoria`` and the address with the
name and the address of the feed you want to add. For example, YLE News:
1. ``@rss add yle http://yle.fi/uutiset/rss/uutiset.rss?osasto=news``
2. ``@rss announce add yle``
News on their own lines
^^^^^^^^^^^^^^^^^^^^^^^
If you want the feed headlines to be announced on their own lines instead
of being separated by the separator you have configured, you can set
``reply.onetoone`` to False. Please first read the help for that
configuration variable with ``@config help reply.onetoone`` and make sure
you understand what it says; then you can do
``@config reply.onetoone False``
"""
threaded = True
def __init__(self, irc):
self.__parent = super(RSS, self)
self.__parent.__init__(irc)
if world.starting:
self._init_time = time.time() # To delay loading the feeds
else:
self._init_time = 0
# Scheme: {name: url}
self.feed_names = callbacks.CanonicalNameDict()
# Scheme: {url: feed}
self.feeds = {}
if os.path.isfile(announced_headlines_filename):
with open(announced_headlines_filename) as fd:
announced = load_announces_db(fd)
else:
announced = {}
for name in self.registryValue('feeds'):
try:
url = self.registryValue(registry.join(['feeds', name]))
except registry.NonExistentRegistryEntry:
self.log.warning('%s is not a registered feed, removing.', name)
continue
try:
self.register_feed(name, url, True, True, announced.get(name, []))
except InvalidFeedUrl:
self.log.error('%s is not a valid feed, removing.', name)
continue
world.flushers.append(self._flush)
def die(self):
self._flush()
world.flushers.remove(self._flush)
self.__parent.die()
def _flush(self):
l = [(f.name, f.announced_entries) for f in self.feeds.values()]
with utils.file.AtomicFile(announced_headlines_filename, 'w',
backupDir='/dev/null') as fd:
save_announces_db(l, fd)
##################
# Feed registering
def assert_feed_does_not_exist(self, name, url=None):
if self.isCommandMethod(name):
s = format(_('I already have a command in this plugin named %s.'),
name)
raise callbacks.Error(s)
if url:
feed = self.feeds.get(url)
if feed and feed.name != feed.url:
s = format(_('I already have a feed with that URL named %s.'),
feed.name)
raise callbacks.Error(s)
def register_feed(self, name, url, initial,
plugin_is_loading, announced=None):
if name != url:
# If name == url, then it's an anonymous feed
self.feed_names[name] = url
self.feeds[url] = Feed(name, url, initial,
plugin_is_loading, announced)
def remove_feed(self, name_or_url):
self.feed_names.pop(name_or_url, None)
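# The registry list may contain the name or URL more than once; keep
# removing it until it is gone.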
while True:
try:
conf.supybot.plugins.RSS.feeds().remove(name_or_url)
except KeyError:
break
try:
conf.supybot.plugins.RSS.feeds.unregister(name_or_url)
except (KeyError, registry.NonExistentRegistryEntry):
pass
##################
# Methods handling
def isCommandMethod(self, name):
if not self.__parent.isCommandMethod(name):
return bool(self.get_feed(name))
else:
return True
def listCommands(self):
return self.__parent.listCommands(self.feed_names.keys())
def getCommandMethod(self, command):
try:
return self.__parent.getCommandMethod(command)
except AttributeError:
return self.get_feed(command[0]).get_command(self)
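# Called for every message the bot sees; besides normal command dispatch,
# this doubles as the hook that refreshes feeds in a background thread
# (update_feeds itself is guarded by @only_one_at_once).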
def __call__(self, irc, msg):
self.__parent.__call__(irc, msg)
threading.Thread(target=self.update_feeds).start()
##################
# Status accessors
def get_feed(self, name):
return self.feeds.get(self.feed_names.get(name, name), None)
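# A feed is due for a refresh when its last update is older than the
# configured waitPeriod; a feed-specific waitPeriod, if set, overrides the
# global one.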
def is_expired(self, feed):
assert feed
period = self.registryValue('waitPeriod')
if feed.name != feed.url: # Named feed
specific_period = self.registryValue('feeds.%s.waitPeriod' % feed.name)
if specific_period:
period = specific_period
event_horizon = time.time() - period
return feed.last_update < event_horizon
###############
# Feed fetching
@log.firewall
def update_feed(self, feed):
handlers = []
if utils.web.proxy():
handlers.append(ProxyHandler(
{'http': utils.force(utils.web.proxy())}))
handlers.append(ProxyHandler(
{'https': utils.force(utils.web.proxy())}))
with feed.lock:
try:
d = feedparser.parse(feed.url, etag=feed.etag,
modified=feed.modified, handlers=handlers)
except socket.error as e:
self.log.warning("Network error while fetching <%s>: %s",
feed.url, e)
feed.last_exception = e
return
except Exception as e:
self.log.error("Failed to fetch <%s>: %s", feed.url, e)
raise # reraise so @log.firewall prints the traceback
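# A 304 status means the server reports the feed unchanged since our last
# fetch (the etag/modified values matched), so keep the cached data as-is.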
if 'status' not in d or d.status != 304: # Not modified
if 'etag' in d:
feed.etag = d.etag
if 'modified' in d:
feed.modified = d.modified
feed.data = d.feed
feed.entries = d.entries
feed.last_update = time.time()
# feedparser will store soft errors in bozo_exception and set
# the "bozo" bit to 1 on supported platforms:
# https://pythonhosted.org/feedparser/bozo.html
# If this error caused us to e.g. not get any entries at all,
# it may be helpful to show it as well.
if getattr(d, 'bozo', 0) and hasattr(d, 'bozo_exception'):
feed.last_exception = d.bozo_exception
else:
feed.last_exception = None
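# Only the first successful fetch counts as the "initial" one;
# announce_feed uses this flag to pick between the initial and regular
# headline limits.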
(initial, feed.initial) = (feed.initial, False)
self.announce_feed(feed, initial)
def update_feed_if_needed(self, feed):
if self.is_expired(feed) and \
self._init_time + INIT_DELAY < time.time():
self.update_feed(feed)
@only_one_at_once
def update_feeds(self):
announced_feeds = {}
for irc in world.ircs:
for channel in list(irc.state.channels):
channel_feed_names = self.registryValue(
'announce', channel, irc.network)
for name in channel_feed_names:
announced_feeds[name] = (channel, irc.network)
for (name, (channel, network)) in announced_feeds.items():
feed = self.get_feed(name)
if not feed:
self.log.warning(
'Feed %s is announced in %s@%s, but does not exist. '
'Use "rss announce remove %s %s" to remove it from '
'announcements.',
name, channel, network, channel, name)
continue
self.update_feed_if_needed(feed)
def get_new_entries(self, feed):
with feed.lock:
entries = feed.entries
new_entries = [entry for entry in entries
if get_entry_id(entry) not in feed.announced_entries]
if not new_entries:
return []
feed.announced_entries |= set(get_entry_id(entry) for entry in new_entries)
# We keep a little more because we don't want to re-announce
# oldest entries if one of the newest gets removed.
feed.announced_entries.truncate(10*len(entries))
return new_entries
def announce_feed(self, feed, initial):
new_entries = self.get_new_entries(feed)
order = self.registryValue('sortFeedItems')
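# Sort newest-first so that the truncation below keeps the newest entries;
# the configured display order is applied again right before announcing.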
new_entries = sort_feed_items(new_entries, 'newestFirst')
for irc in world.ircs:
for channel in list(irc.state.channels):
# Old bots have it set in plugins.RSS.announce.#channel,
# new bots set it in plugins.RSS.announce.:network.#channel,
# so we want to read from both.
channel_feeds = self.registryValue('announce', channel) \
| self.registryValue('announce', channel, irc.network)
if feed.name not in channel_feeds:
continue
if initial:
max_entries = self.registryValue(
'initialAnnounceHeadlines', channel, irc.network)
else:
max_entries = self.registryValue(
'maximumAnnounceHeadlines', channel, irc.network)
announced_entries = new_entries[0:max_entries]
announced_entries = sort_feed_items(announced_entries, order)
for entry in announced_entries:
self.announce_entry(irc, channel, feed, entry)
#################
# Entry rendering
def should_send_entry(self, network, channel, entry):
whitelist = self.registryValue('keywordWhitelist', channel, network)
blacklist = self.registryValue('keywordBlacklist', channel, network)
# fix shadowing by "from supybot.commands import *"
try:
all = __builtins__.all
any = __builtins__.any
except AttributeError:
all = __builtins__['all']
any = __builtins__['any']
title = getattr(entry, 'title', '')
description = getattr(entry, 'description', '')
if whitelist:
if all(kw not in title and kw not in description
for kw in whitelist):
return False
if blacklist:
if any(kw in title or kw in description
for kw in blacklist):
return False
return True
_normalize_entry = utils.str.multipleReplacer(
{'\r': ' ', '\n': ' ', '\x00': ''})
def format_entry(self, network, channel, feed, entry, is_announce):
key_name = 'announceFormat' if is_announce else 'format'
if feed.name in self.registryValue('feeds'):
specific_key_name = registry.join(['feeds', feed.name, key_name])
template = self.registryValue(specific_key_name,
channel, network) or \
self.registryValue(key_name, channel, network)
else:
template = self.registryValue(key_name, channel, network)
date = entry.get('published_parsed')
date = utils.str.timestamp(date)
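# Render the $-style template; the entry's own fields (e.g. $title, $link)
# are available as substitution keys, along with $feed_name and $date.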
s = string.Template(template).safe_substitute(
entry,
feed_name=feed.name,
date=date)
return self._normalize_entry(s)
def announce_entry(self, irc, channel, feed, entry):
if self.should_send_entry(irc.network, channel, entry):
s = self.format_entry(irc.network, channel, feed, entry, True)
if self.registryValue('notice', channel, irc.network):
m = ircmsgs.notice(channel, s)
else:
m = ircmsgs.privmsg(channel, s)
irc.queueMsg(m)
##########
# Commands
@internationalizeDocstring
def add(self, irc, msg, args, name, url):
"""<name> <url>
Adds a command to this plugin that will look up the RSS feed at the
given URL.
"""
self.assert_feed_does_not_exist(name, url)
register_feed_config(name, url)
self.register_feed(name, url, True, False)
irc.replySuccess()
add = wrap(add, ['feedName', 'url'])
@internationalizeDocstring
def remove(self, irc, msg, args, name):
"""<name>
Removes the command for looking up RSS feeds at <name> from
this plugin.
"""
feed = self.get_feed(name)
if not feed:
irc.error(_('That\'s not a valid RSS feed command name.'))
return
# If the feed was first created "anonymously", eg. with
# `@rss announce add http://example.org/rss`, then as a named feed
# with `@rss add example http://example.org/rss`,
# `self.get_feed(name)` above gets only one of them; so let's
# remove the aliased name or URL from the feed names too,
# or we would have a dangling entry here.
self.remove_feed(feed.url)
self.remove_feed(name)
assert self.get_feed(name) is None
irc.replySuccess()
remove = wrap(remove, ['feedName'])
class announce(callbacks.Commands):
@internationalizeDocstring
def list(self, irc, msg, args, channel):
"""[<channel>]
Returns the list of feeds announced in <channel>. <channel> is
only necessary if the message isn't sent in the channel itself.
"""
announce = conf.supybot.plugins.RSS.announce
channel_feeds = announce.getSpecific(channel=channel)() \
| announce.getSpecific(channel=channel, network=irc.network)()
feeds = format('%L', set(channel_feeds)) # set() to deduplicate
irc.reply(feeds or _('I am currently not announcing any feeds.'))
list = wrap(list, ['channel',])
@internationalizeDocstring
def add(self, irc, msg, args, channel, feeds):
"""[<channel>] <name|url> [<name|url> ...]
Adds the list of feeds to the current list of announced feeds in
<channel>. Valid feeds include the names of registered feeds as
well as URLs for RSS feeds. <channel> is only necessary if the
message isn't sent in the channel itself.
"""
plugin = irc.getCallback('RSS')
invalid_feeds = [x for x in feeds if not plugin.get_feed(x)
and not utils.web.urlRe.match(x)]
if invalid_feeds:
irc.error(format(_('These feeds are unknown: %L'),
invalid_feeds), Raise=True)
announce = conf.supybot.plugins.RSS.announce
S = announce.getSpecific(channel=channel, network=irc.network)()
for name in feeds:
S.add(name)
announce.getSpecific(channel=channel, network=irc.network,
fallback_to_channel=False).setValue(S)
irc.replySuccess()
for name in feeds:
feed = plugin.get_feed(name)
if not feed:
register_feed_config(name, name)
plugin.register_feed(name, name, True, False)
feed = plugin.get_feed(name)
plugin.announce_feed(feed, True)
add = wrap(add, [('checkChannelCapability', 'op'),
many(first('url', 'feedName'))])
@internationalizeDocstring
def remove(self, irc, msg, args, channel, feeds):
"""[<channel>] <name|url> [<name|url> ...]
Removes the list of feeds from the current list of announced feeds
in <channel>. Valid feeds include the names of registered feeds as
well as URLs for RSS feeds. <channel> is only necessary if the
message isn't sent in the channel itself.
"""
announce = conf.supybot.plugins.RSS.announce
def remove_from_var(var):
S = var()
for feed in feeds:
S.discard(feed)
var.setValue(S)
remove_from_var(announce.get(channel))
remove_from_var(announce.getSpecific(
channel=channel, network=irc.network, fallback_to_channel=False))
irc.replySuccess()
remove = wrap(remove, [('checkChannelCapability', 'op'),
many(first('url', 'feedName'))])
@internationalizeDocstring
def channels(self, irc, msg, args, feed):
"""<name|url>
Returns a list of channels that the given feed name or URL is being
announced to.
"""
plugin = irc.getCallback('RSS')
if not plugin.get_feed(feed):
irc.error(_('Unknown feed %s') % feed, Raise=True)
channels = []
for ircnet in world.ircs:
for channel in list(ircnet.state.channels):
if feed in plugin.registryValue('announce', channel, ircnet.network):
channels.append(ircnet.network + channel)
if channels:
irc.reply(format("%s is announced to %L.", feed, channels))
else:
irc.reply("%s is not announced to any channels." % feed)
channels = wrap(channels, ['feedName'])
@internationalizeDocstring
def rss(self, irc, msg, args, url, n):
"""<name|url> [<number of headlines>]
Gets the title components of the given RSS feed.
If <number of headlines> is given, return only that many headlines.
"""
self.log.debug('Fetching %u', url)
try:
feed = self.get_feed(url)
if not feed:
feed = Feed(url, url, True)
except InvalidFeedUrl:
irc.error('%s is not a valid feed URL or name.' % url, Raise=True)
channel = msg.channel
self.update_feed_if_needed(feed)
entries = feed.entries
if not entries:
s = _('Couldn\'t get RSS feed.')
# If we got a soft parsing exception on our last run, show the error.
if feed.last_exception is not None:
s += _(' Parser error: ')
s += str(feed.last_exception)
irc.error(s)
return
n = n or self.registryValue('defaultNumberOfHeadlines', channel, irc.network)
entries = list(filter(lambda e:self.should_send_entry(irc.network, channel, e),
feed.entries))
entries = sort_feed_items(entries, 'newestFirst')
entries = entries[:n]
entries = sort_feed_items(entries, self.registryValue('sortFeedItems'))
headlines = map(lambda e:self.format_entry(irc.network, channel, feed, e, False),
entries)
sep = self.registryValue('headlineSeparator', channel, irc.network)
irc.replies(headlines, joiner=sep)
rss = wrap(rss, [first('url', 'feedName'), additional('int')])
@internationalizeDocstring
def info(self, irc, msg, args, url):
"""<url|feed>
Returns information from the given RSS feed, namely the title,
URL, description, and last update date, if available.
"""
try:
url = self.registryValue('feeds.%s' % url)
except registry.NonExistentRegistryEntry:
pass
feed = self.get_feed(url)
if not feed:
feed = Feed(url, url, True)
self.update_feed_if_needed(feed)
info = feed.data
if not info:
irc.error(_('I couldn\'t retrieve that RSS feed.'))
return
# If feedparser gave us a parsed modification date, convert it to seconds
# since the epoch so we can report how long ago the feed was updated.
if info.get('modified_parsed'):
seconds = time.mktime(info['modified_parsed'])
now = time.mktime(time.gmtime())
when = utils.timeElapsed(now - seconds) + ' ago'
else:
when = _('time unavailable')
title = info.get('title', _('unavailable'))
desc = info.get('description', _('unavailable'))
link = info.get('link', _('unavailable'))
# The rest of the entries are all available in the channel key
response = format(_('Title: %s; URL: %u; '
'Description: %s; Last updated: %s.'),
title, link, desc, when)
irc.reply(utils.str.normalizeWhitespace(response))
info = wrap(info, [first('url', 'feedName')])
RSS = internationalizeDocstring(RSS)
Class = RSS
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79: