2005-02-02 07:03:09 +01:00
|
|
|
###
|
|
|
|
# Copyright (c) 2002-2004, Jeremiah Fincher
|
2012-09-01 16:16:48 +02:00
|
|
|
# Copyright (c) 2008-2010, James McCoy
|
2014-07-31 18:44:49 +02:00
|
|
|
# Copyright (c) 2014, Valentin Lorentz
|
2005-02-02 07:03:09 +01:00
|
|
|
# All rights reserved.
|
|
|
|
#
|
|
|
|
# Redistribution and use in source and binary forms, with or without
|
|
|
|
# modification, are permitted provided that the following conditions are met:
|
|
|
|
#
|
|
|
|
# * Redistributions of source code must retain the above copyright notice,
|
|
|
|
# this list of conditions, and the following disclaimer.
|
|
|
|
# * Redistributions in binary form must reproduce the above copyright notice,
|
|
|
|
# this list of conditions, and the following disclaimer in the
|
|
|
|
# documentation and/or other materials provided with the distribution.
|
|
|
|
# * Neither the name of the author of this software nor the name of
|
|
|
|
# contributors to this software may be used to endorse or promote products
|
|
|
|
# derived from this software without specific prior written consent.
|
|
|
|
#
|
|
|
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
|
|
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
|
|
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
|
|
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
|
|
|
|
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
|
|
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
|
|
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
|
|
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
|
|
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
|
|
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
|
|
# POSSIBILITY OF SUCH DAMAGE.
|
|
|
|
###
|
|
|
|
|
2014-07-31 22:53:03 +02:00
|
|
|
import re
|
|
|
|
import os
|
|
|
|
import sys
|
|
|
|
import json
|
2005-02-02 07:03:09 +01:00
|
|
|
import time
|
2012-08-04 18:46:16 +02:00
|
|
|
import types
|
2014-07-31 18:44:49 +02:00
|
|
|
import string
|
2005-02-02 07:03:09 +01:00
|
|
|
import socket
|
|
|
|
import threading
|
2012-10-14 16:41:56 +02:00
|
|
|
import feedparser
|
2005-02-02 07:03:09 +01:00
|
|
|
|
RSS: Catch errors from update_feed(), just in case
feedparser should always catch the error, but someone reported it doesn't
catch this error on TLS cert issues:
```
Traceback (most recent call last):
File "/usr/lib/python3.8/threading.py", line 932, in _bootstrap_inner
self.run()
File "/usr/lib/python3.8/threading.py", line 870, in run
self._target(*self._args, **self._kwargs)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 86, in newf
f(*args, **kwargs)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 351, in update_feeds
self.update_feed_if_needed(feed)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 337, in update_feed_if_needed
self.update_feed(feed)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 311, in update_feed
d = feedparser.parse(feed.url, etag=feed.etag,
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/feedparser/api.py", line 214, in parse
data = _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers, result)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/feedparser/api.py", line 114, in _open_resource
return http.get(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers, result)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/feedparser/http.py", line 158, in get
f = opener.open(request)
File "/usr/lib/python3.8/urllib/request.py", line 525, in open
response = self._open(req, data)
File "/usr/lib/python3.8/urllib/request.py", line 542, in _open
result = self._call_chain(self.handle_open, protocol, protocol +
File "/usr/lib/python3.8/urllib/request.py", line 502, in _call_chain
result = func(*args)
File "/usr/lib/python3.8/urllib/request.py", line 1393, in https_open
return self.do_open(http.client.HTTPSConnection, req,
File "/usr/lib/python3.8/urllib/request.py", line 1354, in do_open
r = h.getresponse()
File "/usr/lib/python3.8/http/client.py", line 1347, in getresponse
response.begin()
File "/usr/lib/python3.8/http/client.py", line 307, in begin
version, status, reason = self._read_status()
File "/usr/lib/python3.8/http/client.py", line 268, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/usr/lib/python3.8/socket.py", line 669, in readinto
return self._sock.recv_into(b)
File "/usr/lib/python3.8/ssl.py", line 1241, in recv_into
return self.read(nbytes, buffer)
File "/usr/lib/python3.8/ssl.py", line 1099, in read
return self._sslobj.read(len, buffer)
socket.timeout: The read operation timed out
```
So let's catch the error just in case, so it doesn't block all other
feeds.
2021-05-28 18:48:33 +02:00
|
|
|
import supybot.log as log
|
2005-02-02 07:03:09 +01:00
|
|
|
import supybot.conf as conf
|
|
|
|
import supybot.utils as utils
|
|
|
|
import supybot.world as world
|
|
|
|
from supybot.commands import *
|
2015-08-11 16:50:23 +02:00
|
|
|
import supybot.utils.minisix as minisix
|
2014-07-31 20:14:09 +02:00
|
|
|
import supybot.ircmsgs as ircmsgs
|
2005-02-02 07:03:09 +01:00
|
|
|
import supybot.ircutils as ircutils
|
|
|
|
import supybot.registry as registry
|
|
|
|
import supybot.callbacks as callbacks
|
2010-10-19 19:50:41 +02:00
|
|
|
from supybot.i18n import PluginInternationalization, internationalizeDocstring
|
|
|
|
_ = PluginInternationalization('RSS')
|
2005-02-02 07:03:09 +01:00
|
|
|
|
2017-10-21 16:48:44 +02:00
|
|
|
# Number of seconds to wait after plugin initialization before the first
# feed fetch; kept short under the test suite so tests don't stall.
if world.testing:
    INIT_DELAY = 1
else:
    INIT_DELAY = 10
|
|
|
|
|
2016-08-16 10:56:29 +02:00
|
|
|
# ProxyHandler moved from urllib2 (Python 2) to urllib.request (Python 3);
# import it from wherever this interpreter provides it.
if minisix.PY2:
    from urllib2 import ProxyHandler
else:
    from urllib.request import ProxyHandler
|
|
|
|
|
2019-11-11 19:42:34 +01:00
|
|
|
from .config import register_feed_config
|
|
|
|
|
2014-07-31 18:44:49 +02:00
|
|
|
def get_feedName(irc, msg, args, state):
    """Converter for the 'feedName' argument type.

    Validates that args[0] is usable both as a registry key and as a
    dynamic command name, then appends its canonical form to state.args.
    """
    if irc.isChannel(args[0]):
        state.errorInvalid('feed name', args[0], 'must not be channel names.')
    if not registry.isValidRegistryName(args[0]):
        state.errorInvalid('feed name', args[0],
                           'Feed names must not include spaces.')
    # Dots would be interpreted as registry group separators, so reject them.
    if "." in args[0]:
        state.errorInvalid('feed name', args[0],
                           'Feed names must not include dots.')
    state.args.append(callbacks.canonicalName(args.pop(0)))
addConverter('feedName', get_feedName)
|
|
|
|
|
2014-07-31 22:53:03 +02:00
|
|
|
# Flat JSON file in the bot's data directory where the ids of
# already-announced entries are persisted across restarts.
announced_headlines_filename = \
        conf.supybot.directories.data.dirize('RSS_announced.flat')
|
|
|
|
|
2014-10-14 22:59:00 +02:00
|
|
|
def only_one_at_once(f):
    """Decorator ensuring at most one call to *f* runs at a time.

    Overlapping calls are silently dropped (not queued), preserving the
    original best-effort semantics used by the feed-update thread.
    """
    lock = threading.Lock()
    def newf(*args, **kwargs):
        # acquire(False) is an atomic test-and-set; the previous
        # check-then-set on a plain list flag was racy: two threads could
        # both observe the flag as unset and enter concurrently.
        if not lock.acquire(False):
            return
        try:
            f(*args, **kwargs)
        finally:
            # Always release, even if f raises, so later calls can run.
            lock.release()
    return newf
|
|
|
|
|
2020-01-31 20:59:42 +01:00
|
|
|
_MISSING = object()

def get_entry_id(entry):
    """Return a unique identifier for a feed entry.

    Attributes are tried in priority order, per the RSS 2.0 item spec:
    http://validator.w3.org/feed/docs/rss2.html#hrelementsOfLtitemgt
    """
    for attribute in ('id', 'link', 'title', 'description'):
        value = getattr(entry, attribute, _MISSING)
        if value is not _MISSING:
            return value
    raise ValueError('Feed entry is missing both title and description')
|
|
|
|
|
2015-05-23 11:40:26 +02:00
|
|
|
class InvalidFeedUrl(ValueError):
    """Raised when a feed URL does not match utils.web.httpUrlRe."""
    pass
|
|
|
|
|
2014-07-31 18:44:49 +02:00
|
|
|
class Feed:
    """In-memory state of a single RSS/Atom feed tracked by the plugin."""
    __slots__ = ('url', 'name', 'data', 'last_update', 'entries',
            'etag', 'modified', 'initial',
            'lock', 'announced_entries', 'last_exception')
    def __init__(self, name, url, initial,
            plugin_is_loading=False, announced=None):
        assert name, name
        # Anonymous feeds are registered with their URL as their name.
        if not url:
            url = name
        if not utils.web.httpUrlRe.match(url):
            raise InvalidFeedUrl(url)
        self.name = name
        self.url = url
        # True until the first successful update; used to pick the
        # initialAnnounceHeadlines limit instead of the regular one.
        self.initial = initial
        self.data = None
        # We don't want to fetch feeds right after the plugin is
        # loaded (the bot could be starting, and thus already busy)
        self.last_update = 0
        self.entries = []
        # HTTP caching validators passed back to feedparser.parse().
        self.etag = None
        self.modified = None
        # Guards concurrent fetch/read of this feed's mutable state.
        self.lock = threading.Lock()
        # Ids of entries already announced, restored from the flat DB.
        self.announced_entries = announced or \
                utils.structures.TruncatableSet()
        # Last soft (bozo) error reported by feedparser, if any.
        self.last_exception = None

    def __repr__(self):
        return 'Feed(%r, %r, %r, <bool>, %r)' % \
                (self.name, self.url, self.initial, self.announced_entries)

    def get_command(self, plugin):
        """Build a bot command method that shows this feed's headlines."""
        docstring = format(_("""[<number of headlines>]

        Reports the titles for %s at the RSS feed %u. If
        <number of headlines> is given, returns only that many headlines.
        RSS feeds are only looked up every supybot.plugins.RSS.waitPeriod
        seconds, which defaults to 1800 (30 minutes) since that's what most
        websites prefer."""), self.name, self.url)
        def f(self2, irc, msg, args):
            # Delegate to the plugin's generic 'rss' command with this
            # feed's name prepended.
            args.insert(0, self.name)
            self2.rss(irc, msg, args)
        f = utils.python.changeFunctionName(f, self.name, docstring)
        f = types.MethodType(f, plugin)
        return f
|
|
|
|
|
2015-09-02 10:04:04 +02:00
|
|
|
_sort_parameters = {
|
|
|
|
'oldestFirst': (('published_parsed', 'updated_parsed'), False),
|
|
|
|
'newestFirst': (('published_parsed', 'updated_parsed'), True),
|
|
|
|
'outdatedFirst': (('updated_parsed', 'published_parsed'), False),
|
|
|
|
'updatedFirst': (('updated_parsed', 'published_parsed'), True),
|
|
|
|
}
|
|
|
|
def _sort_arguments(order):
|
|
|
|
(fields, reverse) = _sort_parameters[order]
|
|
|
|
def key(entry):
|
|
|
|
for field in fields:
|
|
|
|
if field in entry:
|
|
|
|
return entry[field]
|
|
|
|
raise KeyError('No date field in entry.')
|
|
|
|
return (key, reverse)
|
|
|
|
|
2014-07-31 18:44:49 +02:00
|
|
|
def sort_feed_items(items, order):
    """Return feed items, sorted according to sortFeedItems."""
    if order == 'asInFeed':
        return items
    key, reverse = _sort_arguments(order)
    try:
        return sorted(items, key=key, reverse=reverse)
    except KeyError:
        # feedparser normalizes required timestamp fields in ATOM and RSS
        # to the "published"/"updated" fields; an entry with neither is
        # unsortable by date, so keep the feed's own ordering.
        return items
|
2005-02-02 07:03:09 +01:00
|
|
|
|
2014-07-31 22:53:03 +02:00
|
|
|
def load_announces_db(fd):
    """Read the announced-entries database (JSON) from file object *fd*."""
    raw = json.load(fd)
    return {name: utils.structures.TruncatableSet(entries)
            for (name, entries) in raw.items()}

def save_announces_db(db, fd):
    """Write *db*, an iterable of (name, entries) pairs, as JSON to *fd*."""
    serializable = {name: list(entries) for (name, entries) in db}
    json.dump(serializable, fd)
|
|
|
|
|
|
|
|
|
2005-02-09 08:04:04 +01:00
|
|
|
class RSS(callbacks.Plugin):
|
2021-04-05 17:29:49 +02:00
|
|
|
"""
|
|
|
|
This plugin is useful both for announcing updates to RSS feeds in a
|
2005-02-02 07:03:09 +01:00
|
|
|
channel, and for retrieving the headlines of RSS feeds via command. Use
|
|
|
|
the "add" command to add feeds to this plugin, and use the "announce"
|
2021-04-05 17:29:49 +02:00
|
|
|
command to determine what feeds should be announced in a given channel.
|
|
|
|
|
|
|
|
Basic usage
|
|
|
|
^^^^^^^^^^^
|
|
|
|
|
|
|
|
1. Add a feed using
|
|
|
|
``@rss add limnoria https://github.com/ProgVal/Limnoria/tags.atom``.
|
|
|
|
|
|
|
|
* This is RSS feed of Limnoria's stable releases.
|
|
|
|
* You can now check the latest news from the feed with ``@limnoria``.
|
|
|
|
|
|
|
|
2. To have new news automatically announced on the channel, use
|
|
|
|
``@rss announce add Limnoria``.
|
|
|
|
|
|
|
|
To add another feed, simply replace limnoria and the address using name
|
|
|
|
of the feed and address of the feed. For example, YLE News:
|
|
|
|
|
|
|
|
1. ``@rss add yle http://yle.fi/uutiset/rss/uutiset.rss?osasto=news``
|
|
|
|
2. ``@rss announce add yle``
|
|
|
|
|
|
|
|
News on their own lines
|
|
|
|
^^^^^^^^^^^^^^^^^^^^^^^
|
|
|
|
|
|
|
|
If you want the feed topics to be on their own lines instead of being separated by
|
|
|
|
the separator which you have configured you can set `reply.onetoone` to False.
|
|
|
|
|
|
|
|
Please first read the help for that configuration variable
|
|
|
|
|
|
|
|
``@config help reply.onetoone``
|
|
|
|
|
|
|
|
and understand what it says and then you can do
|
|
|
|
|
|
|
|
``@config reply.onetoone False``
|
|
|
|
|
|
|
|
"""
|
2005-02-02 07:03:09 +01:00
|
|
|
threaded = True
|
|
|
|
    def __init__(self, irc):
        """Load feed definitions from the registry and the announced-ids DB."""
        self.__parent = super(RSS, self)
        self.__parent.__init__(irc)

        if world.starting:
            self._init_time = time.time() # To delay loading the feeds
        else:
            self._init_time = 0

        # Scheme: {name: url}
        self.feed_names = callbacks.CanonicalNameDict()
        # Scheme: {url: feed}
        self.feeds = {}
        # Restore the set of already-announced entry ids, if persisted.
        if os.path.isfile(announced_headlines_filename):
            with open(announced_headlines_filename) as fd:
                announced = load_announces_db(fd)
        else:
            announced = {}
        for name in self.registryValue('feeds'):
            try:
                url = self.registryValue(registry.join(['feeds', name]))
            except registry.NonExistentRegistryEntry:
                self.log.warning('%s is not a registered feed, removing.',name)
                continue
            try:
                self.register_feed(name, url, True, True, announced.get(name, []))
            except InvalidFeedUrl:
                self.log.error('%s is not a valid feed, removing.', name)
                continue
        # Persist announced ids whenever the bot flushes its state.
        world.flushers.append(self._flush)
|
|
|
|
|
|
|
|
    def die(self):
        """Persist announced ids and detach the flusher on plugin unload."""
        self._flush()
        world.flushers.remove(self._flush)
        self.__parent.die()
|
|
|
|
|
|
|
|
    def _flush(self):
        """Atomically write every feed's announced entry ids to disk."""
        l = [(f.name, f.announced_entries) for f in self.feeds.values()]
        # backupDir='/dev/null' discards the backup copy AtomicFile makes.
        with utils.file.AtomicFile(announced_headlines_filename, 'w',
                                   backupDir='/dev/null') as fd:
            save_announces_db(l, fd)
|
|
|
|
|
2014-07-31 18:44:49 +02:00
|
|
|
|
|
|
|
##################
|
|
|
|
# Feed registering
|
|
|
|
|
2014-07-31 23:56:27 +02:00
|
|
|
    def assert_feed_does_not_exist(self, name, url=None):
        """Raise callbacks.Error if *name* (or a named feed at *url*) exists."""
        if self.isCommandMethod(name):
            s = format(_('I already have a command in this plugin named %s.'),
                    name)
            raise callbacks.Error(s)
        if url:
            feed = self.feeds.get(url)
            # feed.name == feed.url means an anonymous feed, which may be
            # upgraded to a named one; only a *named* duplicate is an error.
            if feed and feed.name != feed.url:
                s = format(_('I already have a feed with that URL named %s.'),
                        feed.name)
                raise callbacks.Error(s)
|
2014-07-31 19:31:20 +02:00
|
|
|
|
2014-08-13 16:42:47 +02:00
|
|
|
    def register_feed(self, name, url, initial,
            plugin_is_loading, announced=None):
        """Create a Feed object and index it by both name and URL."""
        self.feed_names[name] = url
        self.feeds[url] = Feed(name, url, initial,
                plugin_is_loading, announced)
|
2014-07-31 18:56:52 +02:00
|
|
|
|
2021-05-01 14:29:09 +02:00
|
|
|
    def remove_feed(self, name_or_url):
        """Remove a feed by name or URL from memory and the registry."""
        self.feed_names.pop(name_or_url, None)
        # Remove every occurrence from the 'feeds' registry list.
        while True:
            try:
                conf.supybot.plugins.RSS.feeds().remove(name_or_url)
            except KeyError:
                break
        try:
            conf.supybot.plugins.RSS.feeds.unregister(name_or_url)
        except (KeyError, registry.NonExistentRegistryEntry):
            # Nothing registered under this name/URL; best-effort removal.
            pass
|
2014-07-31 18:44:49 +02:00
|
|
|
|
|
|
|
##################
|
|
|
|
# Methods handling
|
2005-02-02 07:03:09 +01:00
|
|
|
|
2005-05-30 02:52:54 +02:00
|
|
|
    def isCommandMethod(self, name):
        """Treat registered feed names as commands, besides real methods."""
        if not self.__parent.isCommandMethod(name):
            return bool(self.get_feed(name))
        else:
            return True
|
|
|
|
|
2009-05-23 00:52:15 +02:00
|
|
|
    def listCommands(self):
        """Advertise dynamic feed commands alongside the regular ones."""
        return self.__parent.listCommands(self.feed_names.keys())
|
2009-05-23 00:52:15 +02:00
|
|
|
|
2005-05-30 02:52:54 +02:00
|
|
|
    def getCommandMethod(self, command):
        """Resolve a command, falling back to a feed's generated command."""
        try:
            return self.__parent.getCommandMethod(command)
        except AttributeError:
            return self.get_feed(command[0]).get_command(self)
|
2005-02-02 07:03:09 +01:00
|
|
|
|
|
|
|
    def __call__(self, irc, msg):
        """Process the message, then kick off a background feed update."""
        self.__parent.__call__(irc, msg)
        # update_feeds is wrapped in @only_one_at_once, so spawning a
        # thread per message is safe: overlapping runs are dropped.
        threading.Thread(target=self.update_feeds).start()
|
2014-07-31 18:44:49 +02:00
|
|
|
|
|
|
|
|
|
|
|
##################
|
|
|
|
# Status accessors
|
|
|
|
|
|
|
|
    def get_feed(self, name):
        """Return the Feed for a name or URL, or None if unknown."""
        # Resolve name -> url (a URL passes through unchanged), then url -> feed.
        return self.feeds.get(self.feed_names.get(name, name), None)
|
|
|
|
|
|
|
|
    def is_expired(self, feed):
        """Return True if the feed is older than its (per-feed) waitPeriod."""
        assert feed
        period = self.registryValue('waitPeriod')
        if feed.name != feed.url: # Named feed
            # A per-feed waitPeriod, when set, overrides the global one.
            specific_period = self.registryValue('feeds.%s.waitPeriod' % feed.name)
            if specific_period:
                period = specific_period
        event_horizon = time.time() - period
        return feed.last_update < event_horizon
|
|
|
|
|
|
|
|
###############
|
|
|
|
# Feed fetching
|
|
|
|
|
RSS: Catch errors from update_feed(), just in case
feedparser should always catch the error, but someone reported it doesn't
catch this error on TLS cert issues:
```
Traceback (most recent call last):
File "/usr/lib/python3.8/threading.py", line 932, in _bootstrap_inner
self.run()
File "/usr/lib/python3.8/threading.py", line 870, in run
self._target(*self._args, **self._kwargs)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 86, in newf
f(*args, **kwargs)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 351, in update_feeds
self.update_feed_if_needed(feed)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 337, in update_feed_if_needed
self.update_feed(feed)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 311, in update_feed
d = feedparser.parse(feed.url, etag=feed.etag,
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/feedparser/api.py", line 214, in parse
data = _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers, result)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/feedparser/api.py", line 114, in _open_resource
return http.get(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers, result)
File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/feedparser/http.py", line 158, in get
f = opener.open(request)
File "/usr/lib/python3.8/urllib/request.py", line 525, in open
response = self._open(req, data)
File "/usr/lib/python3.8/urllib/request.py", line 542, in _open
result = self._call_chain(self.handle_open, protocol, protocol +
File "/usr/lib/python3.8/urllib/request.py", line 502, in _call_chain
result = func(*args)
File "/usr/lib/python3.8/urllib/request.py", line 1393, in https_open
return self.do_open(http.client.HTTPSConnection, req,
File "/usr/lib/python3.8/urllib/request.py", line 1354, in do_open
r = h.getresponse()
File "/usr/lib/python3.8/http/client.py", line 1347, in getresponse
response.begin()
File "/usr/lib/python3.8/http/client.py", line 307, in begin
version, status, reason = self._read_status()
File "/usr/lib/python3.8/http/client.py", line 268, in _read_status
line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
File "/usr/lib/python3.8/socket.py", line 669, in readinto
return self._sock.recv_into(b)
File "/usr/lib/python3.8/ssl.py", line 1241, in recv_into
return self.read(nbytes, buffer)
File "/usr/lib/python3.8/ssl.py", line 1099, in read
return self._sslobj.read(len, buffer)
socket.timeout: The read operation timed out
```
So let's catch the error just in case, so it doesn't block all other
feeds.
2021-05-28 18:48:33 +02:00
|
|
|
    @log.firewall
    def update_feed(self, feed):
        """Fetch a feed with feedparser and announce any new entries.

        @log.firewall logs and swallows unexpected exceptions (e.g. TLS
        timeouts feedparser fails to catch) so one broken feed does not
        block the others.
        """
        handlers = []
        if utils.web.proxy():
            handlers.append(ProxyHandler(
                {'http': utils.force(utils.web.proxy())}))
            handlers.append(ProxyHandler(
                {'https': utils.force(utils.web.proxy())}))
        with feed.lock:
            # etag/modified enable HTTP conditional requests; a 304 means
            # nothing changed and the cached entries stay as they are.
            d = feedparser.parse(feed.url, etag=feed.etag,
                    modified=feed.modified, handlers=handlers)
            if 'status' not in d or d.status != 304: # Not modified
                if 'etag' in d:
                    feed.etag = d.etag
                if 'modified' in d:
                    feed.modified = d.modified
                feed.data = d.feed
                feed.entries = d.entries
                feed.last_update = time.time()
                # feedparser will store soft errors in bozo_exception and set
                # the "bozo" bit to 1 on supported platforms:
                # https://pythonhosted.org/feedparser/bozo.html
                # If this error caused us to e.g. not get any entries at all,
                # it may be helpful to show it as well.
                if getattr(d, 'bozo', 0) and hasattr(d, 'bozo_exception'):
                    feed.last_exception = d.bozo_exception
                else:
                    feed.last_exception = None

        # Consume the 'initial' flag so later updates use the regular
        # announce limit instead of the initial one.
        (initial, feed.initial) = (feed.initial, False)
        self.announce_feed(feed, initial)
|
2014-07-31 18:44:49 +02:00
|
|
|
|
|
|
|
    def update_feed_if_needed(self, feed):
        """Update the feed if it expired and the post-init delay elapsed."""
        if self.is_expired(feed) and \
                self._init_time + INIT_DELAY < time.time():
            self.update_feed(feed)
|
|
|
|
|
2014-10-14 22:59:00 +02:00
|
|
|
    @only_one_at_once
    def update_feeds(self):
        """Update every feed announced in at least one joined channel.

        @only_one_at_once drops overlapping runs, since this is started
        in a new thread for every incoming message (see __call__).
        """
        # Collect announced feed names; keep one (channel, network) pair
        # per name so a helpful warning can be logged for unknown feeds.
        announced_feeds = {}
        for irc in world.ircs:
            for channel in list(irc.state.channels):
                channel_feed_names = self.registryValue(
                    'announce', channel, irc.network)
                for name in channel_feed_names:
                    announced_feeds[name] = (channel, irc.network)
        for (name, (channel, network)) in announced_feeds.items():
            feed = self.get_feed(name)
            if not feed:
                self.log.warning(
                    'Feed %s is announced in %s@%s, but does not exist. '
                    'Use "rss announce remove %s %s" to remove it from '
                    'announcements.',
                    name, channel, network, channel, name)
                continue
            self.update_feed_if_needed(feed)
|
2014-07-31 18:44:49 +02:00
|
|
|
|
2014-07-31 20:50:12 +02:00
|
|
|
    def get_new_entries(self, feed):
        """Return entries not yet announced, and mark them as announced."""
        with feed.lock:
            entries = feed.entries
            new_entries = [entry for entry in entries
                    if get_entry_id(entry) not in feed.announced_entries]
            if not new_entries:
                return []
            feed.announced_entries |= set(get_entry_id(entry) for entry in new_entries)
            # We keep a little more because we don't want to re-announce
            # oldest entries if one of the newest gets removed.
            feed.announced_entries.truncate(10*len(entries))
        return new_entries
|
|
|
|
|
2014-08-13 16:42:47 +02:00
|
|
|
    def announce_feed(self, feed, initial):
        """Announce the feed's new entries in every subscribed channel.

        *initial* selects the initialAnnounceHeadlines limit (first fetch)
        instead of maximumAnnounceHeadlines.
        """
        new_entries = self.get_new_entries(feed)

        order = self.registryValue('sortFeedItems')
        # Sort newest-first before truncating, so the cut keeps the most
        # recent entries; the configured order is applied afterwards.
        new_entries = sort_feed_items(new_entries, 'newestFirst')
        for irc in world.ircs:
            for channel in list(irc.state.channels):
                # Old bots have it set in plugins.RSS.announce.#channel,
                # new bots set it in plugins.RSS.announce.:network.#channel,
                # so we want to read from both.
                channel_feeds = self.registryValue('announce', channel) \
                        | self.registryValue('announce', channel, irc.network)
                if feed.name not in channel_feeds:
                    continue
                if initial:
                    max_entries = self.registryValue(
                        'initialAnnounceHeadlines', channel, irc.network)
                else:
                    max_entries = self.registryValue(
                        'maximumAnnounceHeadlines', channel, irc.network)
                announced_entries = new_entries[0:max_entries]
                announced_entries = sort_feed_items(announced_entries, order)
                for entry in announced_entries:
                    self.announce_entry(irc, channel, feed, entry)
|
|
|
|
|
|
|
|
|
|
|
|
#################
|
|
|
|
# Entry rendering
|
|
|
|
|
2019-08-24 17:50:05 +02:00
|
|
|
    def should_send_entry(self, network, channel, entry):
        """Apply the channel's keyword whitelist/blacklist to an entry.

        Returns False if no whitelisted keyword matches (when a whitelist
        is set) or any blacklisted keyword matches; True otherwise.
        """
        whitelist = self.registryValue('keywordWhitelist', channel, network)
        blacklist = self.registryValue('keywordBlacklist', channel, network)

        # fix shadowing by "from supybot.commands import *"
        # (__builtins__ is a module in __main__ but a dict in imported
        # modules, hence the two access styles.)
        try:
            all = __builtins__.all
            any = __builtins__.any
        except AttributeError:
            all = __builtins__['all']
            any = __builtins__['any']

        title = getattr(entry, 'title', '')
        description = getattr(entry, 'description', '')

        if whitelist:
            if all(kw not in title and kw not in description
                    for kw in whitelist):
                return False
        if blacklist:
            if any(kw in title or kw in description
                    for kw in blacklist):
                return False
        return True
|
2005-02-02 07:03:09 +01:00
|
|
|
|
2015-06-01 19:32:40 +02:00
|
|
|
    # Collapse newlines/CRs to spaces and strip NULs, so an entry always
    # renders as a single safe IRC line.
    _normalize_entry = utils.str.multipleReplacer(
        {'\r': ' ', '\n': ' ', '\x00': ''})
    def format_entry(self, network, channel, feed, entry, is_announce):
        """Render an entry with the channel's (announce)Format template.

        A per-feed format, when configured, overrides the plugin-wide one.
        """
        key_name = 'announceFormat' if is_announce else 'format'
        if feed.name in self.registryValue('feeds'):
            specific_key_name = registry.join(['feeds', feed.name, key_name])
            template = self.registryValue(specific_key_name,
                    channel, network) or \
                    self.registryValue(key_name, channel, network)
        else:
            template = self.registryValue(key_name, channel, network)
        date = entry.get('published_parsed')
        date = utils.str.timestamp(date)
        # string.Template pulls $-placeholders from the entry mapping,
        # plus the explicit feed_name and date substitutions.
        s = string.Template(template).substitute(
            entry,
            feed_name=feed.name,
            date=date)
        return self._normalize_entry(s)
|
2014-07-31 18:44:49 +02:00
|
|
|
|
|
|
|
    def announce_entry(self, irc, channel, feed, entry):
        """Send one formatted entry to the channel, if the filters allow it."""
        if self.should_send_entry(irc.network, channel, entry):
            s = self.format_entry(irc.network, channel, feed, entry, True)
            # The 'notice' config variable selects NOTICE vs PRIVMSG.
            if self.registryValue('notice', channel, irc.network):
                m = ircmsgs.notice(channel, s)
            else:
                m = ircmsgs.privmsg(channel, s)
            irc.queueMsg(m)
|
2005-02-02 07:03:09 +01:00
|
|
|
|
2014-07-31 18:44:49 +02:00
|
|
|
|
|
|
|
##########
|
|
|
|
# Commands
|
2005-02-02 07:03:09 +01:00
|
|
|
|
2010-10-19 19:50:41 +02:00
|
|
|
    @internationalizeDocstring
    def add(self, irc, msg, args, name, url):
        """<name> <url>

        Adds a command to this plugin that will look up the RSS feed at the
        given URL.
        """
        self.assert_feed_does_not_exist(name, url)
        # Persist the feed in the registry, then register it in memory.
        register_feed_config(name, url)
        self.register_feed(name, url, True, False)
        irc.replySuccess()
    add = wrap(add, ['feedName', 'url'])
|
|
|
|
|
2010-10-19 19:50:41 +02:00
|
|
|
    @internationalizeDocstring
    def remove(self, irc, msg, args, name):
        """<name>

        Removes the command for looking up RSS feeds at <name> from
        this plugin.
        """
        feed = self.get_feed(name)
        if not feed:
            irc.error(_('That\'s not a valid RSS feed command name.'))
            return

        # If the feed was first created "anonymously", eg. with
        # `@rss announce add http://example.org/rss`, then as a named feed
        # with `@rss add example http://example.org/rss`,
        # `self.get_feed(name)` above gets only one of them; so let's
        # remove the aliased name or URL from the feed names too,
        # or we would have a dangling entry here.
        self.remove_feed(feed.url)
        self.remove_feed(name)
        assert self.get_feed(name) is None

        irc.replySuccess()
    remove = wrap(remove, ['feedName'])
|
|
|
|
|
2009-02-28 06:10:10 +01:00
|
|
|
class announce(callbacks.Commands):
|
2010-10-26 09:27:09 +02:00
|
|
|
        @internationalizeDocstring
        def list(self, irc, msg, args, channel):
            """[<channel>]

            Returns the list of feeds announced in <channel>. <channel> is
            only necessary if the message isn't sent in the channel itself.
            """
            announce = conf.supybot.plugins.RSS.announce
            # Merge the channel-only and network+channel registry values,
            # since older bots stored announcements without the network.
            channel_feeds = announce.getSpecific(channel=channel)() \
                    | announce.getSpecific(channel=channel, network=irc.network)()
            feeds = format('%L', set(channel_feeds)) # set() to deduplicate
            irc.reply(feeds or _('I am currently not announcing any feeds.'))
        list = wrap(list, ['channel',])
|
2009-02-28 06:10:10 +01:00
|
|
|
|
2010-10-19 19:50:41 +02:00
|
|
|
@internationalizeDocstring
|
2009-02-28 06:10:10 +01:00
|
|
|
def add(self, irc, msg, args, channel, feeds):
|
|
|
|
"""[<channel>] <name|url> [<name|url> ...]
|
|
|
|
|
|
|
|
Adds the list of feeds to the current list of announced feeds in
|
|
|
|
<channel>. Valid feeds include the names of registered feeds as
|
|
|
|
well as URLs for RSS feeds. <channel> is only necessary if the
|
|
|
|
message isn't sent in the channel itself.
|
|
|
|
"""
|
2014-10-10 16:41:30 +02:00
|
|
|
plugin = irc.getCallback('RSS')
|
2015-10-10 20:40:25 +02:00
|
|
|
invalid_feeds = [x for x in feeds if not plugin.get_feed(x)
|
|
|
|
and not utils.web.urlRe.match(x)]
|
2014-10-10 16:41:30 +02:00
|
|
|
if invalid_feeds:
|
|
|
|
irc.error(format(_('These feeds are unknown: %L'),
|
|
|
|
invalid_feeds), Raise=True)
|
2009-02-28 06:10:10 +01:00
|
|
|
announce = conf.supybot.plugins.RSS.announce
|
2021-05-01 13:31:34 +02:00
|
|
|
S = announce.getSpecific(channel=channel, network=irc.network)()
|
2014-07-31 20:50:12 +02:00
|
|
|
for name in feeds:
|
|
|
|
S.add(name)
|
2021-05-01 13:31:34 +02:00
|
|
|
announce.getSpecific(channel=channel, network=irc.network).setValue(S)
|
2009-02-28 06:10:10 +01:00
|
|
|
irc.replySuccess()
|
2014-07-31 20:50:12 +02:00
|
|
|
for name in feeds:
|
|
|
|
feed = plugin.get_feed(name)
|
2015-10-10 20:40:25 +02:00
|
|
|
if not feed:
|
2019-11-11 19:42:34 +01:00
|
|
|
register_feed_config(name, name)
|
2015-10-10 20:40:25 +02:00
|
|
|
plugin.register_feed(name, name, True, False)
|
|
|
|
feed = plugin.get_feed(name)
|
2014-08-13 16:42:47 +02:00
|
|
|
plugin.announce_feed(feed, True)
|
2009-02-28 06:10:10 +01:00
|
|
|
add = wrap(add, [('checkChannelCapability', 'op'),
|
|
|
|
many(first('url', 'feedName'))])
|
|
|
|
|
2010-10-19 19:50:41 +02:00
|
|
|
@internationalizeDocstring
|
2009-02-28 06:10:10 +01:00
|
|
|
def remove(self, irc, msg, args, channel, feeds):
|
|
|
|
"""[<channel>] <name|url> [<name|url> ...]
|
|
|
|
|
|
|
|
Removes the list of feeds from the current list of announced feeds
|
|
|
|
in <channel>. Valid feeds include the names of registered feeds as
|
|
|
|
well as URLs for RSS feeds. <channel> is only necessary if the
|
|
|
|
message isn't sent in the channel itself.
|
|
|
|
"""
|
|
|
|
announce = conf.supybot.plugins.RSS.announce
|
2020-10-10 11:49:15 +02:00
|
|
|
|
|
|
|
def remove_from_var(var):
|
|
|
|
S = var()
|
|
|
|
for feed in feeds:
|
|
|
|
S.discard(feed)
|
|
|
|
var.setValue(S)
|
|
|
|
|
|
|
|
remove_from_var(announce.get(channel))
|
|
|
|
remove_from_var(announce.getSpecific(
|
|
|
|
channel=channel, network=irc.network))
|
|
|
|
|
2009-02-28 06:10:10 +01:00
|
|
|
irc.replySuccess()
|
|
|
|
remove = wrap(remove, [('checkChannelCapability', 'op'),
|
|
|
|
many(first('url', 'feedName'))])
|
2005-02-02 07:03:09 +01:00
|
|
|
|
2019-10-06 20:42:31 +02:00
|
|
|
@internationalizeDocstring
|
|
|
|
def channels(self, irc, msg, args, feed):
|
|
|
|
"""<name|url>
|
|
|
|
|
|
|
|
Returns a list of channels that the given feed name or URL is being
|
|
|
|
announced to.
|
|
|
|
"""
|
|
|
|
plugin = irc.getCallback('RSS')
|
|
|
|
if not plugin.get_feed(feed):
|
|
|
|
irc.error(_("Unknown feed %s" % feed), Raise=True)
|
|
|
|
|
|
|
|
channels = []
|
|
|
|
for ircnet in world.ircs:
|
2021-07-31 14:33:34 +02:00
|
|
|
for channel in list(ircnet.state.channels):
|
2019-10-06 20:42:31 +02:00
|
|
|
if feed in plugin.registryValue('announce', channel, ircnet.network):
|
|
|
|
channels.append(ircnet.network + channel)
|
|
|
|
|
|
|
|
if channels:
|
|
|
|
irc.reply(format("%s is announced to %L.", feed, channels))
|
|
|
|
else:
|
|
|
|
irc.reply("%s is not announced to any channels." % feed)
|
|
|
|
|
|
|
|
channels = wrap(channels, ['feedName'])
|
|
|
|
|
2010-10-19 19:50:41 +02:00
|
|
|
@internationalizeDocstring
def rss(self, irc, msg, args, url, n):
    """<name|url> [<number of headlines>]

    Gets the title components of the given RSS feed.
    If <number of headlines> is given, return only that many headlines.
    """
    # Fix: '%u' is the integer conversion in %-style logging and raises
    # TypeError on a string argument when the record is emitted; '%s' is
    # correct for a URL/name.
    self.log.debug('Fetching %s', url)
    try:
        feed = self.get_feed(url)
        if not feed:
            # Not a registered feed: treat the argument as a raw URL.
            feed = Feed(url, url, True)
    except InvalidFeedUrl:
        irc.error('%s is not a valid feed URL or name.' % url, Raise=True)
    channel = msg.channel
    # Refresh only if the cache is stale.
    self.update_feed_if_needed(feed)
    entries = feed.entries
    if not entries:
        s = _('Couldn\'t get RSS feed.')
        # If we got a soft parsing exception on our last run, show the error.
        if feed.last_exception is not None:
            s += _(' Parser error: ')
            s += str(feed.last_exception)
        irc.error(s)
        return
    # Fall back to the configured default headline count.
    n = n or self.registryValue('defaultNumberOfHeadlines', channel, irc.network)
    # Apply per-channel entry filters, then truncate to n headlines.
    entries = list(filter(lambda e: self.should_send_entry(irc.network, channel, e),
        feed.entries))
    entries = entries[:n]
    headlines = map(lambda e: self.format_entry(irc.network, channel, feed, e, False),
        entries)
    sep = self.registryValue('headlineSeparator', channel, irc.network)
    irc.replies(headlines, joiner=sep)
rss = wrap(rss, [first('url', 'feedName'), additional('int')])
|
2005-02-02 07:03:09 +01:00
|
|
|
|
2010-10-19 19:50:41 +02:00
|
|
|
@internationalizeDocstring
def info(self, irc, msg, args, url):
    """<url|feed>

    Returns information from the given RSS feed, namely the title,
    URL, description, and last update date, if available.
    """
    # If the argument is a registered feed name, resolve it to its URL.
    try:
        url = self.registryValue('feeds.%s' % url)
    except registry.NonExistentRegistryEntry:
        pass
    feed = self.get_feed(url)
    if not feed:
        feed = Feed(url, url, True)
    self.update_feed_if_needed(feed)
    info = feed.data
    if not info:
        irc.error(_('I couldn\'t retrieve that RSS feed.'))
        return
    # Fix: the code previously tested `'modified' in info` but then read
    # info['modified_parsed'], crashing whenever the parser supplied one
    # key without the other (or a None parsed value). Guard on the key
    # we actually use, and on its truthiness.
    if info.get('modified_parsed'):
        # modified_parsed is a time.struct_time from the feed parser.
        # NOTE(review): mktime() interprets it in local time while
        # gmtime() below is UTC — the elapsed time may be off by the
        # local UTC offset; preserved from the original logic.
        seconds = time.mktime(info['modified_parsed'])
        now = time.mktime(time.gmtime())
        when = utils.timeElapsed(now - seconds) + ' ago'
    else:
        when = _('time unavailable')
    title = info.get('title', _('unavailable'))
    desc = info.get('description', _('unavailable'))
    link = info.get('link', _('unavailable'))
    # The rest of the entries are all available in the channel key
    response = format(_('Title: %s; URL: %u; '
                        'Description: %s; Last updated: %s.'),
                      title, link, desc, when)
    irc.reply(utils.str.normalizeWhitespace(response))
info = wrap(info, [first('url', 'feedName')])
|
2010-10-26 09:32:12 +02:00
|
|
|
# Run the class docstring through the i18n machinery so plugin help is
# translatable.
RSS = internationalizeDocstring(RSS)

# `Class` is the symbol Supybot's plugin loader looks for when loading
# this module.
Class = RSS
|
|
|
|
|
2006-02-11 16:52:51 +01:00
|
|
|
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
|