RSS: Catch errors from update_feed(), just in case

feedparser should always catch the error itself, but someone reported that it
doesn't catch this one when there are TLS cert issues:

```
Traceback (most recent call last):
  File "/usr/lib/python3.8/threading.py", line 932, in _bootstrap_inner
    self.run()
  File "/usr/lib/python3.8/threading.py", line 870, in run
    self._target(*self._args, **self._kwargs)
  File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 86, in newf
    f(*args, **kwargs)
  File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 351, in update_feeds
    self.update_feed_if_needed(feed)
  File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 337, in update_feed_if_needed
    self.update_feed(feed)
  File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/supybot/plugins/RSS/plugin.py", line 311, in update_feed
    d = feedparser.parse(feed.url, etag=feed.etag,
  File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/feedparser/api.py", line 214, in parse
    data = _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers, result)
  File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/feedparser/api.py", line 114, in _open_resource
    return http.get(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers, result)
  File "/home/falso/virtualenv/limnoria/lib/python3.8/site-packages/feedparser/http.py", line 158, in get
    f = opener.open(request)
  File "/usr/lib/python3.8/urllib/request.py", line 525, in open
    response = self._open(req, data)
  File "/usr/lib/python3.8/urllib/request.py", line 542, in _open
    result = self._call_chain(self.handle_open, protocol, protocol +
  File "/usr/lib/python3.8/urllib/request.py", line 502, in _call_chain
    result = func(*args)
  File "/usr/lib/python3.8/urllib/request.py", line 1393, in https_open
    return self.do_open(http.client.HTTPSConnection, req,
  File "/usr/lib/python3.8/urllib/request.py", line 1354, in do_open
    r = h.getresponse()
  File "/usr/lib/python3.8/http/client.py", line 1347, in getresponse
    response.begin()
  File "/usr/lib/python3.8/http/client.py", line 307, in begin
    version, status, reason = self._read_status()
  File "/usr/lib/python3.8/http/client.py", line 268, in _read_status
    line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
  File "/usr/lib/python3.8/socket.py", line 669, in readinto
    return self._sock.recv_into(b)
  File "/usr/lib/python3.8/ssl.py", line 1241, in recv_into
    return self.read(nbytes, buffer)
  File "/usr/lib/python3.8/ssl.py", line 1099, in read
    return self._sslobj.read(len, buffer)
socket.timeout: The read operation timed out
```

So let's catch the error ourselves just in case, so that a single failing feed
doesn't block updates for all the other feeds.
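
For context, here is a minimal sketch of how the failure surfaces and how a caller could guard against it; the feed URL and timeout value are made up for illustration, and the actual fix below uses a decorator rather than an inline try/except:

```python
import socket
import feedparser

socket.setdefaulttimeout(10)  # illustrative timeout; the plugin configures its own

try:
    # feedparser normally reports fetch problems via d.bozo / d.bozo_exception,
    # but a timeout while reading the HTTPS response can escape as a raw
    # socket.timeout, as in the traceback above.
    d = feedparser.parse("https://feeds.example.org/news.xml")  # hypothetical feed URL
except socket.timeout:
    # Without a guard like this, the exception propagates out of update_feed()
    # and aborts the thread that updates every other feed as well.
    d = None
```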
Valentin Lorentz 2021-05-28 18:48:33 +02:00
parent 3415cd08a1
commit 758f9bee34
1 changed file with 2 additions and 0 deletions

```diff
@@ -40,6 +40,7 @@ import socket
 import threading
 import feedparser
+import supybot.log as log
 import supybot.conf as conf
 import supybot.utils as utils
 import supybot.world as world
@@ -344,6 +345,7 @@ class RSS(callbacks.Plugin):
     ###############
     # Feed fetching
+    @log.firewall
     def update_feed(self, feed):
         handlers = []
         if utils.web.proxy():
```
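
For reference, supybot's `log.firewall` is a decorator that catches and logs exceptions raised by the wrapped method instead of letting them propagate. The following is only a rough sketch of that idea, not the actual implementation in supybot's log module:

```python
import functools
import logging

logger = logging.getLogger("supybot")

def firewall(f):
    """Sketch of an exception-firewall decorator: run the wrapped method,
    log any exception, and return None instead of re-raising, so a single
    failing feed cannot kill the shared update thread."""
    @functools.wraps(f)
    def newf(self, *args, **kwargs):
        try:
            return f(self, *args, **kwargs)
        except Exception:
            logger.exception("Uncaught exception in %s:", f.__name__)
    return newf
```

With the decorator applied to `update_feed()`, a `socket.timeout` like the one in the traceback is logged and the updater simply moves on to the next feed.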