Mirror of https://github.com/Mikaela/Limnoria.git, synced 2025-08-19 11:27:21 +02:00
Compare commits
No commits in common. "d435442b39f167509e64b21a9cd1cf3f71e33033" and "4ed318d06fc1af264e61a5e1cd4928f466802361" have entirely different histories.
d435442b39 ... 4ed318d06f
@@ -50,7 +50,6 @@ class AdminTestCase(PluginTestCase):
        self.irc.feedMsg(ircmsgs.join('#Baz', prefix=self.prefix))
        getAfterJoinMessages()
        self.assertRegexp('channels', '#bar, #Baz, and #foo')
        self.assertNotRegexp('config networks.test.channels', '.*#foo.*')

    def testIgnoreAddRemove(self):
        self.assertNotError('admin ignore add foo!bar@baz')
@@ -88,7 +87,6 @@ class AdminTestCase(PluginTestCase):
        ircdb.users.delUser(u.id)

    def testJoin(self):
        try:
            m = self.getMsg('join #foo')
            self.assertEqual(m.command, 'JOIN')
            self.assertEqual(m.args[0], '#foo')
@@ -96,8 +94,6 @@ class AdminTestCase(PluginTestCase):
            self.assertEqual(m.command, 'JOIN')
            self.assertEqual(m.args[0], '#foo')
            self.assertEqual(m.args[1], 'key')
        finally:
            conf.supybot.networks.test.channels.setValue('')

    def testNick(self):
        try:
@@ -111,13 +107,10 @@ class AdminTestCase(PluginTestCase):
        self.assertError('admin capability add %s owner' % self.nick)

    def testJoinOnOwnerInvite(self):
        try:
            self.irc.feedMsg(ircmsgs.invite(conf.supybot.nick(), '#foo', prefix=self.prefix))
            m = self.getMsg(' ')
            self.assertEqual(m.command, 'JOIN')
            self.assertEqual(m.args[0], '#foo')
        finally:
            conf.supybot.networks.test.channels.setValue('')

    def testNoJoinOnUnprivilegedInvite(self):
        try:
@@ -128,7 +121,6 @@ class AdminTestCase(PluginTestCase):
                'Error: "somecommand" is not a valid command.')
        finally:
            world.testing = True
        self.assertNotRegexp('config networks.test.channels', '.*#foo.*')

    def testAcmd(self):
        self.irc.feedMsg(ircmsgs.join('#foo', prefix=self.prefix))
@@ -991,14 +991,9 @@ class Channel(callbacks.Plugin):
            network = conf.supybot.networks.get(irc.network)
            network.channels().remove(channel)
        except KeyError:
            pass
            if channel not in irc.state.channels:
                # Not configured AND not in the channel
                irc.error(_('I\'m not in %s.') % channel, Raise=True)
        else:
            if channel not in irc.state.channels:
                # Configured, but not in the channel
                irc.reply(_('%s removed from configured join list.') % channel)
                return
        reason = (reason or self.registryValue("partMsg", channel, irc.network))
        reason = ircutils.standardSubstitute(irc, msg, reason)
        irc.queueMsg(ircmsgs.part(channel, reason))
@@ -1,3 +1,5 @@
#!/usr/bin/python

###
# Copyright (c) 2002-2005, Jeremiah Fincher
# Copyright (c) 2010-2021, Valentin Lorentz
@@ -177,15 +177,9 @@ class Fediverse(callbacks.PluginRegexp):

    def _has_webfinger_support(self, hostname):
        if hostname not in self._webfinger_support_cache:
            try:
                self._webfinger_support_cache[hostname] = ap.has_webfinger_support(
                    hostname
                )
            except Exception as e:
                self.log.error(
                    "Checking Webfinger support for %s raised %s", hostname, e
                )
                return False
        return self._webfinger_support_cache[hostname]

    def _get_actor(self, irc, username):
@@ -187,7 +187,7 @@ class GeographyLocaltimeTestCase(PluginTestCase):

class GeographyWikidataTestCase(SupyTestCase):
    @skipIf(not network, "Network test")
    def testRelationOsmidToTimezone(self):
    def testOsmidToTimezone(self):
        self.assertEqual(
            wikidata.uri_from_osmid(450381),
            "http://www.wikidata.org/entity/Q22690",
@@ -196,12 +196,6 @@ class GeographyWikidataTestCase(SupyTestCase):
            wikidata.uri_from_osmid(192468),
            "http://www.wikidata.org/entity/Q47045",
        )
    @skipIf(not network, "Network test")
    def testNodeOsmidToTimezone(self):
        self.assertEqual(
            wikidata.uri_from_osmid(436012592),
            "http://www.wikidata.org/entity/Q933",
        )

    @skipIf(not network, "Network test")
    def testDirect(self):
@@ -115,14 +115,7 @@ LIMIT 1
OSMID_QUERY = string.Template(
    """
SELECT ?item WHERE {
    {
        ?item wdt:P402 "$osmid".  # OSM relation ID
    }
    UNION
    {
        ?item wdt:P11693 "$osmid".  # OSM node ID
    }

    ?item wdt:P402 "$osmid".
}
LIMIT 1
"""
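For context, the query above is a string.Template that gets filled in with an OpenStreetMap ID before being sent to Wikidata; the UNION lets one query match either the OSM relation ID property (P402) or the OSM node ID property (P11693). A minimal sketch of the substitution step only (the osmid value is just one of the IDs used in the tests above, and this is not the plugin's exact code):

    import string

    # Illustrative only: same shape as the OSMID_QUERY template above.
    OSMID_QUERY = string.Template("""
    SELECT ?item WHERE {
        { ?item wdt:P402 "$osmid". }     # OSM relation ID
        UNION
        { ?item wdt:P11693 "$osmid". }   # OSM node ID
    }
    LIMIT 1
    """)

    # substitute() replaces every $osmid placeholder with the given value.
    query = OSMID_QUERY.substitute(osmid=450381)
    print(query)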
@@ -158,7 +158,7 @@ class Internet(callbacks.Plugin):
        if not status:
            status = 'unknown'
        try:
            t = telnetlib.Telnet('whois.iana.org', 43)
            t = telnetlib.Telnet('whois.pir.org', 43)
        except socket.error as e:
            irc.error(str(e))
            return
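For reference, the Telnet call in this hunk is just a WHOIS query over TCP port 43 (RFC 3912): connect, send the query terminated by CRLF, read until the server closes the connection. A minimal stand-alone sketch using the plain socket module (server and domain are examples, not the plugin's exact code):

    import socket

    def whois_query(server, query, port=43, timeout=10):
        # RFC 3912: send the query terminated by CRLF, then read to EOF.
        with socket.create_connection((server, port), timeout=timeout) as sock:
            sock.sendall(query.encode('ascii') + b'\r\n')
            chunks = []
            while True:
                data = sock.recv(4096)
                if not data:
                    break
                chunks.append(data)
        return b''.join(chunks).decode('utf-8', 'replace')

    # print(whois_query('whois.iana.org', 'example.org'))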
@@ -21,11 +21,6 @@ and checking latency to the server.
Commands
--------

.. _command-network-authenticate:

authenticate takes no arguments
  Manually initiate SASL authentication.

.. _command-network-capabilities:

capabilities [<network>]
@@ -8,8 +8,9 @@ Purpose

Provides basic functionality for handling RSS/RDF feeds, and allows announcing
them periodically to channels.
In order to use this plugin you must have `python3-feedparser
<https://pypi.org/project/feedparser/>`_ installed.
In order to use this plugin you must have the following modules
installed:
* feedparser: http://feedparser.org/

Usage
-----
@@ -139,7 +140,7 @@ supybot.plugins.RSS.feeds

supybot.plugins.RSS.format
  This config variable defaults to "$date: $title <$link>", is network-specific, and is channel-specific.

  The format the bot will use for displaying headlines of a RSS feed that is triggered manually. In addition to fields defined by feedparser ($published (the entry date), $title, $link, $description, $id, etc.), the following variables can be used: $feed_name (the configured name) $feed_title/$feed_subtitle/$feed_author/$feed_language/$feed_link, $date (parsed date, as defined in supybot.reply.format.time)
  The format the bot will use for displaying headlines of a RSS feed that is triggered manually. In addition to fields defined by feedparser ($published (the entry date), $title, $link, $description, $id, etc.), the following variables can be used: $feed_name, $date (parsed date, as defined in supybot.reply.format.time)

.. _conf-supybot.plugins.RSS.headlineSeparator:
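The $-style variables documented here are expanded with string.Template.safe_substitute (visible in the RSS plugin hunk further down, around the safe_substitute call). A small sketch with made-up entry fields, roughly the shape feedparser exposes, to show how the default format behaves:

    from string import Template

    # Hypothetical headline fields, illustrative only.
    entry = {
        'title': 'Example post',
        'link': 'https://example.org/post/1',
        'feed_name': 'example',
        'date': '2024-01-01',
    }

    fmt = Template('$date: $title <$link>')   # the documented default
    print(fmt.safe_substitute(entry))
    # -> 2024-01-01: Example post <https://example.org/post/1>
    # safe_substitute() leaves unknown $variables in place instead of raising,
    # so a format referencing a missing field still produces output.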
@@ -31,8 +31,9 @@
"""
Provides basic functionality for handling RSS/RDF feeds, and allows announcing
them periodically to channels.
In order to use this plugin you must have `python3-feedparser
<https://pypi.org/project/feedparser/>`_ installed.
In order to use this plugin you must have the following modules
installed:
* feedparser: http://feedparser.org/
"""

import supybot
@@ -364,11 +364,6 @@ class RSS(callbacks.Plugin):
                             feed.url, e)
            feed.last_exception = e
            return
        except http.client.HTTPException as e:
            self.log.warning("HTTP error while fetching <%s>: %s",
                             feed.url, e)
            feed.last_exception = e
            return
        except Exception as e:
            self.log.error("Failed to fetch <%s>: %s", feed.url, e)
            raise  # reraise so @log.firewall prints the traceback
@@ -502,48 +497,6 @@ class RSS(callbacks.Plugin):
                  isinstance(v, str)}
        kwargs["feed_name"] = feed.name
        kwargs.update(entry)
        for (key, value) in list(kwargs.items()):
            # First look for plain text
            if isinstance(value, list):
                for item in value:
                    if isinstance(item, dict) and 'value' in item and \
                            item.get('type') == 'text/plain':
                        value = item['value']
                        break
            # Then look for HTML text or URL
            if isinstance(value, list):
                for item in value:
                    if isinstance(item, dict) and item.get('type') in \
                            ('text/html', 'application/xhtml+xml'):
                        if 'value' in item:
                            value = utils.web.htmlToText(item['value'])
                        elif 'href' in item:
                            value = item['href']
            # Then fall back to any URL
            if isinstance(value, list):
                for item in value:
                    if isinstance(item, dict) and 'href' in item:
                        value = item['href']
                        break
            # Finally, as a last resort, use the value as-is
            if isinstance(value, list):
                for item in value:
                    if isinstance(item, dict) and 'value' in item:
                        value = item['value']
            kwargs[key] = value

        for key in ('summary', 'title'):
            detail = kwargs.get('%s_detail' % key)
            if isinstance(detail, dict) and detail.get('type') in \
                    ('text/html', 'application/xhtml+xml'):
                kwargs[key] = utils.web.htmlToText(detail['value'])

            if 'description' not in kwargs and kwargs[key]:
                kwargs['description'] = kwargs[key]

        if 'description' not in kwargs and kwargs.get('content'):
            kwargs['description'] = kwargs['content']

        s = string.Template(template).safe_substitute(entry, **kwargs, date=date)
        return self._normalize_entry(s)
@@ -59,6 +59,7 @@ not_well_formed = """<?xml version="1.0" encoding="utf-8"?>
</rss>
"""


class MockResponse:
    headers = {}
    url = ''
@@ -358,130 +359,6 @@ class RSSTestCase(ChannelPluginTestCase):
            self.assertRegexp('rss http://xkcd.com/rss.xml',
                              'On the other hand, the refractor\'s')

    @mock_urllib
    def testAtomContentHtmlOnly(self, mock):
        timeFastForward(1.1)
        mock._data = """
<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xmlns:media="http://search.yahoo.com/mrss/" xml:lang="en-US">
  <title>Recent Commits to anope:2.0</title>
  <updated>2023-10-04T16:14:39Z</updated>
  <entry>
    <title>title with <pre>HTML</pre></title>
    <updated>2023-10-04T16:14:39Z</updated>
    <content type="html">
      content with <pre>HTML</pre>
    </content>
  </entry>
</feed>"""
        with conf.supybot.plugins.RSS.format.context('$content'):
            self.assertRegexp('rss https://example.org',
                              'content with HTML')
        with conf.supybot.plugins.RSS.format.context('$description'):
            self.assertRegexp('rss https://example.org',
                              'content with HTML')

    @mock_urllib
    def testAtomContentXhtmlOnly(self, mock):
        timeFastForward(1.1)
        mock._data = """
<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xmlns:media="http://search.yahoo.com/mrss/" xml:lang="en-US">
  <title>Recent Commits to anope:2.0</title>
  <updated>2023-10-04T16:14:39Z</updated>
  <entry>
    <title>title with <pre>HTML</pre></title>
    <updated>2023-10-04T16:14:39Z</updated>
    <content type="xhtml">
      <div xmlns="http://www.w3.org/1999/xhtml">
        content with <pre>XHTML</pre>
      </div>
    </content>
  </entry>
</feed>"""
        with conf.supybot.plugins.RSS.format.context('$content'):
            self.assertRegexp('rss https://example.org',
                              'content with XHTML')
        with conf.supybot.plugins.RSS.format.context('$description'):
            self.assertRegexp('rss https://example.org',
                              'content with XHTML')

    @mock_urllib
    def testAtomContentHtmlAndPlaintext(self, mock):
        timeFastForward(1.1)
        mock._data = """
<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xmlns:media="http://search.yahoo.com/mrss/" xml:lang="en-US">
  <title>Recent Commits to anope:2.0</title>
  <updated>2023-10-04T16:14:39Z</updated>
  <entry>
    <title>title with <pre>HTML</pre></title>
    <updated>2023-10-04T16:14:39Z</updated>
    <!-- Atom spec says multiple contents is invalid, feedparser says it's not.
         I like having the option, so let's make sure we support it. -->
    <content type="html">
      content with <pre>HTML</pre>
    </content>
    <content type="text">
      content with plaintext
    </content>
  </entry>
</feed>"""
        with conf.supybot.plugins.RSS.format.context('$content'):
            self.assertRegexp('rss https://example.org',
                              'content with plaintext')
        with conf.supybot.plugins.RSS.format.context('$description'):
            self.assertRegexp('rss https://example.org',
                              'content with plaintext')

    @mock_urllib
    def testAtomContentPlaintextAndHtml(self, mock):
        timeFastForward(1.1)
        mock._data = """
<?xml version="1.0" encoding="UTF-8"?>
<feed xmlns="http://www.w3.org/2005/Atom" xmlns:media="http://search.yahoo.com/mrss/" xml:lang="en-US">
  <title>Recent Commits to anope:2.0</title>
  <updated>2023-10-04T16:14:39Z</updated>
  <entry>
    <title>title with <pre>HTML</pre></title>
    <updated>2023-10-04T16:14:39Z</updated>
    <!-- Atom spec says multiple contents is invalid, feedparser says it's not.
         I like having the option, so let's make sure we support it. -->
    <content type="text">
      content with plaintext
    </content>
    <content type="html">
      content with <pre>HTML</pre>
    </content>
  </entry>
</feed>"""
        with conf.supybot.plugins.RSS.format.context('$content'):
            self.assertRegexp('rss https://example.org',
                              'content with plaintext')
        with conf.supybot.plugins.RSS.format.context('$description'):
            self.assertRegexp('rss https://example.org',
                              'content with plaintext')

    @mock_urllib
    def testRssDescriptionHtml(self, mock):
        timeFastForward(1.1)
        mock._data = """
<?xml version="1.0" encoding="utf-8"?>
<rss xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:atom="http://www.w3.org/2005/Atom" xmlns:content="http://purl.org/rss/1.0/modules/content/" xmlns:foaf="http://xmlns.com/foaf/0.1/" xmlns:og="http://ogp.me/ns#" xmlns:rdfs="http://www.w3.org/2000/01/rdf-schema#" xmlns:schema="http://schema.org/" xmlns:sioc="http://rdfs.org/sioc/ns#" xmlns:sioct="http://rdfs.org/sioc/types#" xmlns:skos="http://www.w3.org/2004/02/skos/core#" xmlns:xsd="http://www.w3.org/2001/XMLSchema#" version="2.0">
  <channel>
    <title>feed title</title>
    <description/>
    <language>en</language>
    <item>
      <title>title with <pre>HTML</pre></title>
      <description>description with <pre>HTML</pre></description>
    </item>
  </channel>
</feed>"""
        with conf.supybot.plugins.RSS.format.context('$description'):
            self.assertRegexp('rss https://example.org',
                              'description with HTML')

    @mock_urllib
    def testFeedAttribute(self, mock):
        timeFastForward(1.1)
@@ -67,22 +67,6 @@ supybot.plugins.SedRegex.enable

  Should Perl/sed-style regex replacing work in this channel?

.. _conf-supybot.plugins.SedRegex.format:


supybot.plugins.SedRegex.format
  This config variable defaults to "$nick meant to say: $replacement", is network-specific, and is channel-specific.

  Sets the format string for a message edited by the original author. Required fields: $nick (nick of the author), $replacement (edited message)

.. _conf-supybot.plugins.SedRegex.format.other:


supybot.plugins.SedRegex.format.other
  This config variable defaults to "$otherNick thinks $nick meant to say: $replacement", is network-specific, and is channel-specific.

  Sets the format string for a message edited by another author. Required fields: $nick (nick of the original author), $otherNick (nick of the editor), $replacement (edited message)

.. _conf-supybot.plugins.SedRegex.ignoreRegex:
@@ -124,11 +124,9 @@ class Services(callbacks.Plugin):
            return
        nickserv = self.registryValue('NickServ', network=irc.network)
        password = self._getNickServPassword(nick, irc.network)
        if not nickserv:
            self.log.warning('Tried to identify without a NickServ set.')
            return
        if not password:
            self.log.warning('Tried to identify without a password set.')
        if not nickserv or not password:
            s = 'Tried to identify without a NickServ or password set.'
            self.log.warning(s)
            return
        assert ircutils.strEqual(irc.nick, nick), \
            'Identifying with not normal nick.'
@@ -152,15 +150,16 @@ class Services(callbacks.Plugin):
        ghostDelay = self.registryValue('ghostDelay', network=irc.network)
        if not ghostDelay:
            return
        if not nickserv:
            self.log.warning('Tried to ghost without a NickServ set.')
            return
        if not password:
            self.log.warning('Tried to ghost without a password set.')
        if not nickserv or not password:
            s = 'Tried to ghost without a NickServ or password set.'
            self.log.warning(s)
            return
        if state.sentGhost and time.time() < (state.sentGhost + ghostDelay):
            self.log.warning('Refusing to send GHOST more than once every '
                             '%s seconds.' % ghostDelay)
        elif not password:
            self.log.warning('Not ghosting: no password set.')
            return
        else:
            self.log.info('Sending ghost (current nick: %s; ghosting: %s)',
                          irc.nick, nick)
@@ -144,7 +144,7 @@ supybot.plugins.Unix.ping


supybot.plugins.Unix.ping.command
  This config variable defaults to "/usr/bin/ping", is not network-specific, and is not channel-specific.
  This config variable defaults to "/bin/ping", is not network-specific, and is not channel-specific.

  Determines what command will be called for the ping command.

@@ -166,7 +166,7 @@ supybot.plugins.Unix.ping6


supybot.plugins.Unix.ping6.command
  This config variable defaults to "/usr/bin/ping6", is not network-specific, and is not channel-specific.
  This config variable defaults to "/bin/ping6", is not network-specific, and is not channel-specific.

  Determines what command will be called for the ping6 command.

@@ -210,7 +210,7 @@ supybot.plugins.Unix.sysuname


supybot.plugins.Unix.sysuname.command
  This config variable defaults to "/usr/bin/uname", is not network-specific, and is not channel-specific.
  This config variable defaults to "/bin/uname", is not network-specific, and is not channel-specific.

  Determines what command will be called for the uname command.
@@ -154,7 +154,7 @@ class Web(callbacks.PluginRegexp):
        if parsed_url.netloc == 'youtube.com' \
                or parsed_url.netloc.endswith(('.youtube.com')):
            # there is a lot of Javascript before the <title>
            size = max(819200, size)
            size = max(409600, size)
        if parsed_url.netloc in ('reddit.com', 'www.reddit.com', 'new.reddit.com'):
            # Since 2022-03, New Reddit has 'Reddit - Dive into anything' as
            # <title> on every page.
@@ -173,9 +173,8 @@ class Web(callbacks.PluginRegexp):
            if raiseErrors:
                irc.error(_('Connection to %s timed out') % url, Raise=True)
            else:
                self.log.info('Web plugins TitleSnarfer: URL <%s> timed out',
                selg.log.info('Web plugins TitleSnarfer: URL <%s> timed out',
                              url)
            return
        except Exception as e:
            if raiseErrors:
                irc.error(_('That URL raised <' + str(e)) + '>',
@@ -187,14 +186,9 @@ class Web(callbacks.PluginRegexp):

        encoding = None
        if 'Content-Type' in fd.headers:
            # using p.partition('=') instead of 'p.split('=', 1)' because,
            # unlike RFC 7231, RFC 9110 allows an empty parameter list
            # after ';':
            # * https://www.rfc-editor.org/rfc/rfc9110.html#name-media-type
            # * https://www.rfc-editor.org/rfc/rfc9110.html#parameter
            mime_params = [p.partition('=')
            mime_params = [p.split('=', 1)
                           for p in fd.headers['Content-Type'].split(';')[1:]]
            mime_params = {k.strip(): v.strip() for (k, sep, v) in mime_params}
            mime_params = {k.strip(): v.strip() for (k, v) in mime_params}
            if mime_params.get('charset'):
                encoding = mime_params['charset']
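The partition-vs-split change above matters for headers like 'Content-Type: text/html;' with an empty parameter list, which RFC 9110 allows. A small stand-alone illustration (the header value is an example):

    header = 'text/html;'                 # empty parameter list after ';'
    params = header.split(';')[1:]        # ['']

    # ''.split('=', 1) -> [''], a 1-element list, so unpacking (k, v) fails:
    #     {k.strip(): v.strip() for (k, v) in [p.split('=', 1) for p in params]}
    #     -> ValueError: not enough values to unpack

    # ''.partition('=') -> ('', '', ''), always a 3-tuple, so this is safe:
    mime_params = {k.strip(): v.strip()
                   for (k, sep, v) in [p.partition('=') for p in params]}
    print(mime_params.get('charset'))     # None -> falls back to default encoding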
@@ -85,12 +85,6 @@ class WebTestCase(ChannelPluginTestCase):
            'title https://www.reddit.com/r/irc/',
            'Internet Relay Chat')

    def testTitleMarcinfo(self):
        # Checks that we don't crash on 'Content-Type: text/html;'
        self.assertResponse(
            'title https://marc.info/?l=openbsd-tech&m=169841790407370&w=2',
            "'Removing syscall(2) from libc and kernel' - MARC")

    def testTitleSnarfer(self):
        try:
            conf.supybot.plugins.Web.titleSnarfer.setValue(True)
@@ -941,7 +941,7 @@ class Directory(registry.String):
        if os.path.isabs(filename):
            filename = os.path.abspath(filename)
            selfAbs = os.path.abspath(myself)
            commonPrefix = os.path.commonpath([selfAbs, filename])
            commonPrefix = os.path.commonprefix([selfAbs, filename])
            filename = filename[len(commonPrefix):]
        elif not os.path.isabs(myself):
            if filename.startswith(myself):
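For context, os.path.commonprefix compares strings character by character and can cut a path in the middle of a component, while os.path.commonpath works on whole components; that difference is what this hunk is about. Illustrative paths only:

    import os.path

    paths = ['/home/bot/conf/users.conf', '/home/bot-data/db.sqlite']

    print(os.path.commonprefix(paths))   # '/home/bot'  -- not a real common directory
    print(os.path.commonpath(paths))     # '/home'      -- component-wise, always a valid path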
@@ -954,7 +954,7 @@ class DataFilename(registry.String):
    def __call__(self):
        v = super(DataFilename, self).__call__()
        dataDir = supybot.directories.data()
        if not v.startswith("/") and not v.startswith(dataDir):
        if not v.startswith(dataDir):
            v = os.path.basename(v)
            v = os.path.join(dataDir, v)
            self.setValue(v)
@@ -337,7 +337,7 @@ class Static(SupyHTTPServerCallback):
        super(Static, self).__init__()
        self._mimetype = mimetype
    def doGetOrHead(self, handler, path, write_content):
        response = get_template(path[1:])  # strip leading /
        response = get_template(path)
        if minisix.PY3:
            response = response.encode()
        handler.send_response(200)
@@ -468,12 +468,7 @@ class IrcChannel(object):
            return True
        if world.testing:
            return False
        if not ircutils.isUserHostmask(hostmask):
            # Treat messages from a server (e.g. snomasks) as not ignored, as
            # the ignores system doesn't understand them
            if '.' not in hostmask:
                raise ValueError("Expected full prefix, got %r" % hostmask)
            return False
        assert ircutils.isUserHostmask(hostmask), 'got %s' % hostmask
        if self.checkBan(hostmask):
            return True
        if self.ignores.match(hostmask):
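The checkIgnored change above distinguishes three cases: a full nick!user@host prefix (checked against bans and ignores), a server name containing '.' but no '!'/'@' (never ignored), and anything else (rejected). A rough sketch of that classification, not the actual ircutils.isUserHostmask implementation:

    import re

    # Rough approximation: a full IRC prefix looks like nick!user@host.
    _full_prefix = re.compile(r'^[^!@]+![^!@]+@[^!@]+$')

    def classify_prefix(prefix):
        if _full_prefix.match(prefix):
            return 'user'     # run through checkBan() / ignores.match()
        if '.' in prefix:
            return 'server'   # e.g. 'irc.example.com': treated as not ignored
        raise ValueError("Expected full prefix, got %r" % prefix)

    print(classify_prefix('irc.example.com!bar@baz'))  # user
    print(classify_prefix('irc.example.com'))          # server
    # classify_prefix('foo') -> ValueError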
@@ -104,7 +104,7 @@ def _main():

def main():
    try:
        _main()
        main()
    except KeyboardInterrupt:
        pass
@@ -36,7 +36,6 @@ import sys
import time
import shutil
import fnmatch
from tempfile import TemporaryDirectory
started = time.time()

import supybot
@@ -44,24 +43,21 @@ import logging
import traceback

# We need to do this before we import conf.
main_temp_dir = TemporaryDirectory()
if not os.path.exists('test-conf'):
    os.mkdir('test-conf')

os.makedirs(os.path.join(main_temp_dir.name, 'conf'))
os.makedirs(os.path.join(main_temp_dir.name, 'data'))
os.makedirs(os.path.join(main_temp_dir.name, 'logs'))

registryFilename = os.path.join(main_temp_dir.name, 'conf', 'test.conf')
with open(registryFilename, 'w') as fd:
    fd.write("""
registryFilename = os.path.join('test-conf', 'test.conf')
fd = open(registryFilename, 'w')
fd.write("""
supybot.directories.backup: /dev/null
supybot.directories.conf: {temp_conf}
supybot.directories.data: {temp_data}
supybot.directories.log: {temp_logs}
supybot.directories.conf: %(base_dir)s/test-conf
supybot.directories.data: %(base_dir)s/test-data
supybot.directories.log: %(base_dir)s/test-logs
supybot.reply.whenNotCommand: True
supybot.log.stdout: False
supybot.log.stdout.level: ERROR
supybot.log.level: DEBUG
supybot.log.format: %(levelname)s %(message)s
supybot.log.format: %%(levelname)s %%(message)s
supybot.log.plugins.individualLogfiles: False
supybot.protocols.irc.throttleTime: 0
supybot.reply.whenAddressedBy.chars: @
@@ -71,11 +67,8 @@ supybot.networks.testnet2.server: should.not.need.this
supybot.networks.testnet3.server: should.not.need.this
supybot.nick: test
supybot.databases.users.allowUnregistration: True
""".format(
        temp_conf=os.path.join(main_temp_dir.name, 'conf'),
        temp_data=os.path.join(main_temp_dir.name, 'data'),
        temp_logs=os.path.join(main_temp_dir.name, 'logs')
    ))
""" % {'base_dir': os.getcwd()})
fd.close()

import supybot.registry as registry
registry.open_registry(registryFilename)
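The doubled %% in one version of the log format line and the plain % in the other follow from how the registry text is built: with old-style %-formatting every literal %(name)s has to be escaped, while str.format() only treats {...} as special. A small stand-alone illustration (the values are examples):

    # Old-style %-formatting: literal %(levelname)s must be written %%(levelname)s,
    # otherwise the formatting step would try to substitute it.
    pct_template = ("supybot.log.format: %%(levelname)s %%(message)s\n"
                    "supybot.directories.conf: %(base_dir)s/test-conf\n")
    print(pct_template % {'base_dir': '/tmp/example'})

    # str.format(): % needs no escaping, only {...} placeholders are expanded.
    fmt_template = ("supybot.log.format: %(levelname)s %(message)s\n"
                    "supybot.directories.conf: {temp_conf}\n")
    print(fmt_template.format(temp_conf='/tmp/example/conf'))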
@@ -258,9 +251,6 @@ def main():
    if result.wasSuccessful():
        sys.exit(0)
    else:
        # Deactivate autocleaning for the temporary directiories to allow inspection.
        main_temp_dir._finalizer.detach()
        print(f"Temporary directory path: {main_temp_dir.name}")
        sys.exit(1)
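main_temp_dir._finalizer.detach() disables the automatic cleanup that TemporaryDirectory registers via weakref.finalize, so the directory survives a failed test run and can be inspected; note that _finalizer is a private attribute, so this sketch depends on a CPython implementation detail:

    from tempfile import TemporaryDirectory

    tmp = TemporaryDirectory()
    print(tmp.name)          # e.g. /tmp/tmpab12cd34

    # TemporaryDirectory registers a weakref.finalize callback that deletes the
    # directory when the object is garbage-collected or the interpreter exits.
    # Detaching it keeps the directory on disk; it must then be removed manually.
    tmp._finalizer.detach()  # relies on a private attribute (CPython detail)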
@@ -350,23 +350,6 @@ class IrcChannelTestCase(IrcdbTestCase):
        c.removeBan(banmask)
        self.assertFalse(c.checkIgnored(prefix))

        # Only full n!u@h is accepted here
        self.assertRaises(ValueError, c.checkIgnored, 'foo')

    def testIgnoredServerNames(self):
        c = ircdb.IrcChannel()
        # Server names are not handled by the ignores system, so this is false
        self.assertFalse(c.checkIgnored('irc.example.com'))
        # But we should treat full prefixes that match nick!user@host normally,
        # even if they include "." like a server name
        prefix = 'irc.example.com!bar@baz'
        banmask = ircutils.banmask(prefix)
        self.assertFalse(c.checkIgnored(prefix))
        c.addIgnore(banmask)
        self.assertTrue(c.checkIgnored(prefix))
        c.removeIgnore(banmask)
        self.assertFalse(c.checkIgnored(prefix))

class IrcNetworkTestCase(IrcdbTestCase):
    def testDefaults(self):
        n = ircdb.IrcNetwork()