Remove need for fix_import, fix_types, and fix_urllib.
This commit is contained in: parent 3991faf7ee, commit c0ac84bb53
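The change drops three 2to3 fixers (fix_imports, fix_types, fix_urllib) from the build step in setup.py and instead makes the affected modules run unmodified on both Python 2 and 3, mostly by routing through the supybot.minisix compatibility shim and the supybot.utils.web wrappers. For orientation, a sketch (not project code) of the rewrites those fixers used to apply at build time:

    # fix_imports: renamed stdlib modules
    #   from cStringIO import StringIO   ->  from io import StringIO
    #   import Queue                     ->  import queue
    # fix_urllib: urllib/urllib2 split into urllib.request/parse/error
    #   urllib.quote(s)                  ->  urllib.parse.quote(s)
    # fix_types: aliases in the types module that became builtins
    #   isinstance(x, types.SliceType)   ->  isinstance(x, slice)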
@@ -31,7 +31,6 @@
 import os
 import sys
 import time
-from cStringIO import StringIO
 
 import supybot.conf as conf
 import supybot.world as world
@@ -32,7 +32,6 @@ import os
 import sys
 import time
 import string
-import urllib
 
 import supybot.conf as conf
 import supybot.ircdb as ircdb
@@ -128,7 +127,7 @@ class FactoidsCallback(httpserver.SupyHTTPServerCallback):
             self.end_headers()
             self.write(httpserver.get_template('factoids/index.html'))
         elif len(parts) == 2:
-            channel = urllib.unquote(parts[0])
+            channel = utils.web.unquote(parts[0])
             if not ircutils.isChannel(channel):
                 self.send_response(404)
                 self.send_header('Content-type', 'text/html; charset=utf-8')
@@ -180,7 +179,7 @@ class FactoidsCallback(httpserver.SupyHTTPServerCallback):
             if 'chan' in form:
                 self.send_response(303)
                 self.send_header('Location',
-                        './%s/' % urllib.quote(form['chan'].value))
+                        './%s/' % utils.web.quote(form['chan'].value))
                 self.end_headers()
             else:
                 self.send_response(400)
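Both Factoids hunks route URL quoting through supybot.utils.web instead of urllib. A minimal usage sketch with the names as this diff spells them (hypothetical channel value; the quote/unquote aliases are assumed to sit alongside the urlquote/urlunquote wrappers defined later in src/utils/web.py):

    import supybot.utils as utils

    quoted = utils.web.quote('#limnoria')    # '%23limnoria'
    channel = utils.web.unquote(quoted)      # back to '#limnoria'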
@@ -33,7 +33,6 @@ import sys
 import codecs
 import string
 import random
-from cStringIO import StringIO
 
 import supybot.conf as conf
 import supybot.utils as utils
@@ -610,7 +609,7 @@ class Filter(callbacks.Plugin):
 ##        for (c, v) in d.items():
 ##            dd[ord(c)] = unicode(v + ' ')
 ##        irc.reply(unicode(text).translate(dd))
-        out = StringIO()
+        out = minisix.io.StringIO()
         write = out.write
         for c in text:
             try:
@@ -28,9 +28,8 @@
 
 ###
 
-from cStringIO import StringIO
-
 from supybot.test import *
+import supybot.minisix as minisix
 
 import supybot.gpg as gpg
 
@@ -111,25 +110,25 @@ class GPGTestCase(PluginTestCase):
             return fd
         (utils.web.getUrlFd, realGetUrlFd) = (fakeGetUrlFd, utils.web.getUrlFd)
 
-        fd = StringIO()
+        fd = minisix.io.StringIO()
         fd.write('foo')
         fd.seek(0)
         self.assertResponse('gpg signing auth http://foo.bar/baz.gpg',
                 'Error: Signature or token not found.')
 
-        fd = StringIO()
+        fd = minisix.io.StringIO()
         fd.write(token)
         fd.seek(0)
         self.assertResponse('gpg signing auth http://foo.bar/baz.gpg',
                 'Error: Signature or token not found.')
 
-        fd = StringIO()
+        fd = minisix.io.StringIO()
         fd.write(WRONG_TOKEN_SIGNATURE)
         fd.seek(0)
         self.assertRegexp('gpg signing auth http://foo.bar/baz.gpg',
                 'Error: Unknown token.*')
 
-        fd = StringIO()
+        fd = minisix.io.StringIO()
         fd.write(str(gpg.keyring.sign(token)))
         fd.seek(0)
         self.assertResponse('gpg signing auth http://foo.bar/baz.gpg',
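The test above stubs out the network: getUrlFd is temporarily replaced by a fake that serves an in-memory file. The technique in isolation (the canned content is hypothetical):

    import supybot.utils as utils
    import supybot.minisix as minisix

    realGetUrlFd = utils.web.getUrlFd
    def fakeGetUrlFd(*args, **kwargs):
        fd = minisix.io.StringIO()
        fd.write('canned response')   # stand-in for a signature or token
        fd.seek(0)                    # rewind so the caller reads from the start
        return fd
    utils.web.getUrlFd = fakeGetUrlFd
    try:
        assert utils.web.getUrlFd('http://foo.bar/baz.gpg').read() == 'canned response'
    finally:
        utils.web.getUrlFd = realGetUrlFd   # always restore the real function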
@@ -34,7 +34,6 @@ import cgi
 import json
 import time
 import socket
-import urllib
 
 import supybot.conf as conf
 import supybot.utils as utils
@@ -119,7 +118,7 @@ class Google(callbacks.PluginRegexp):
             opts['rsz'] = 'large'
 
         text = utils.web.getUrl('%s?%s' % (self._gsearchUrl,
-                                           urllib.urlencode(opts)),
+                                           utils.web.urlencode(opts)),
                                 headers=headers).decode('utf8')
         data = json.loads(text)
         if data['responseStatus'] != 200:
@@ -255,10 +254,10 @@ class Google(callbacks.PluginRegexp):
         headers['User-Agent'] = ('Mozilla/5.0 (X11; U; Linux i686) '
                                  'Gecko/20071127 Firefox/2.0.0.11')
 
-        sourceLang = urllib.quote(sourceLang)
-        targetLang = urllib.quote(targetLang)
+        sourceLang = utils.web.urlquote(sourceLang)
+        targetLang = utils.web.urlquote(targetLang)
 
-        text = urllib.quote(text)
+        text = utils.web.urlquote(text)
 
         result = utils.web.getUrlFd('http://translate.googleapis.com/translate_a/single'
                                     '?client=gtx&dt=t&sl=%s&tl=%s&q='
@@ -291,7 +290,7 @@ class Google(callbacks.PluginRegexp):
     googleSnarfer = urlSnarfer(googleSnarfer)
 
     def _googleUrl(self, s, channel):
-        s = urllib.quote_plus(s)
+        s = utils.web.urlquote_plus(s)
         url = r'http://%s/search?q=%s' % \
               (self.registryValue('baseUrl', channel), s)
         return url
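All three Google hunks swap direct urllib calls for the wrappers this commit defines in src/utils/web.py. A sketch with hypothetical values:

    import supybot.utils as utils

    params = utils.web.urlencode({'q': 'limnoria', 'rsz': 'large'})  # returns bytes
    q = utils.web.urlquote_plus('snake facts')   # spaces become '+'
    lang = utils.web.urlquote('en')              # percent-quoting, '/' kept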
@@ -10,9 +10,10 @@
 #
 #****************************************************************************
 
-import re, copy, sys, os.path, StringIO
+import re, copy, sys, os.path
 
 import supybot.conf as conf
+import supybot.minisix as minisix
 import supybot.registry as registry
 
 unitData = \
@@ -1059,7 +1060,7 @@ class UnitData(dict):
         types = []
         typeUnits = {}
         try:
-            f = StringIO.StringIO(unitData)
+            f = minisix.io.StringIO(unitData)
             lines = f.readlines()
             f.close()
         except IOError:
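The pattern in isolation: wrapping the module-level unitData string in a file-like object lets the line-oriented parsing below it stay unchanged (table contents here are stand-ins):

    import supybot.minisix as minisix

    unitData = "m 1 meter\nkm 1000 meters\n"   # stand-in for the real table
    f = minisix.io.StringIO(unitData)
    lines = f.readlines()
    f.close()
    assert lines[1].startswith('km')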
@@ -32,8 +32,6 @@ import sys
 import time
 import string
 
-from cStringIO import StringIO
-
 import supybot.conf as conf
 import supybot.ircdb as ircdb
 import supybot.utils as utils
@@ -70,7 +68,7 @@ class OptionList(object):
                 ret.append(token)
 
     def tokenize(self, s):
-        lexer = shlex.shlex(StringIO(s))
+        lexer = shlex.shlex(minisix.io.StringIO(s))
         lexer.commenters = ''
         lexer.quotes = ''
         lexer.whitespace = ''
@@ -33,9 +33,7 @@ import io
 import sys
 import json
 import shutil
-import urllib
 import tarfile
-from cStringIO import StringIO
 
 import supybot.log as log
 import supybot.conf as conf
@@ -47,8 +45,6 @@ import supybot.ircutils as ircutils
 import supybot.callbacks as callbacks
 from supybot.i18n import PluginInternationalization, internationalizeDocstring
 
-BytesIO = StringIO if minisix.PY2 else io.BytesIO
-
 _ = PluginInternationalization('PluginDownloader')
 
 class Repository:
@@ -81,7 +77,7 @@ class GithubRepository(GitRepository):
     def _query(self, type_, uri_end, args={}):
         args = dict([(x,y) for x,y in args.items() if y is not None])
         url = '%s/%s/%s?%s' % (self._apiUrl, type_, uri_end,
-                               urllib.urlencode(args))
+                               utils.web.urlencode(args))
         return json.loads(utils.web.getUrl(url).decode('utf8'))
 
     def getPluginList(self):
@@ -109,7 +105,7 @@ class GithubRepository(GitRepository):
                 assert response.getcode() == 200, response.getcode()
             else:
                 assert response.status == 200, response.status
-            fileObject = BytesIO()
+            fileObject = minisix.io.BytesIO()
             fileObject.write(response.read())
         finally: # urllib does not handle 'with' statements :(
             response.close()
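Downloads are raw bytes, so the buffer must be a BytesIO rather than a text StringIO; minisix.io.BytesIO maps to cStringIO on Python 2 and io.BytesIO on Python 3. A sketch of the buffering step:

    import supybot.minisix as minisix

    fileObject = minisix.io.BytesIO()
    fileObject.write(b'\x1f\x8b...')   # stand-in for response.read(), e.g. a gzipped tarball
    fileObject.seek(0)                 # rewind before tarfile.open(fileobj=fileObject)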
@@ -33,10 +33,6 @@ import feedparser
 from supybot.test import *
 import supybot.conf as conf
 import supybot.minisix as minisix
-if minisix.PY3:
-    from io import BytesIO
-else:
-    from cStringIO import StringIO as BytesIO
 
 xkcd_old = """<?xml version="1.0" encoding="utf-8"?>
 <rss version="2.0"><channel><title>xkcd.com</title><link>http://xkcd.com/</link><description>xkcd.com: A webcomic of romance and math humor.</description><language>en</language><item><title>Snake Facts</title><link>http://xkcd.com/1398/</link><description><img src="http://imgs.xkcd.com/comics/snake_facts.png" title="Biologically speaking, what we call a 'snake' is actually a human digestive tract which has escaped from its host." alt="Biologically speaking, what we call a 'snake' is actually a human digestive tract which has escaped from its host." /></description><pubDate>Wed, 23 Jul 2014 04:00:00 -0000</pubDate><guid>http://xkcd.com/1398/</guid></item></channel></rss>
@@ -51,7 +47,7 @@ def constant(content):
     if minisix.PY3:
         content = content.encode()
     def f(*args, **kwargs):
-        return BytesIO(content)
+        return minisix.io.BytesIO(content)
     return f
 
 url = 'http://www.advogato.org/rss/articles.xml'
@@ -31,7 +31,6 @@ import time
 import os
 import shutil
 import tempfile
-import cPickle as pickle
 
 import supybot.conf as conf
 import supybot.utils as utils
@@ -42,6 +41,9 @@ from supybot.i18n import PluginInternationalization, internationalizeDocstring
 _ = PluginInternationalization('Scheduler')
 import supybot.world as world
 
+import supybot.minisix as minisix
+
+pickle = minisix.pickle
 
 datadir = conf.supybot.directories.data()
 filename = conf.supybot.directories.data.dirize('Scheduler.pickle')
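The added alias keeps the rest of the module untouched: the name pickle now points at cPickle on Python 2 and the stdlib pickle on Python 3. The round trip in isolation:

    import supybot.minisix as minisix
    pickle = minisix.pickle

    blob = pickle.dumps({'name': 'reload', 'in': 300})
    assert pickle.loads(blob)['in'] == 300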
@@ -32,7 +32,6 @@ import re
 import sys
 import time
 import json
-import urllib
 
 import supybot.log as log
 import supybot.conf as conf
@@ -32,7 +32,6 @@ import re
 import random
 import shutil
 import tempfile
-import cPickle as pickle
 
 import supybot.conf as conf
 import supybot.ircdb as ircdb
@@ -48,6 +47,8 @@ _ = PluginInternationalization('Topic')
 
 import supybot.ircdb as ircdb
 
+import supybot.minisix as minisix
+pickle = minisix.pickle
 
 def canChangeTopic(irc, msg, args, state):
     assert not state.channel
@@ -31,8 +31,6 @@
 import re
 import sys
 import socket
-import HTMLParser
-import htmlentitydefs
 
 import supybot.conf as conf
 import supybot.utils as utils
@@ -45,14 +43,21 @@ import supybot.callbacks as callbacks
 from supybot.i18n import PluginInternationalization, internationalizeDocstring
 _ = PluginInternationalization('Web')
 
-class Title(HTMLParser.HTMLParser):
-    entitydefs = htmlentitydefs.entitydefs.copy()
+if minisix.PY3:
+    from html.parser import HTMLParser, HTMLParseError
+    from html.entities import entitydefs
+else:
+    from HTMLParser import HTMLParser, HTMLParseError
+    from htmlentitydefs import entitydefs
+
+class Title(HTMLParser):
+    entitydefs = entitydefs.copy()
     entitydefs['nbsp'] = ' '
     entitydefs['apos'] = '\''
     def __init__(self):
         self.inTitle = False
         self.title = ''
-        HTMLParser.HTMLParser.__init__(self)
+        HTMLParser.__init__(self)
 
     def handle_starttag(self, tag, attrs):
         if tag == 'title':
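A usage sketch for the Title class above (markup is hypothetical; the handle_data and handle_endtag methods that accumulate the title are assumed from the rest of the file, which this hunk does not show):

    parser = Title()
    parser.feed('<html><head><title>Limnoria</title></head><body/></html>')
    assert parser.title == 'Limnoria'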
@@ -156,7 +161,7 @@ class Web(callbacks.PluginRegexp):
         parser = Title()
         try:
             parser.feed(text)
-        except HTMLParser.HTMLParseError:
+        except HTMLParseError:
             self.log.debug('Encountered a problem parsing %u. Title may '
                            'already be set, though', url)
         if parser.title:
@@ -286,7 +291,7 @@ class Web(callbacks.PluginRegexp):
                       'installing python-charade.)'), Raise=True)
         try:
             parser.feed(text)
-        except HTMLParser.HTMLParseError:
+        except HTMLParseError:
             self.log.debug('Encountered a problem parsing %u. Title may '
                            'already be set, though', url)
         if parser.title:
setup.py (4 changes)
@@ -158,11 +158,9 @@ try:
             log.debug(msg, *args)
 
     fixer_names = ['fix_basestring',
-                   'fix_imports',
                    'fix_metaclass', 'fix_methodattrs',
                    'fix_numliterals',
-                   'fix_types',
-                   'fix_unicode', 'fix_urllib', 'fix_xrange']
+                   'fix_unicode', 'fix_xrange']
     fixers = list(map(lambda x:'lib2to3.fixes.'+x, fixer_names))
     fixers += get_fixers_from_package('2to3')
     r = DistutilsRefactoringTool(fixers, options=options)
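Each fixer named here is a source-to-source rewrite that lib2to3 applies at build time; dropping one means the codebase itself must now be valid on both majors. A standalone illustration using a fixer that is kept (lib2to3 ships with Pythons of this era, though it is deprecated in modern ones):

    from lib2to3.refactor import RefactoringTool

    rt = RefactoringTool(['lib2to3.fixes.fix_numliterals'])
    print(rt.refactor_string(u'x = 010\n', '<example>'))   # -> x = 0o10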
@@ -48,14 +48,6 @@ from .utils.iter import any, all
 from .i18n import PluginInternationalization, internationalizeDocstring
 _ = PluginInternationalization()
 
-if minisix.PY2:
-    # cStringIO is buggy with Python 2.6 (
-    # see http://paste.progval.net/show/227/ )
-    # and it does not handle unicode objects in Python 2.x
-    from StringIO import StringIO
-else:
-    from cStringIO import StringIO
-
 def _addressed(nick, msg, prefixChars=None, nicks=None,
                prefixStrings=None, whenAddressedByNick=None,
                whenAddressedByNickAtEnd=None):
|
|||||||
return ret
|
return ret
|
||||||
|
|
||||||
def tokenize(self, s):
|
def tokenize(self, s):
|
||||||
lexer = shlex.shlex(StringIO(s))
|
lexer = shlex.shlex(minisix.io.StringIO(s))
|
||||||
lexer.commenters = ''
|
lexer.commenters = ''
|
||||||
lexer.quotes = self.quotes
|
lexer.quotes = self.quotes
|
||||||
lexer.separators = self.separators
|
lexer.separators = self.separators
|
||||||
|
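Same substitution as in the OptionList hunk earlier: shlex reads from any file-like object, so a StringIO turns a plain command string into a token stream. In isolation:

    import shlex
    import supybot.minisix as minisix

    lexer = shlex.shlex(minisix.io.StringIO('echo "hello world"'))
    lexer.commenters = ''                      # treat '#' as an ordinary character
    tokens = list(iter(lexer.get_token, ''))   # get_token returns '' at EOF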
@@ -38,9 +38,8 @@ import os
 import sys
 import struct
 import os.path
-import cPickle as pickle
 
-from . import utils
+from . import utils, minisix
 
 def hash(s):
     """DJB's hash function for CDB."""
@@ -447,14 +446,14 @@ class ReaderWriter(utils.IterableMap):
 class Shelf(ReaderWriter):
     """Uses pickle to mimic the shelf module."""
     def __getitem__(self, key):
-        return pickle.loads(ReaderWriter.__getitem__(self, key))
+        return minisix.pickle.loads(ReaderWriter.__getitem__(self, key))
 
     def __setitem__(self, key, value):
-        ReaderWriter.__setitem__(self, key, pickle.dumps(value, True))
+        ReaderWriter.__setitem__(self, key, minisix.pickle.dumps(value, True))
 
     def items(self):
         for (key, value) in ReaderWriter.items(self):
-            yield (key, pickle.loads(value))
+            yield (key, minisix.pickle.loads(value))
 
 
 if __name__ == '__main__':
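The Shelf wrapper pickles values into the CDB. The round trip in isolation (the second positional argument True selects a binary pickle protocol, as in the original code):

    import supybot.minisix as minisix

    raw = minisix.pickle.dumps(['op', 'voice'], True)
    assert minisix.pickle.loads(raw) == ['op', 'voice']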
@@ -33,7 +33,6 @@ Includes wrappers for commands.
 """
 
 import time
-import Queue
 import getopt
 import inspect
 import threading
@@ -124,7 +123,7 @@ def process(f, *args, **kwargs):
         raise ProcessTimeoutError("%s aborted due to timeout." % (p.name,))
     try:
         v = q.get(block=False)
-    except Queue.Empty:
+    except minisix.queue.Empty:
         return None
     finally:
         q.close()
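multiprocessing queues raise the stdlib queue module's Empty on a failed non-blocking get, and that module was renamed between majors (Queue to queue); minisix.queue hides the rename. A sketch:

    import multiprocessing
    import supybot.minisix as minisix

    q = multiprocessing.Queue()
    try:
        v = q.get(block=False)       # nothing was put, so this raises
    except minisix.queue.Empty:
        v = None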
@@ -36,7 +36,6 @@ import sys
 import cgi
 import socket
 from threading import Thread
-from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
 
 import supybot.log as log
 import supybot.conf as conf
@@ -45,6 +44,11 @@ import supybot.minisix as minisix
 from supybot.i18n import PluginInternationalization
 _ = PluginInternationalization()
 
+if minisix.PY2:
+    from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
+else:
+    from http.server import HTTPServer, BaseHTTPRequestHandler
+
 configGroup = conf.supybot.servers.http
 
 class RequestNotHandled(Exception):
@@ -45,7 +45,6 @@ import random
 import string
 import textwrap
 import functools
-from cStringIO import StringIO as sio
 
 from . import utils
 from . import minisix
@@ -174,7 +173,7 @@ def _hostmaskPatternEqual(pattern, hostmask):
     except KeyError:
         # We make our own regexps, rather than use fnmatch, because fnmatch's
         # case-insensitivity is not IRC's case-insensitity.
-        fd = sio()
+        fd = minisix.io.StringIO()
         for c in pattern:
             if c == '*':
                 fd.write('.*')
@@ -528,7 +527,7 @@ class FormatContext(object):
 
 class FormatParser(object):
     def __init__(self, s):
-        self.fd = sio(s)
+        self.fd = minisix.io.StringIO(s)
         self.last = None
 
     def getChar(self):
@@ -37,6 +37,10 @@ if sys.version_info[0] >= 3:
     intern = sys.intern
     integer_types = (int,)
     long = int
+
+    import io
+    import pickle
+    import queue
 else:
     PY2 = True
     PY3 = False
@@ -46,3 +50,12 @@ else:
     intern = __builtins__.intern
     integer_types = (int, long)
     long = long
+
+    class io:
+        # cStringIO is buggy with Python 2.6 (
+        # see http://paste.progval.net/show/227/ )
+        # and it does not handle unicode objects in Python 2.x
+        from StringIO import StringIO
+        from cStringIO import StringIO as BytesIO
+    import cPickle as pickle
+    import Queue as queue
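These two hunks are the heart of the commit: the version check happens once here, and every other module just imports the namespaces. A consumer's-eye sketch:

    import supybot.minisix as minisix

    buf = minisix.io.StringIO()             # text buffer on both majors
    raw = minisix.io.BytesIO()              # byte buffer on both majors
    blob = minisix.pickle.dumps({'a': 1})   # cPickle on PY2, pickle on PY3
    q = minisix.queue.Queue()               # Queue module on PY2, queue on PY3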
src/test.py (34 changes)
@@ -35,13 +35,21 @@ import sys
 import time
 import shutil
 import urllib
-import httplib
 import unittest
 import threading
 
 from . import (callbacks, conf, drivers, httpserver, i18n, ircdb, irclib,
                ircmsgs, ircutils, log, minisix, plugin, registry, utils, world)
 
+if minisix.PY2:
+    from httplib import HTTPConnection
+    from urllib import splithost, splituser
+    from urllib import URLopener
+else:
+    from http.client import HTTPConnection
+    from urllib.parse import splithost, splituser
+    from urllib.request import URLopener
+
 i18n.import_conf()
 network = True
 
@@ -511,15 +519,15 @@ def open_http(url, data=None):
     user_passwd = None
     proxy_passwd= None
     if isinstance(url, str):
-        host, selector = urllib.splithost(url)
+        host, selector = splithost(url)
         if host:
-            user_passwd, host = urllib.splituser(host)
+            user_passwd, host = splituser(host)
             host = urllib.unquote(host)
         realhost = host
     else:
         host, selector = url
         # check whether the proxy contains authorization information
-        proxy_passwd, host = urllib.splituser(host)
+        proxy_passwd, host = splituser(host)
         # now we proceed with the url we want to obtain
         urltype, rest = urllib.splittype(selector)
         url = rest
@@ -527,9 +535,9 @@ def open_http(url, data=None):
     if urltype.lower() != 'http':
         realhost = None
     else:
-        realhost, rest = urllib.splithost(rest)
+        realhost, rest = splithost(rest)
         if realhost:
-            user_passwd, realhost = urllib.splituser(realhost)
+            user_passwd, realhost = splituser(realhost)
         if user_passwd:
             selector = "%s://%s%s" % (urltype, realhost, rest)
     if urllib.proxy_bypass(realhost):
@@ -559,15 +567,15 @@ def open_http(url, data=None):
     if proxy_auth: c.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
     if auth: c.putheader('Authorization', 'Basic %s' % auth)
     if realhost: c.putheader('Host', realhost)
-    for args in urllib.URLopener().addheaders: c.putheader(*args)
+    for args in URLopener().addheaders: c.putheader(*args)
     c.endheaders()
     return c
 
-class FakeHTTPConnection(httplib.HTTPConnection):
+class FakeHTTPConnection(HTTPConnection):
     _data = ''
     _headers = {}
     def __init__(self, rfile, wfile):
-        httplib.HTTPConnection.__init__(self, 'localhost')
+        HTTPConnection.__init__(self, 'localhost')
         self.rfile = rfile
         self.wfile = wfile
     def send(self, data):
@@ -585,12 +593,8 @@ class HTTPPluginTestCase(PluginTestCase):
 
     def request(self, url, method='GET', read=True, data={}):
         assert url.startswith('/')
-        try:
-            from io import BytesIO as StringIO
-        except ImportError:
-            from StringIO import StringIO
-        wfile = StringIO()
-        rfile = StringIO()
+        wfile = minisix.io.StringIO()
+        rfile = minisix.io.StringIO()
         connection = FakeHTTPConnection(wfile, rfile)
         connection.putrequest(method, url)
         connection.endheaders()
@@ -29,20 +29,20 @@
 ###
 
 import sys
+from .. import minisix
 
 ###
 # csv.{join,split} -- useful functions that should exist.
 ###
 import csv
-import cStringIO as StringIO
 def join(L):
-    fd = StringIO.StringIO()
+    fd = minisix.io.StringIO()
     writer = csv.writer(fd)
     writer.writerow(L)
     return fd.getvalue().rstrip('\r\n')
 
 def split(s):
-    fd = StringIO.StringIO(s)
+    fd = minisix.io.StringIO(s)
     reader = csv.reader(fd)
     return next(reader)
 csv.join = join
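A usage sketch for the join/split helpers just grafted onto the csv module (they only exist once this supybot.utils module has been imported):

    import csv
    import supybot.utils   # installs csv.join and csv.split

    line = csv.join(['nick', 'host,mask'])   # -> 'nick,"host,mask"'
    assert csv.split(line) == ['nick', 'host,mask']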
@@ -101,7 +101,7 @@ class RingBuffer(object):
     def __getitem__(self, idx):
         if self.full:
             oidx = idx
-            if isinstance(oidx, types.SliceType):
+            if isinstance(oidx, slice):
                 L = []
                 for i in xrange(*slice.indices(oidx, len(self))):
                     L.append(self[i])
@@ -113,7 +113,7 @@ class RingBuffer(object):
                 idx = (idx + self.i) % len(self.L)
                 return self.L[idx]
         else:
-            if isinstance(idx, types.SliceType):
+            if isinstance(idx, slice):
                 L = []
                 for i in xrange(*slice.indices(idx, len(self))):
                     L.append(self[i])
|
|||||||
def __setitem__(self, idx, elt):
|
def __setitem__(self, idx, elt):
|
||||||
if self.full:
|
if self.full:
|
||||||
oidx = idx
|
oidx = idx
|
||||||
if isinstance(oidx, types.SliceType):
|
if isinstance(oidx, slice):
|
||||||
range_ = xrange(*slice.indices(oidx, len(self)))
|
range_ = xrange(*slice.indices(oidx, len(self)))
|
||||||
if len(range_) != len(elt):
|
if len(range_) != len(elt):
|
||||||
raise ValueError('seq must be the same length as slice.')
|
raise ValueError('seq must be the same length as slice.')
|
||||||
@ -138,7 +138,7 @@ class RingBuffer(object):
|
|||||||
idx = (idx + self.i) % len(self.L)
|
idx = (idx + self.i) % len(self.L)
|
||||||
self.L[idx] = elt
|
self.L[idx] = elt
|
||||||
else:
|
else:
|
||||||
if isinstance(idx, types.SliceType):
|
if isinstance(idx, slice):
|
||||||
range_ = xrange(*slice.indices(idx, len(self)))
|
range_ = xrange(*slice.indices(idx, len(self)))
|
||||||
if len(range_) != len(elt):
|
if len(range_) != len(elt):
|
||||||
raise ValueError('seq must be the same length as slice.')
|
raise ValueError('seq must be the same length as slice.')
|
||||||
@ -228,7 +228,7 @@ class queue(object):
|
|||||||
def __getitem__(self, oidx):
|
def __getitem__(self, oidx):
|
||||||
if len(self) == 0:
|
if len(self) == 0:
|
||||||
raise IndexError('queue index out of range')
|
raise IndexError('queue index out of range')
|
||||||
if isinstance(oidx, types.SliceType):
|
if isinstance(oidx, slice):
|
||||||
L = []
|
L = []
|
||||||
for i in xrange(*slice.indices(oidx, len(self))):
|
for i in xrange(*slice.indices(oidx, len(self))):
|
||||||
L.append(self[i])
|
L.append(self[i])
|
||||||
@ -245,7 +245,7 @@ class queue(object):
|
|||||||
def __setitem__(self, oidx, value):
|
def __setitem__(self, oidx, value):
|
||||||
if len(self) == 0:
|
if len(self) == 0:
|
||||||
raise IndexError('queue index out of range')
|
raise IndexError('queue index out of range')
|
||||||
if isinstance(oidx, types.SliceType):
|
if isinstance(oidx, slice):
|
||||||
range_ = xrange(*slice.indices(oidx, len(self)))
|
range_ = xrange(*slice.indices(oidx, len(self)))
|
||||||
if len(range_) != len(value):
|
if len(range_) != len(value):
|
||||||
raise ValueError('seq must be the same length as slice.')
|
raise ValueError('seq must be the same length as slice.')
|
||||||
@ -266,7 +266,7 @@ class queue(object):
|
|||||||
self.back[idx-len(self.front)] = value
|
self.back[idx-len(self.front)] = value
|
||||||
|
|
||||||
def __delitem__(self, oidx):
|
def __delitem__(self, oidx):
|
||||||
if isinstance(oidx, types.SliceType):
|
if isinstance(oidx, slice):
|
||||||
range_ = xrange(*slice.indices(oidx, len(self)))
|
range_ = xrange(*slice.indices(oidx, len(self)))
|
||||||
for i in range_:
|
for i in range_:
|
||||||
del self[i]
|
del self[i]
|
||||||
|
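All seven hunks in this file are the same one-token change, and it is safe because types.SliceType was merely an alias for the built-in slice type in Python 2, and the alias is gone in Python 3:

    s = slice(1, 10, 2)
    assert isinstance(s, slice)        # true on both majors
    assert s.indices(6) == (1, 6, 2)   # clamped to a length-6 sequence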
@@ -32,12 +32,6 @@ import re
 import sys
 import base64
 import socket
-import urllib
-import urllib2
-import httplib
-import urlparse
-import htmlentitydefs
-from HTMLParser import HTMLParser
 
 sockerrors = (socket.error,)
 try:
@@ -48,12 +42,37 @@ except AttributeError:
 from .str import normalizeWhitespace
 from .. import minisix
 
-Request = urllib2.Request
-urlquote = urllib.quote
-urlunquote = urllib.unquote
-
-def urlencode(*args, **kwargs):
-    return urllib.urlencode(*args, **kwargs).encode()
+if minisix.PY2:
+    import urllib
+    import urllib2
+    from httplib import InvalidURL
+    from urlparse import urlsplit, urlunsplit, urlparse
+    from htmlentitydefs import entitydefs, name2codepoint
+    from HTMLParser import HTMLParser
+    Request = urllib2.Request
+    urlquote = urllib.quote
+    urlquote_plus = urllib.quote_plus
+    urlunquote = urllib.unquote
+    urlopen = urllib2.urlopen
+    def urlencode(*args, **kwargs):
+        return urllib.urlencode(*args, **kwargs).encode()
+    from urllib2 import HTTPError, URLError
+    from urllib import splithost, splituser
+else:
+    from http.client import InvalidURL
+    from urllib.parse import urlsplit, urlunsplit, urlparse
+    from html.entities import entitydefs, name2codepoint
+    from html.parser import HTMLParser
+    import urllib.request, urllib.parse, urllib.error
+    Request = urllib.request.Request
+    urlquote = urllib.parse.quote
+    urlquote_plus = urllib.parse.quote_plus
+    urlunquote = urllib.parse.unquote
+    urlopen = urllib.request.urlopen
+    def urlencode(*args, **kwargs):
+        return urllib.parse.urlencode(*args, **kwargs).encode()
+    from urllib.error import HTTPError, URLError
+    from urllib.parse import splithost, splituser
 
 class Error(Exception):
     pass
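After this block, callers use the version-neutral names and never import urllib, urllib2, or their Python 3 successors directly. A small usage sketch (hypothetical URL):

    from supybot.utils import web

    params = web.urlencode({'q': 'limnoria'})   # bytes on both majors
    url = 'http://example.org/search?' + params.decode()
    fd = web.getUrlFd(url)                      # raises web.Error on failure
    print(web.getDomain(url))                   # -> 'example.org'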
@@ -106,17 +125,18 @@ def getUrlFd(url, headers=None, data=None, timeout=None):
     """getUrlFd(url, headers=None, data=None, timeout=None)
 
     Opens the given url and returns a file object.  Headers and data are
-    a dict and string, respectively, as per urllib2.Request's arguments."""
+    a dict and string, respectively, as per urllib.request.Request's
+    arguments."""
     if headers is None:
         headers = defaultHeaders
     if minisix.PY3 and isinstance(data, str):
         data = data.encode()
     try:
-        if not isinstance(url, urllib2.Request):
-            (scheme, loc, path, query, frag) = urlparse.urlsplit(url)
-            (user, host) = urllib.splituser(loc)
-            url = urlparse.urlunsplit((scheme, host, path, query, ''))
-            request = urllib2.Request(url, headers=headers, data=data)
+        if not isinstance(url, Request):
+            (scheme, loc, path, query, frag) = urlsplit(url)
+            (user, host) = splituser(loc)
+            url = urlunsplit((scheme, host, path, query, ''))
+            request = Request(url, headers=headers, data=data)
         if user:
             request.add_header('Authorization',
                                'Basic %s' % base64.b64encode(user))
@@ -126,17 +146,17 @@ def getUrlFd(url, headers=None, data=None, timeout=None):
         httpProxy = force(proxy)
         if httpProxy:
             request.set_proxy(httpProxy, 'http')
-        fd = urllib2.urlopen(request, timeout=timeout)
+        fd = urlopen(request, timeout=timeout)
         return fd
     except socket.timeout as e:
         raise Error(TIMED_OUT)
     except sockerrors as e:
         raise Error(strError(e))
-    except httplib.InvalidURL as e:
+    except InvalidURL as e:
         raise Error('Invalid URL: %s' % e)
-    except urllib2.HTTPError as e:
+    except HTTPError as e:
         raise Error(strError(e))
-    except urllib2.URLError as e:
+    except URLError as e:
         raise Error(strError(e.reason))
     # Raised when urllib doesn't recognize the url type
     except ValueError as e:
@@ -147,7 +167,7 @@ def getUrl(url, size=None, headers=None, data=None, timeout=None):
 
     Gets a page.  Returns a string that is the page gotten.  Size is an integer
     number of bytes to read from the URL.  Headers and data are dicts as per
-    urllib2.Request's arguments."""
+    urllib.request.Request's arguments."""
     fd = getUrlFd(url, headers=headers, data=data, timeout=timeout)
     try:
         if size is None:
@@ -160,7 +180,7 @@ def getUrl(url, size=None, headers=None, data=None, timeout=None):
     return text
 
 def getDomain(url):
-    return urlparse.urlparse(url)[1]
+    return urlparse(url)[1]
 
 _charset_re = ('<meta[^a-z<>]+charset='
                """(?P<charset>("[^"]+"|'[^']+'))""")
@@ -185,7 +205,7 @@ def getEncoding(s):
 
 class HtmlToText(HTMLParser, object):
     """Taken from some eff-bot code on c.l.p."""
-    entitydefs = htmlentitydefs.entitydefs.copy()
+    entitydefs = entitydefs.copy()
     entitydefs['nbsp'] = ' '
     def __init__(self, tagReplace=' '):
         self.data = []
@@ -202,8 +222,8 @@ class HtmlToText(HTMLParser, object):
         self.data.append(data)
 
     def handle_entityref(self, data):
-        if data in htmlentitydefs.name2codepoint:
-            self.data.append(unichr(htmlentitydefs.name2codepoint[data]))
+        if data in name2codepoint:
+            self.data.append(unichr(name2codepoint[data]))
         elif minisix.PY3 and isinstance(data, bytes):
             self.data.append(data.decode())
         elif minisix.PY2 and isinstance(data, str):