Remove need for fix_unicode.

This commit is contained in:
Valentin Lorentz 2015-08-10 18:52:51 +02:00
parent 6ceec0c541
commit be6bc1a734
8 changed files with 63 additions and 46 deletions
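In short: setup.py's 2to3 pass no longer needs the fix_unicode fixer, because the tree no longer contains u''-prefixed literals. Modules either add `from __future__ import unicode_literals`, route literals through a new u() helper in supybot.minisix, or pick unichr/chr based on minisix.PY2. A minimal sketch of the before/after spelling (names as introduced in this commit):

    # Before: only valid on Python 3 after 2to3's fix_unicode strips the prefix.
    s = u'café'

    # After: valid as-is on both Python 2 and Python 3.
    from supybot.minisix import u
    s = u('café')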

View File

@@ -33,6 +33,7 @@ from supybot.test import *
 import supybot.conf as conf
 import supybot.plugin as plugin
 import supybot.registry as registry
+from supybot.minisix import u

 import plugin as Aka
@@ -147,8 +148,8 @@ class AkaChannelTestCase(ChannelPluginTestCase):
         self.assertResponse('egg', 'baz')

     def testComplicatedNames(self):
-        self.assertNotError(u'aka add café "echo coffee"')
-        self.assertResponse(u'café', 'coffee')
+        self.assertNotError(u('aka add café "echo coffee"'))
+        self.assertResponse(u('café'), 'coffee')
         self.assertNotError('aka add "foo bar" "echo spam"')
         self.assertResponse('foo bar', 'spam')
@@ -201,7 +202,7 @@ class AkaTestCase(PluginTestCase):
     def testAliasImport(self):
         self.assertNotError('alias add foo "echo bar"')
-        self.assertNotError(u'alias add baz "echo café"')
+        self.assertNotError(u('alias add baz "echo café"'))
         self.assertNotError('aka add qux "echo quux"')
         self.assertResponse('alias foo', 'bar')
         self.assertResponse('alias baz', 'café')

View File

@@ -28,6 +28,8 @@
 # POSSIBILITY OF SUCH DAMAGE.
 ###

+from __future__ import unicode_literals
+
 import re
 import sys
 import codecs
@@ -653,39 +655,39 @@ class Filter(callbacks.Plugin):
     # XXX suckiest: B,K,P,Q,T
     # alternatives: 3: U+2107
     _uniudMap = {
-        ' ': u' ',      '0': u'0',      '@': u'@',
-        '!': u'\u00a1', '1': u'1',      'A': u'\u2200',
-        '"': u'\u201e', '2': u'\u2681', 'B': u'q',
-        '#': u'#',      '3': u'\u0190', 'C': u'\u0186',
-        '$': u'$',      '4': u'\u2683', 'D': u'\u15e1',
-        '%': u'%',      '5': u'\u1515', 'E': u'\u018e',
-        '&': u'\u214b', '6': u'9',      'F': u'\u2132',
-        "'": u'\u0375', '7': u'L',      'G': u'\u2141',
-        '(': u')',      '8': u'8',      'H': u'H',
-        ')': u'(',      '9': u'6',      'I': u'I',
-        '*': u'*',      ':': u':',      'J': u'\u148b',
-        '+': u'+',      ';': u';',      'K': u'\u029e',
-        ',': u'\u2018', '<': u'>',      'L': u'\u2142',
-        '-': u'-',      '=': u'=',      'M': u'\u019c',
-        '.': u'\u02d9', '>': u'<',      'N': u'N',
-        '/': u'/',      '?': u'\u00bf', 'O': u'O',
-        'P': u'd',      '`': u'\u02ce', 'p': u'd',
-        'Q': u'b',      'a': u'\u0250', 'q': u'b',
-        'R': u'\u1d1a', 'b': u'q',      'r': u'\u0279',
-        'S': u'S',      'c': u'\u0254', 's': u's',
-        'T': u'\u22a5', 'd': u'p',      't': u'\u0287',
-        'U': u'\u144e', 'e': u'\u01dd', 'u': u'n',
-        'V': u'\u039b', 'f': u'\u214e', 'v': u'\u028c',
-        'W': u'M',      'g': u'\u0253', 'w': u'\u028d',
-        'X': u'X',      'h': u'\u0265', 'x': u'x',
-        'Y': u'\u2144', 'i': u'\u1d09', 'y': u'\u028e',
-        'Z': u'Z',      'j': u'\u027f', 'z': u'z',
-        '[': u']',      'k': u'\u029e', '{': u'}',
-        '\\': u'\\',    'l': u'\u05df', '|': u'|',
-        ']': u'[',      'm': u'\u026f', '}': u'{',
-        '^': u'\u2335', 'n': u'u',      '~': u'~',
-        '_': u'\u203e', 'o': u'o',
+        ' ': ' ',       '0': '0',       '@': '@',
+        '!': '\u00a1',  '1': '1',       'A': '\u2200',
+        '"': '\u201e',  '2': '\u2681',  'B': 'q',
+        '#': '#',       '3': '\u0190',  'C': '\u0186',
+        '$': '$',       '4': '\u2683',  'D': '\u15e1',
+        '%': '%',       '5': '\u1515',  'E': '\u018e',
+        '&': '\u214b',  '6': '9',       'F': '\u2132',
+        "'": '\u0375',  '7': 'L',       'G': '\u2141',
+        '(': ')',       '8': '8',       'H': 'H',
+        ')': '(',       '9': '6',       'I': 'I',
+        '*': '*',       ':': ':',       'J': '\u148b',
+        '+': '+',       ';': ';',       'K': '\u029e',
+        ',': '\u2018',  '<': '>',       'L': '\u2142',
+        '-': '-',       '=': '=',       'M': '\u019c',
+        '.': '\u02d9',  '>': '<',       'N': 'N',
+        '/': '/',       '?': '\u00bf',  'O': 'O',
+        'P': 'd',       '`': '\u02ce',  'p': 'd',
+        'Q': 'b',       'a': '\u0250',  'q': 'b',
+        'R': '\u1d1a',  'b': 'q',       'r': '\u0279',
+        'S': 'S',       'c': '\u0254',  's': 's',
+        'T': '\u22a5',  'd': 'p',       't': '\u0287',
+        'U': '\u144e',  'e': '\u01dd',  'u': 'n',
+        'V': '\u039b',  'f': '\u214e',  'v': '\u028c',
+        'W': 'M',       'g': '\u0253',  'w': '\u028d',
+        'X': 'X',       'h': '\u0265',  'x': 'x',
+        'Y': '\u2144',  'i': '\u1d09',  'y': '\u028e',
+        'Z': 'Z',       'j': '\u027f',  'z': 'z',
+        '[': ']',       'k': '\u029e',  '{': '}',
+        '\\': '\\',     'l': '\u05df',  '|': '|',
+        ']': '[',       'm': '\u026f',  '}': '{',
+        '^': '\u2335',  'n': 'u',       '~': '~',
+        '_': '\u203e',  'o': 'o',
         }

     @internationalizeDocstring
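With `from __future__ import unicode_literals` in effect, every plain literal in the module is a unicode string on Python 2 as well, which is what lets the u prefixes in _uniudMap be dropped wholesale. A standalone illustration (not part of the commit):

    from __future__ import unicode_literals

    s = '\u2200'   # the "for all" sign
    # With the __future__ import, this is a one-character unicode string on
    # Python 2, matching Python 3. Without it, Python 2 leaves the escape
    # uninterpreted and yields the six-character str '\\u2200'.
    assert len(s) == 1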

View File

@@ -28,6 +28,8 @@
 # POSSIBILITY OF SUCH DAMAGE.
 ###

+from __future__ import unicode_literals
+
 from supybot.test import *

 import re

View File

@@ -28,6 +28,8 @@
 # POSSIBILITY OF SUCH DAMAGE.
 ###

+from supybot.minisix import u
+
 from supybot.test import *

 class UtilitiesTestCase(PluginTestCase):
@@ -46,8 +48,8 @@ class UtilitiesTestCase(PluginTestCase):
     def testEcho(self):
         self.assertHelp('echo')
         self.assertResponse('echo foo', 'foo')
-        self.assertResponse(u'echo 好', '好')
-        self.assertResponse(u'echo ""', '')
+        self.assertResponse(u('echo 好'), '好')
+        self.assertResponse(u('echo ""'), '')

     def testEchoDollarOneRepliesDollarOne(self):
         self.assertResponse('echo $1', '$1')

View File

@@ -78,7 +78,7 @@ class Title(HTMLParser):
     def handle_charref(self, name):
         if self.inTitle:
-            self.title += unichr(int(name))
+            self.title += (unichr if minisix.PY2 else chr)(int(name))

 class DelayedIrc:
     def __init__(self, irc):
View File

@@ -160,7 +160,7 @@ try:
        fixer_names = ['fix_basestring',
                       'fix_metaclass',
                       'fix_numliterals',
-                      'fix_unicode', 'fix_xrange']
+                      'fix_xrange']
        fixers = list(map(lambda x:'lib2to3.fixes.'+x, fixer_names))
        fixers += get_fixers_from_package('2to3')
        r = DistutilsRefactoringTool(fixers, options=options)
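Dropping fix_unicode means 2to3 no longer rewrites string literals at build time; the remaining fixers still translate basestring, old metaclass syntax, numeric literals, and xrange. A rough standalone sketch of driving lib2to3 with that fixer list (lib2to3 ships with Python but is deprecated since 3.9 and removed in 3.13; DistutilsRefactoringTool above is assumed to be a thin wrapper around it):

    from lib2to3.refactor import RefactoringTool

    fixer_names = ['fix_basestring', 'fix_metaclass',
                   'fix_numliterals', 'fix_xrange']
    tool = RefactoringTool(['lib2to3.fixes.' + name for name in fixer_names])
    tree = tool.refactor_string('x = xrange(10)\n', '<example>')
    print(tree)   # x = range(10)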

View File

@@ -41,6 +41,8 @@ if sys.version_info[0] >= 3:
     import io
     import pickle
     import queue
+
+    u = lambda x:x
 else:
     PY2 = True
     PY3 = False
@@ -59,3 +61,5 @@ else:
     from cStringIO import StringIO as BytesIO
     import cPickle as pickle
     import Queue as queue
+
+    u = lambda x:x.decode('utf8')
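The helper gives a single spelling that yields a unicode string on both interpreters: the identity on Python 3, and a UTF-8 decode of the byte literal on Python 2. A minimal usage sketch (assumes the caller's source file is UTF-8 encoded, as the test files in this commit are):

    from supybot.minisix import u

    s = u('café')
    # Python 3: 'café' is already str, returned unchanged.
    # Python 2: 'café' is a UTF-8 byte string, decoded to unicode.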

View File

@@ -222,14 +222,20 @@ class HtmlToText(HTMLParser, object):
         self.data.append(data)

     def handle_entityref(self, data):
-        if data in name2codepoint:
-            self.data.append(unichr(name2codepoint[data]))
-        elif minisix.PY3 and isinstance(data, bytes):
-            self.data.append(data.decode())
-        elif minisix.PY2 and isinstance(data, str):
-            self.data.append(data.decode('utf8', errors='replace'))
-        else:
-            self.data.append(data)
+        if minisix.PY3:
+            if data in name2codepoint:
+                self.data.append(chr(name2codepoint[data]))
+            elif isinstance(data, bytes):
+                self.data.append(data.decode())
+            else:
+                self.data.append(data)
+        else:
+            if data in name2codepoint:
+                self.data.append(unichr(name2codepoint[data]))
+            elif isinstance(data, str):
+                self.data.append(data.decode('utf8', errors='replace'))
+            else:
+                self.data.append(data)

     def getText(self):
         text = ''.join(self.data).strip()
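The per-version split keeps each branch straightforward: look the entity name up in name2codepoint and convert the code point with whichever of chr/unichr the interpreter provides. A minimal sketch of that lookup, assuming Python 3 (where the table lives in html.entities):

    from html.entities import name2codepoint

    # '&eacute;' is delivered to handle_entityref as the name 'eacute'.
    print(chr(name2codepoint['eacute']))   # é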