Configurable quotes, w00t.

Jeremy Fincher 2004-09-24 01:13:17 +00:00
parent bbddc4b834
commit 78da84e4cb
3 changed files with 39 additions and 6 deletions
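
In short: the set of characters the tokenizer treats as quotes is now read from a new channel-specific registry value, supybot.commands.quotes, instead of being hard-coded to the double quote. A rough usage sketch, based on the new test below; the import style and a loaded configuration are assumptions on my part, not part of this commit:

    import supybot.conf as conf               # assumed import paths
    import supybot.callbacks as callbacks

    conf.supybot.commands.quotes.setValue('`')    # backtick is now the quote character
    callbacks.tokenize('[foo]')      # -> [['foo']]: brackets still nest
    callbacks.tokenize('`[foo]`')    # -> ['[foo]']: quoting keeps the brackets literal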


@@ -228,9 +228,8 @@ class Tokenizer(object):
     #
     # These are the characters valid in a token. Everything printable except
     # double-quote, left-bracket, and right-bracket.
-    validChars = string.ascii.translate(string.ascii, '\x00\r\n \t"')
-    quotes = '"'
-    def __init__(self, brackets='', pipe=False):
+    validChars = string.ascii.translate(string.ascii, '\x00\r\n \t')
+    def __init__(self, brackets='', pipe=False, quotes='"'):
         if brackets:
             self.validChars = self.validChars.translate(string.ascii, brackets)
             self.left = brackets[0]
@@ -241,8 +240,9 @@ class Tokenizer(object):
         self.pipe = pipe
         if self.pipe:
             self.validChars = self.validChars.translate(string.ascii, '|')
-        else:
-            assert '|' in self.validChars
+        self.quotes = quotes
+        self.validChars = self.validChars.translate(string.ascii, quotes)
 
     def _handleToken(self, token):
         if token[0] == token[-1] and token[0] in self.quotes:
@@ -319,9 +319,10 @@ def tokenize(s, channel=None):
         brackets = conf.get(nested.brackets, channel)
         if conf.get(nested.pipeSyntax, channel): # No nesting, no pipe.
             pipe = True
+    quotes = conf.get(conf.supybot.commands.quotes, channel)
     start = time.time()
     try:
-        ret = Tokenizer(brackets=brackets, pipe=pipe).tokenize(s)
+        ret = Tokenizer(brackets=brackets,pipe=pipe,quotes=quotes).tokenize(s)
         log.stat('tokenize took %s seconds.' % (time.time() - start))
         return ret
     except ValueError, e:
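
For context, the mechanism behind validChars: the Tokenizer starts from a full character table and deletes every character that may not appear in a bare token, and the configured quote characters are now deleted the same way the brackets and the pipe are, so reaching a quote always ends the current token. string.ascii here appears to be a Supybot-specific alias for a 256-character translation table rather than anything in the standard string module; that reading is an assumption. A standalone Python 2 sketch of the same str.translate trick, not code from this commit:

    import string

    table = string.maketrans('', '')                      # full 256-character identity table
    validChars = table.translate(table, '\x00\r\n \t')    # delete NUL, newlines, space, tab
    validChars = validChars.translate(table, '`\'')       # delete the configured quote characters

    print '`' in validChars    # False: a backtick can no longer sit inside a bare token
    print 'a' in validChars    # True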


@@ -502,6 +502,18 @@ registerGlobalValue(supybot, 'flush',
 # supybot.commands. For stuff relating to commands.
 ###
 registerGroup(supybot, 'commands')
+
+class ValidQuotes(registry.Value):
+    """Value must consist solely of \", ', and ` characters."""
+    def setValue(self, v):
+        if [c for c in v if c not in '"`\'']:
+            self.error()
+        super(ValidQuotes, self).setValue(v)
+
+registerChannelValue(supybot.commands, 'quotes',
+    ValidQuotes('"', """Determines what characters are valid for quoting
+    arguments to commands in order to prevent them from being tokenized.
+    """))
 # This is a GlobalValue because bot owners should be able to say, "There will
 # be no nesting at all on this bot." Individual channels can just set their
 # brackets to the empty string.
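
The registry side is a small validating Value subclass: setValue refuses any string containing a character other than double quote, single quote, or backtick, calling self.error() (which should raise the registry's invalid-value error and leave the old setting untouched) before the value is stored. The check itself, pulled out into a hypothetical helper for illustration:

    def invalidQuoteChars(v):
        # Non-empty result means v contains a character ValidQuotes rejects.
        return [c for c in v if c not in '"`\'']

    print invalidQuoteChars('`\'')    # [] -> accepted, setValue proceeds
    print invalidQuoteChars('!')      # ['!'] -> rejected, self.error() is called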


@@ -118,6 +118,26 @@ class TokenizerTestCase(SupyTestCase):
         self.assertEqual(tokenize('foo bar | baz'),
                          ['foo', 'bar', '|', 'baz'])
 
+    def testQuoteConfiguration(self):
+        f = callbacks.tokenize
+        self.assertEqual(f('[foo]'), [['foo']])
+        self.assertEqual(f('"[foo]"'), ['[foo]'])
+        try:
+            original = conf.supybot.commands.quotes()
+            conf.supybot.commands.quotes.setValue('`')
+            self.assertEqual(f('[foo]'), [['foo']])
+            self.assertEqual(f('`[foo]`'), ['[foo]'])
+            conf.supybot.commands.quotes.setValue('\'')
+            self.assertEqual(f('[foo]'), [['foo']])
+            self.assertEqual(f('\'[foo]\''), ['[foo]'])
+            conf.supybot.commands.quotes.setValue('`\'')
+            self.assertEqual(f('[foo]'), [['foo']])
+            self.assertEqual(f('`[foo]`'), ['[foo]'])
+            self.assertEqual(f('[foo]'), [['foo']])
+            self.assertEqual(f('\'[foo]\''), ['[foo]'])
+        finally:
+            conf.supybot.commands.quotes.setValue(original)
+
     def testBold(self):
         s = '\x02foo\x02'
         self.assertEqual(tokenize(s), [s])