commit 315d8710dc (parent c07468e275)

Added pipe syntax.
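In short: a `|` between commands now nests the command on its left as the last argument of the command on its right, shell-style. Per the new tests in the final hunk, `tokenize('foo | bar')` yields `['bar', ['foo']]` and `tokenize('foo | bar | baz')` yields `['baz', ['bar', ['foo']]]`. The change has two parts: `|` is removed from the characters allowed inside a token, and the tokenizer loop learns to stack piped segments and fold them together at the end.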
@@ -192,7 +192,7 @@ class Tokenizer:
     #
     # These are the characters valid in a token. Everything printable except
     # double-quote, left-bracket, and right-bracket.
-    validChars = string.ascii[33:].translate(string.ascii, '"[]')
+    validChars = string.ascii[33:].translate(string.ascii, '"[]|')
     def __init__(self, tokens=''):
         self.validChars = self.validChars.translate(string.ascii, tokens)
 
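For context, Python 2's `str.translate(table, deletechars)` deletes every character in `deletechars` before mapping through `table`, so the changed line means "all printable characters except the delimiters", with `|` newly joining `"`, `[`, and `]`. Below is a minimal Python 3 re-creation of that character set; it assumes `string.ascii` in this codebase is a 256-character identity table (it is not part of the standard `string` module, so the sketch builds its own stand-in):

# Stand-in for the project's string.ascii: every byte value as a character.
all_chars = ''.join(map(chr, range(256)))

# Equivalent of string.ascii[33:].translate(string.ascii, '"[]|'):
# printable characters from '!' upward, minus the four delimiters.
valid_chars = ''.join(c for c in all_chars[33:] if c not in '"[]|')

assert '|' not in valid_chars   # '|' now ends a word, like '[' and ']'
assert 'a' in valid_chars       # ordinary characters still form tokens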
@@ -224,17 +224,29 @@ class Tokenizer:
         lexer.quotes = '"'
         lexer.wordchars = self.validChars
         args = []
+        ends = []
         while True:
             token = lexer.get_token()
             #debug.printf(repr(token))
             if not token:
                 break
+            elif token == '|':
+                if not args:
+                    raise SyntaxError, '"|" with nothing preceding'
+                ends.append(args)
+                args = []
             elif token == '[':
                 args.append(self.insideBrackets(lexer))
             elif token == ']':
                 raise SyntaxError, 'Spurious "["'
             else:
                 args.append(self.handleToken(token))
+        if ends:
+            if not args:
+                raise SyntaxError, '"|" with nothing following'
+            args.append(ends.pop())
+            while ends:
+                args[-1].append(ends.pop())
         return args
 
 def tokenize(s):
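To see why the `ends` bookkeeping produces the nesting the tests expect, here is a self-contained sketch of just the folding logic, written against a pre-split token list rather than the real shlex-based lexer (and in modern Python, so `raise SyntaxError(...)` replaces the Python 2 `raise SyntaxError, ...` form used in the diff):

def fold_pipes(tokens):
    # Mirrors the loop above: each '|' pushes the arguments collected so
    # far onto `ends` and starts a fresh argument list.
    args, ends = [], []
    for token in tokens:
        if token == '|':
            if not args:
                raise SyntaxError('"|" with nothing preceding')
            ends.append(args)
            args = []
        else:
            args.append(token)
    # Fold right-to-left: the rightmost command stays outermost, and each
    # earlier segment is nested as the last argument of the one after it.
    if ends:
        if not args:
            raise SyntaxError('"|" with nothing following')
        args.append(ends.pop())
        while ends:
            args[-1].append(ends.pop())
    return args

print(fold_pipes(['foo', '|', 'bar']))              # ['bar', ['foo']]
print(fold_pipes(['foo', '|', 'bar', '|', 'baz']))  # ['baz', ['bar', ['foo']]]

The nested-list result matches how bracketed subcommands are represented (`insideBrackets` also appends a nested list), which presumably makes `foo | bar` behave like `bar [foo]`.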
@@ -81,6 +81,17 @@ class TokenizerTestCase(unittest.TestCase):
         self.assertRaises(SyntaxError, tokenize, '[foo') #]
         self.assertRaises(SyntaxError, tokenize, '"foo') #"
 
+    def testPipe(self):
+        self.assertRaises(SyntaxError, tokenize, '| foo')
+        self.assertRaises(SyntaxError, tokenize, 'foo ||bar')
+        self.assertRaises(SyntaxError, tokenize, 'bar |')
+        self.assertEqual(tokenize('foo | bar'), ['bar', ['foo']])
+        self.assertEqual(tokenize('foo | bar | baz'), ['baz', ['bar',['foo']]])
+        self.assertEqual(tokenize('foo bar | baz'), ['baz', ['foo', 'bar']])
+        self.assertEqual(tokenize('foo | bar baz'), ['bar', 'baz', ['foo']])
+        self.assertEqual(tokenize('foo bar | baz quux'),
+                         ['baz', 'quux', ['foo', 'bar']])
+
 
 class FunctionsTestCase(unittest.TestCase):
     def testCanonicalName(self):