From 315d8710dc80318d0e28e276f193a95f836873af Mon Sep 17 00:00:00 2001
From: Jeremy Fincher
Date: Sun, 7 Sep 2003 09:41:47 +0000
Subject: [PATCH] Added pipe syntax.

---
 src/callbacks.py       | 14 +++++++++++++-
 test/test_callbacks.py | 11 +++++++++++
 2 files changed, 24 insertions(+), 1 deletion(-)

diff --git a/src/callbacks.py b/src/callbacks.py
index 5c88c530c..47c0294ed 100644
--- a/src/callbacks.py
+++ b/src/callbacks.py
@@ -192,7 +192,7 @@ class Tokenizer:
     #
     # These are the characters valid in a token.  Everything printable except
     # double-quote, left-bracket, and right-bracket.
-    validChars = string.ascii[33:].translate(string.ascii, '"[]')
+    validChars = string.ascii[33:].translate(string.ascii, '"[]|')
     def __init__(self, tokens=''):
         self.validChars = self.validChars.translate(string.ascii, tokens)
 
@@ -224,17 +224,29 @@ class Tokenizer:
         lexer.quotes = '"'
         lexer.wordchars = self.validChars
         args = []
+        ends = []
         while True:
             token = lexer.get_token()
             #debug.printf(repr(token))
             if not token:
                 break
+            elif token == '|':
+                if not args:
+                    raise SyntaxError, '"|" with nothing preceding'
+                ends.append(args)
+                args = []
             elif token == '[':
                 args.append(self.insideBrackets(lexer))
             elif token == ']':
                 raise SyntaxError, 'Spurious "]"'
             else:
                 args.append(self.handleToken(token))
+        if ends:
+            if not args:
+                raise SyntaxError, '"|" with nothing following'
+            args.append(ends.pop())
+            while ends:
+                args[-1].append(ends.pop())
         return args
 
 def tokenize(s):
diff --git a/test/test_callbacks.py b/test/test_callbacks.py
index 7e40d4431..3c5bfc81b 100644
--- a/test/test_callbacks.py
+++ b/test/test_callbacks.py
@@ -81,6 +81,17 @@ class TokenizerTestCase(unittest.TestCase):
         self.assertRaises(SyntaxError, tokenize, '[foo') #]
         self.assertRaises(SyntaxError, tokenize, '"foo') #"
 
+    def testPipe(self):
+        self.assertRaises(SyntaxError, tokenize, '| foo')
+        self.assertRaises(SyntaxError, tokenize, 'foo ||bar')
+        self.assertRaises(SyntaxError, tokenize, 'bar |')
+        self.assertEqual(tokenize('foo | bar'), ['bar', ['foo']])
+        self.assertEqual(tokenize('foo | bar | baz'), ['baz', ['bar', ['foo']]])
+        self.assertEqual(tokenize('foo bar | baz'), ['baz', ['foo', 'bar']])
+        self.assertEqual(tokenize('foo | bar baz'), ['bar', 'baz', ['foo']])
+        self.assertEqual(tokenize('foo bar | baz quux'),
+                         ['baz', 'quux', ['foo', 'bar']])
+
 class FunctionsTestCase(unittest.TestCase):
     def testCanonicalName(self):
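
For anyone who wants to poke at the new pipe semantics outside the bot,
here is a minimal, self-contained Python 3 sketch of the folding step the
patch adds.  It is an illustration, not Supybot's API: the name
tokenize_pipes is hypothetical, shlex stands in for the Tokenizer's own
lexer setup, and the bracket syntax is left out.

import shlex

def tokenize_pipes(s):
    # Hypothetical stand-in for Tokenizer.tokenize: split s on '|' and
    # nest each left-hand command as the last argument of the command
    # to its right.  Bracket handling from the real tokenizer is omitted.
    lexer = shlex.shlex(s)
    lexer.commenters = ''       # don't treat '#' as a comment character
    args = []                   # arguments of the command being read now
    ends = []                   # argument lists to the left of each '|'
    while True:
        token = lexer.get_token()
        if not token:
            break
        elif token == '|':
            if not args:
                raise SyntaxError('"|" with nothing preceding')
            ends.append(args)
            args = []
        else:
            args.append(token)
    if ends:
        if not args:
            raise SyntaxError('"|" with nothing following')
        inner = args            # outermost list is the right-most command
        while ends:
            prev = ends.pop()   # command immediately to the left
            inner.append(prev)  # it becomes the nested last argument
            inner = prev        # descend so longer chains keep nesting
    return args

print(tokenize_pipes('foo | bar'))           # ['bar', ['foo']]
print(tokenize_pipes('foo | bar | baz'))     # ['baz', ['bar', ['foo']]]
print(tokenize_pipes('foo bar | baz quux'))  # ['baz', 'quux', ['foo', 'bar']]

The right-most command ends up as the outermost list, mirroring shell
pipes: in "foo | bar", foo runs first and its output becomes the last
argument of bar, which is exactly what the new testPipe cases assert.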