Actually, that is rather unnecessary. Sorry for messing up so badly today. :/

This commit is contained in:
Stéphan Kochen 2004-02-18 16:29:44 +00:00
parent 52e7551493
commit 708ee91776

View File

@ -187,15 +187,12 @@ class Tokenizer:
token = lexer.get_token()
if not token:
raise SyntaxError, 'Missing "]"'
elif conf.supybot.bracketSyntax():
if token == ']':
elif token == ']':
return ret
elif token == '[':
ret.append(self._insideBrackets(lexer))
else:
ret.append(self._handleToken(token))
else:
ret.append(self._handleToken(token))
return ret
def tokenize(self, s):