Actually, that is rather unnecessary. Sorry for messing up so bad today. :/

This commit is contained in:
Stéphan Kochen 2004-02-18 16:29:44 +00:00
parent 52e7551493
commit 708ee91776

View File

@@ -187,15 +187,12 @@ class Tokenizer:
             token = lexer.get_token()
             if not token:
                 raise SyntaxError, 'Missing "]"'
-            elif conf.supybot.bracketSyntax():
-                if token == ']':
-                    return ret
-                elif token == '[':
-                    ret.append(self._insideBrackets(lexer))
-                else:
-                    ret.append(self._handleToken(token))
-            else:
-                ret.append(self._handleToken(token))
+            elif token == ']':
+                return ret
+            elif token == '[':
+                ret.append(self._insideBrackets(lexer))
+            else:
+                ret.append(self._handleToken(token))
         return ret

     def tokenize(self, s):