Added memoization optimization to tokenize function.

This commit is contained in:
Jeremy Fincher 2003-10-20 04:17:14 +00:00
parent 06cd9ae285
commit 68fd496516

View File

@ -284,19 +284,19 @@ def tokenize(s):
     debug.msg('tokenize took %s seconds.' % (time.time() - start), 'verbose')
     return _lastTokenizeResult

-def tokenize(s):
-    """A utility function to create a Tokenizer and tokenize a string."""
-    start = time.time()
-    try:
-        if conf.enablePipeSyntax:
-            tokens = '|'
-        else:
-            tokens = ''
-        args = Tokenizer(tokens).tokenize(s)
-    except ValueError, e:
-        raise SyntaxError, str(e)
-    #debug.msg('tokenize took %s seconds.' % (time.time() - start), 'verbose')
-    return args
+## def tokenize(s):
+##     """A utility function to create a Tokenizer and tokenize a string."""
+##     start = time.time()
+##     try:
+##         if conf.enablePipeSyntax:
+##             tokens = '|'
+##         else:
+##             tokens = ''
+##         args = Tokenizer(tokens).tokenize(s)
+##     except ValueError, e:
+##         raise SyntaxError, str(e)
+##     #debug.msg('tokenize took %s seconds.' % (time.time() - start), 'verbose')
+##     return args

 def getCommands(tokens):
     """Given tokens as output by tokenize, returns the command names."""