Added memoization optimization to tokenize function.

This commit is contained in:
Jeremy Fincher 2003-10-20 04:26:37 +00:00
parent 68fd496516
commit eb31db6277

View File

@@ -41,6 +41,7 @@ how to use them.
 import fix
 import re
+import copy
 import sets
 import time
 import shlex
@@ -281,22 +282,8 @@ def tokenize(s):
         _lastTokenized = None
         _lastTokenizedResult = None
         raise SyntaxError, str(e)
-    debug.msg('tokenize took %s seconds.' % (time.time() - start), 'verbose')
+    #debug.msg('tokenize took %s seconds.' % (time.time() - start), 'verbose')
-    return _lastTokenizeResult
+    return copy.deepcopy(_lastTokenizeResult)
-## def tokenize(s):
-##     """A utility function to create a Tokenizer and tokenize a string."""
-##     start = time.time()
-##     try:
-##         if conf.enablePipeSyntax:
-##             tokens = '|'
-##         else:
-##             tokens = ''
-##         args = Tokenizer(tokens).tokenize(s)
-##     except ValueError, e:
-##         raise SyntaxError, str(e)
-##     #debug.msg('tokenize took %s seconds.' % (time.time() - start), 'verbose')
-##     return args
 def getCommands(tokens):
     """Given tokens as output by tokenize, returns the command names."""