Make TokenList.value a property not an attribute.
The fact that a new value was being computed each time
TokenList.group_tokens() was called caused supra-linear runtime when
token grouping was enabled.

Address this by making TokenList.value a dynamically computed property
rather than a static attribute.
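
As a rough, self-contained sketch of the pattern (simplified stand-in classes, not the library's exact code; the real change is in the diff below), the eagerly cached attribute becomes a property that is recomputed from the child tokens on each access:

# Sketch with simplified stand-in classes; the real Token/TokenList in
# sqlparse/sql.py carry more state and methods.

class Token:
    def __init__(self, value):
        self.value = value
        self.parent = None


class TokenList:
    def __init__(self, tokens=None):
        self.tokens = tokens or []
        # Before the fix, an eager attribute was assigned here and had to be
        # refreshed after every group_tokens() call:
        #     self.value = str(self)

    def flatten(self):
        # Yield leaf tokens in document order.
        for token in self.tokens:
            if isinstance(token, TokenList):
                yield from token.flatten()
            else:
                yield token

    @property
    def value(self):
        # After the fix: derived from the current children on every read,
        # so grouping never has to rebuild and store a cached string.
        return ''.join(token.value for token in self.flatten())


tokens = TokenList([Token('SELECT'), Token(' '), Token('1')])
print(tokens.value)                 # -> 'SELECT 1'
tokens.tokens.append(Token(';'))
print(tokens.value)                 # -> 'SELECT 1;' (reflects the new child)

Because the property reads the current ``tokens`` list, grouping operations no longer have to recompute and reassign a cached string for every group they touch.
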
living180 committed Mar 30, 2023
1 parent 7de0ab1 commit ff4f391
Showing 1 changed file with 8 additions and 9 deletions: sqlparse/sql.py
@@ -48,6 +48,9 @@ class TokenBase:
     def __init__(self):
         self.parent = None
 
+    def __str__(self):
+        return self.value
+
     # Pending tokenlist __len__ bug fix
     # def __len__(self):
     #     return len(self.value)
@@ -151,9 +154,6 @@ def __init__(self, ttype, value):
         self.is_whitespace = ttype in T.Whitespace
         self.normalized = value.upper() if self.is_keyword else value
 
-    def __str__(self):
-        return self.value
-
     def _get_repr_name(self):
         return str(self.ttype).split('.')[-1]
 
@@ -165,11 +165,11 @@ def flatten(self):
 class TokenList(TokenBase):
     """A group of tokens.
 
-    It has two additional instance attributes, ``value``, which is the value of
-    the token list, and ``tokens``, which holds a list of child-tokens.
+    It has an additional instance attribute ``tokens`` which holds a
+    list of child-tokens.
     """
 
-    __slots__ = ('tokens', 'value')
+    __slots__ = 'tokens'
 
     is_group = True
     ttype = None
Expand All @@ -179,10 +179,10 @@ class TokenList(TokenBase):
def __init__(self, tokens=None):
super().__init__()
self.tokens = tokens or []
self.value = str(self)
[setattr(token, 'parent', self) for token in self.tokens]

def __str__(self):
@property
def value(self):
return ''.join(token.value for token in self.flatten())

@property
@@ -347,7 +347,6 @@ def group_tokens(self, grp_cls, start, end, include_end=True,
             grp = start
             grp.tokens.extend(subtokens)
             del self.tokens[start_idx + 1:end_idx]
-            grp.value = str(start)
         else:
             subtokens = self.tokens[start_idx:end_idx]
             grp = grp_cls(subtokens)
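
Externally the behaviour is unchanged: ``value`` on any ``TokenList`` (for example a parsed ``Statement``) still yields the joined text of its flattened child tokens, only now it is computed at access time. A quick check:

import sqlparse

stmt = sqlparse.parse('select a, b from t')[0]   # Statement is a TokenList
print(stmt.value)                                # -> 'select a, b from t'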
