@@ -24,14 +24,17 @@ class Token(object):
     the type of the token.
     """

-    __slots__ = ('value', 'ttype', 'parent', 'normalized', 'is_keyword')
+    __slots__ = ('value', 'ttype', 'parent', 'normalized', 'is_keyword',
+                 'is_group', 'is_whitespace')

     def __init__(self, ttype, value):
         value = text_type(value)
         self.value = value
         self.ttype = ttype
         self.parent = None
+        self.is_group = False
         self.is_keyword = ttype in T.Keyword
+        self.is_whitespace = self.ttype in T.Whitespace
         self.normalized = value.upper() if self.is_keyword else value

     def __str__(self):
@@ -96,14 +99,6 @@ def match(self, ttype, values, regex=False):

         return self.normalized in values

-    def is_group(self):
-        """Returns ``True`` if this object has children."""
-        return False
-
-    def is_whitespace(self):
-        """Return ``True`` if this token is a whitespace token."""
-        return self.ttype in T.Whitespace
-
     def within(self, group_cls):
         """Returns ``True`` if this token is within *group_cls*.

@@ -145,6 +140,7 @@ def __init__(self, tokens=None):
         self.tokens = tokens or []
         [setattr(token, 'parent', self) for token in tokens]
         super(TokenList, self).__init__(None, text_type(self))
+        self.is_group = True

     def __str__(self):
         return ''.join(token.value for token in self.flatten())
@@ -173,7 +169,7 @@ def _pprint_tree(self, max_depth=None, depth=0, f=None):
             print("{indent}{idx:2d} {cls} {q}{value}{q}"
                   .format(**locals()), file=f)

-            if token.is_group() and (max_depth is None or depth < max_depth):
+            if token.is_group and (max_depth is None or depth < max_depth):
                 token._pprint_tree(max_depth, depth + 1, f)

     def get_token_at_offset(self, offset):
@@ -191,18 +187,15 @@ def flatten(self):
         This method is recursively called for all child tokens.
         """
         for token in self.tokens:
-            if token.is_group():
+            if token.is_group:
                 for item in token.flatten():
                     yield item
             else:
                 yield token

-    def is_group(self):
-        return True
-
     def get_sublists(self):
         for token in self.tokens:
-            if token.is_group():
+            if token.is_group:
                 yield token

     @property
@@ -241,7 +234,7 @@ def token_first(self, skip_ws=True, skip_cm=False):
         ignored too.
         """
         # this on is inconsistent, using Comment instead of T.Comment...
-        funcs = lambda tk: not ((skip_ws and tk.is_whitespace()) or
+        funcs = lambda tk: not ((skip_ws and tk.is_whitespace) or
                                 (skip_cm and imt(tk, t=T.Comment, i=Comment)))
         return self._token_matching(funcs)[1]

@@ -278,7 +271,7 @@ def token_next(self, idx, skip_ws=True, skip_cm=False, _reverse=False):
         if idx is None:
             return None, None
         idx += 1  # alot of code usage current pre-compensates for this
-        funcs = lambda tk: not ((skip_ws and tk.is_whitespace()) or
+        funcs = lambda tk: not ((skip_ws and tk.is_whitespace) or
                                 (skip_cm and imt(tk, t=T.Comment, i=Comment)))
         return self._token_matching(funcs, idx, reverse=_reverse)

@@ -296,7 +289,7 @@ def group_tokens(self, grp_cls, start, end, include_end=True,
         end_idx = end + include_end

         # will be needed later for new group_clauses
-        # while skip_ws and tokens and tokens[-1].is_whitespace():
+        # while skip_ws and tokens and tokens[-1].is_whitespace:
         #     tokens = tokens[:-1]

         if extend and isinstance(start, grp_cls):
@@ -471,7 +464,7 @@ def get_identifiers(self):
         Whitespaces and punctuations are not included in this generator.
         """
         for token in self.tokens:
-            if not (token.is_whitespace() or token.match(T.Punctuation, ',')):
+            if not (token.is_whitespace or token.match(T.Punctuation, ',')):
                 yield token

0 commit comments