@@ -255,12 +255,9 @@ def token_first(self, ignore_whitespace=True, ignore_comments=False):
255255 if *ignore_comments* is ``True`` (default: ``False``), comments are
256256 ignored too.
257257 """
258- for token in self .tokens :
259- if ignore_whitespace and token .is_whitespace ():
260- continue
261- if ignore_comments and imt (token , i = Comment ):
262- continue
263- return token
258+ funcs = lambda tk : not ((ignore_whitespace and tk .is_whitespace ()) or
259+ (ignore_comments and imt (tk , i = Comment )))
260+ return self ._token_matching (funcs )
264261
265262 def token_next_by (self , i = None , m = None , t = None , idx = 0 , end = None ):
266263 funcs = lambda tk : imt (tk , i , m , t )
@@ -274,110 +271,62 @@ def token_next_by_instance(self, idx, clss, end=None):
274271
275272 If no matching token can be found ``None`` is returned.
276273 """
277- if not isinstance (clss , (list , tuple )):
278- clss = (clss ,)
279-
280- for token in self .tokens [idx :end ]:
281- if isinstance (token , clss ):
282- return token
274+ funcs = lambda tk : imt (tk , i = clss )
275+ return self ._token_matching (funcs , idx , end )
283276
284277 def token_next_by_type (self , idx , ttypes ):
285278 """Returns next matching token by it's token type."""
286- if not isinstance (ttypes , (list , tuple )):
287- ttypes = [ttypes ]
288-
289- for token in self .tokens [idx :]:
290- if token .ttype in ttypes :
291- return token
279+ funcs = lambda tk : imt (tk , t = ttypes )
280+ return self ._token_matching (funcs , idx )
292281
293282 def token_next_match (self , idx , ttype , value , regex = False ):
294283 """Returns next token where it's ``match`` method returns ``True``."""
295- if not isinstance (idx , int ):
296- idx = self .token_index (idx )
297-
298- for n in range (idx , len (self .tokens )):
299- token = self .tokens [n ]
300- if token .match (ttype , value , regex ):
301- return token
284+ funcs = lambda tk : imt (tk , m = (ttype , value , regex ))
285+ return self ._token_matching (funcs , idx )
302286
303287 def token_not_matching (self , idx , funcs ):
304- for token in self .tokens [idx :]:
305- passed = False
306- for func in funcs :
307- if func (token ):
308- passed = True
309- break
310-
311- if not passed :
312- return token
288+ funcs = (funcs ,) if not isinstance (funcs , (list , tuple )) else funcs
289+ funcs = [lambda tk : not func (tk ) for func in funcs ]
290+ return self ._token_matching (funcs , idx )
313291
314292 def token_matching (self , idx , funcs ):
315- for token in self .tokens [idx :]:
316- for func in funcs :
317- if func (token ):
318- return token
293+ return self ._token_matching (funcs , idx )
319294
320295 def token_prev (self , idx , skip_ws = True ):
321296 """Returns the previous token relative to *idx*.
322297
323298 If *skip_ws* is ``True`` (the default) whitespace tokens are ignored.
324299 ``None`` is returned if there's no previous token.
325300 """
326- if idx is None :
327- return None
328-
329- if not isinstance (idx , int ):
330- idx = self .token_index (idx )
331-
332- while idx :
333- idx -= 1
334- if self .tokens [idx ].is_whitespace () and skip_ws :
335- continue
336- return self .tokens [idx ]
301+ if isinstance (idx , int ):
302+ idx += 1 # a lot of calling code currently pre-compensates for this
303+ funcs = lambda tk : not (tk .is_whitespace () and skip_ws )
304+ return self ._token_matching (funcs , idx , reverse = True )
337305
338306 def token_next (self , idx , skip_ws = True ):
339307 """Returns the next token relative to *idx*.
340308
341309 If *skip_ws* is ``True`` (the default) whitespace tokens are ignored.
342310 ``None`` is returned if there's no next token.
343311 """
344- if idx is None :
345- return None
346-
347- if not isinstance (idx , int ):
348- idx = self .token_index (idx )
349-
350- while idx < len (self .tokens ) - 1 :
351- idx += 1
352- if self .tokens [idx ].is_whitespace () and skip_ws :
353- continue
354- return self .tokens [idx ]
312+ if isinstance (idx , int ):
313+ idx += 1 # a lot of calling code currently pre-compensates for this
314+ funcs = lambda tk : not (tk .is_whitespace () and skip_ws )
315+ return self ._token_matching (funcs , idx )
355316
356317 def token_index (self , token , start = 0 ):
357318 """Return list index of token."""
358- if start > 0 :
359- # Performing `index` manually is much faster when starting
360- # in the middle of the list of tokens and expecting to find
361- # the token near to the starting index.
362- for i in range (start , len (self .tokens )):
363- if self .tokens [i ] == token :
364- return i
365- return - 1
366- return self .tokens .index (token )
367-
368- def tokens_between (self , start , end , exclude_end = False ):
319+ start = self .token_index (start ) if not isinstance (start , int ) else start
320+ return start + self .tokens [start :].index (token )
321+
322+ def tokens_between (self , start , end , include_end = True ):
369323 """Return all tokens between (and including) start and end.
370324
371- If *exclude_end* is ``True`` (default is ``False``) the end token
372- is included too.
325+ If *include_end* is ``False`` (default is ``True``) the end token
326+ is excluded.
373327 """
374- # FIXME(andi): rename exclude_end to inlcude_end
375- if exclude_end :
376- offset = 0
377- else :
378- offset = 1
379- end_idx = self .token_index (end ) + offset
380328 start_idx = self .token_index (start )
329+ end_idx = include_end + self .token_index (end )
381330 return self .tokens [start_idx :end_idx ]
382331
383332 def group_tokens (self , grp_cls , tokens , ignore_ws = False , extend = False ):
@@ -431,13 +380,12 @@ def get_alias(self):
431380 """Returns the alias for this identifier or ``None``."""
432381
433382 # "name AS alias"
434- kw = self .token_next_match ( 0 , T .Keyword , 'AS' )
383+ kw = self .token_next_by ( m = ( T .Keyword , 'AS' ) )
435384 if kw is not None :
436385 return self ._get_first_name (kw , keywords = True )
437386
438387 # "name alias" or "complicated column expression alias"
439- if len (self .tokens ) > 2 \
440- and self .token_next_by_type (0 , T .Whitespace ) is not None :
388+ if len (self .tokens ) > 2 and self .token_next_by (t = T .Whitespace ):
441389 return self ._get_first_name (reverse = True )
442390
443391 return None
0 commit comments