Skip to content

Commit 67dc823

Browse files
sjoerdjob authored and vmuriart committed
Use specialized token_idx_next_by in group_aliased.
The method group_aliased was making a lot of calls to token_index. By specializing token_next_by into token_idx_next_by, those calls to token_index became superfluous. Also use token_idx_next_by in group_identifier_list: it was making a lot of calls, which are now reduced by more than half.
1 parent 237575e commit 67dc823

File tree

2 files changed

+24
-6
lines changed

2 files changed

+24
-6
lines changed

sqlparse/engine/grouping.py

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -163,17 +163,16 @@ def group_identifier_list(tlist):
163163
(T.Keyword, T.Comment, T.Wildcard))
164164

165165
func = lambda t: imt(t, i=I_IDENT_LIST, m=M_ROLE, t=T_IDENT_LIST)
166-
token = tlist.token_next_by(m=M_COMMA)
167166

167+
tidx, token = tlist.token_idx_next_by(m=M_COMMA)
168168
while token:
169-
tidx = tlist.token_index(token)
170169
before, after = tlist.token_prev(tidx), tlist.token_next(tidx)
171170

172171
if func(before) and func(after):
173172
tidx = tlist.token_index(before)
174173
token = tlist.group_tokens_between(sql.IdentifierList, tidx, after, extend=True)
175174

176-
token = tlist.token_next_by(m=M_COMMA, idx=tidx + 1)
175+
tidx, token = tlist.token_idx_next_by(m=M_COMMA, idx=tidx + 1)
177176

178177

179178
def group_brackets(tlist):
@@ -217,13 +216,12 @@ def group_aliased(tlist):
217216
I_ALIAS = (sql.Parenthesis, sql.Function, sql.Case, sql.Identifier,
218217
) # sql.Operation)
219218

220-
token = tlist.token_next_by(i=I_ALIAS, t=T.Number)
219+
tidx, token = tlist.token_idx_next_by(i=I_ALIAS, t=T.Number)
221220
while token:
222-
tidx = tlist.token_index(token)
223221
next_ = tlist.token_next(tidx)
224222
if imt(next_, i=sql.Identifier):
225223
token = tlist.group_tokens_between(sql.Identifier, tidx, next_, extend=True)
226-
token = tlist.token_next_by(i=I_ALIAS, t=T.Number, idx=tidx + 1)
224+
tidx, token = tlist.token_idx_next_by(i=I_ALIAS, t=T.Number, idx=tidx + 1)
227225

228226

229227
def group_typecasts(tlist):

sqlparse/sql.py

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -225,6 +225,22 @@ def get_sublists(self):
225225
def _groupable_tokens(self):
226226
return self.tokens
227227

228+
def _token_idx_matching(self, funcs, start=0, end=None, reverse=False):
229+
"""next token that match functions"""
230+
if start is None:
231+
return None
232+
233+
if not isinstance(funcs, (list, tuple)):
234+
funcs = (funcs,)
235+
236+
iterable = enumerate(self.tokens[start:end], start=start)
237+
238+
for idx, token in iterable:
239+
for func in funcs:
240+
if func(token):
241+
return idx, token
242+
return None, None
243+
228244
def _token_matching(self, funcs, start=0, end=None, reverse=False):
229245
"""next token that match functions"""
230246
if start is None:
@@ -259,6 +275,10 @@ def token_first(self, ignore_whitespace=True, ignore_comments=False):
259275
(ignore_comments and imt(tk, i=Comment)))
260276
return self._token_matching(funcs)
261277

278+
def token_idx_next_by(self, i=None, m=None, t=None, idx=0, end=None):
279+
funcs = lambda tk: imt(tk, i, m, t)
280+
return self._token_idx_matching(funcs, idx, end)
281+
262282
def token_next_by(self, i=None, m=None, t=None, idx=0, end=None):
263283
funcs = lambda tk: imt(tk, i, m, t)
264284
return self._token_matching(funcs, idx, end)

0 commit comments

Comments
 (0)