Skip to content

Commit 56b28dc

Browse files
committed
Make use of token_index more obvious
1 parent 5002bfa commit 56b28dc

File tree

3 files changed

+9
-15
lines changed

3 files changed

+9
-15
lines changed

sqlparse/engine/grouping.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -64,7 +64,9 @@ def _group_matching(tlist, cls):
6464
# this indicates invalid sql and unbalanced tokens.
6565
# instead of break, continue in case other "valid" groups exist
6666
continue
67-
tlist.group_tokens(cls, open_token, token)
67+
oidx = tlist.token_index(open_token)
68+
cidx = tlist.token_index(token)
69+
tlist.group_tokens(cls, oidx, cidx)
6870

6971

7072
def group_if(tlist):
@@ -196,10 +198,9 @@ def group_parenthesis(tlist):
196198
def group_comments(tlist):
197199
tidx, token = tlist.token_next_by(t=T.Comment)
198200
while token:
199-
end = tlist.token_not_matching(
201+
eidx, end = tlist.token_not_matching(
200202
lambda tk: imt(tk, t=T.Comment) or tk.is_whitespace(), idx=tidx)
201203
if end is not None:
202-
eidx = tlist.token_index(end)
203204
eidx, end = tlist.token_prev(eidx, skip_ws=False)
204205
tlist.group_tokens(sql.Comment, tidx, eidx)
205206

@@ -218,7 +219,8 @@ def group_where(tlist):
218219
end = tlist.tokens[eidx - 1]
219220
# TODO: convert this to eidx instead of end token.
220221
# i think above values are len(tlist) and eidx-1
221-
tlist.group_tokens(sql.Where, tidx, end)
222+
eidx = tlist.token_index(end)
223+
tlist.group_tokens(sql.Where, tidx, eidx)
222224
tidx, token = tlist.token_next_by(m=sql.Where.M_OPEN, idx=tidx)
223225

224226

sqlparse/filters/reindent.py

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -62,7 +62,6 @@ def _next_token(self, tlist, idx=-1):
6262
def _split_kwds(self, tlist):
6363
tidx, token = self._next_token(tlist)
6464
while token:
65-
tidx = tlist.token_index(token)
6665
pidx, prev_ = tlist.token_prev(tidx, skip_ws=False)
6766
uprev = text_type(prev_)
6867

sqlparse/sql.py

Lines changed: 3 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -248,7 +248,7 @@ def token_next_by(self, i=None, m=None, t=None, idx=-1, end=None):
248248
def token_not_matching(self, funcs, idx):
249249
funcs = (funcs,) if not isinstance(funcs, (list, tuple)) else funcs
250250
funcs = [lambda tk: not func(tk) for func in funcs]
251-
return self._token_matching(funcs, idx)[1]
251+
return self._token_matching(funcs, idx)
252252

253253
def token_matching(self, funcs, idx):
254254
return self._token_matching(funcs, idx)[1]
@@ -297,13 +297,9 @@ def token_index(self, token, start=0):
297297
def group_tokens(self, grp_cls, start, end, include_end=True,
298298
extend=False):
299299
"""Replace tokens by an instance of *grp_cls*."""
300-
if isinstance(start, int):
301-
start_idx = start
302-
start = self.tokens[start_idx]
303-
else:
304-
start_idx = self.token_index(start)
300+
start_idx = start
301+
start = self.tokens[start_idx]
305302

306-
end = end if isinstance(end, int) else self.token_index(end, start_idx)
307303
end_idx = end + include_end
308304

309305
# will be needed later for new group_clauses
@@ -390,9 +386,6 @@ def get_parent_name(self):
390386
def _get_first_name(self, idx=None, reverse=False, keywords=False):
391387
"""Returns the name of the first token with a name"""
392388

393-
if idx and not isinstance(idx, int):
394-
idx = self.token_index(idx) + 1
395-
396389
tokens = self.tokens[idx:] if idx else self.tokens
397390
tokens = reversed(tokens) if reverse else tokens
398391
types = [T.Name, T.Wildcard, T.String.Symbol]

0 commit comments

Comments (0)