Skip to content

Commit 997f95b

Browse files
committed
Change argument order to match order of all other functions
1 parent 954ba46 commit 997f95b

File tree

3 files changed: +9 additions, -10 deletions

sqlparse/engine/grouping.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -195,7 +195,8 @@ def group_comments(tlist):
195195
token = tlist.token_next_by(t=T.Comment)
196196
while token:
197197
end = tlist.token_not_matching(
198-
tlist.token_index(token) + 1, lambda tk: imt(tk, t=T.Comment) or tk.is_whitespace())
198+
lambda tk: imt(tk, t=T.Comment) or tk.is_whitespace(),
199+
idx=tlist.token_index(token) + 1)
199200
if end is not None:
200201
end = tlist.token_prev(tlist.token_index(end), False)
201202
token = tlist.group_tokens_between(sql.Comment, token, end)

sqlparse/sql.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -269,12 +269,12 @@ def token_next_by(self, i=None, m=None, t=None, idx=0, end=None):
269269
funcs = lambda tk: imt(tk, i, m, t)
270270
return self._token_matching(funcs, idx, end)
271271

272-
def token_not_matching(self, idx, funcs):
272+
def token_not_matching(self, funcs, idx):
273273
funcs = (funcs,) if not isinstance(funcs, (list, tuple)) else funcs
274274
funcs = [lambda tk: not func(tk) for func in funcs]
275275
return self._token_matching(funcs, idx)
276276

277-
def token_matching(self, idx, funcs):
277+
def token_matching(self, funcs, idx):
278278
return self._token_matching(funcs, idx)
279279

280280
def token_idx_prev(self, idx, skip_ws=True):

tests/test_tokenize.py

Lines changed: 5 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -113,14 +113,12 @@ def test_token_matching(self):
113113
t1 = sql.Token(T.Keyword, 'foo')
114114
t2 = sql.Token(T.Punctuation, ',')
115115
x = sql.TokenList([t1, t2])
116-
self.assertEqual(x.token_matching(0, [lambda t: t.ttype is T.Keyword]),
117-
t1)
118116
self.assertEqual(x.token_matching(
119-
0,
120-
[lambda t: t.ttype is T.Punctuation]),
121-
t2)
122-
self.assertEqual(x.token_matching(1, [lambda t: t.ttype is T.Keyword]),
123-
None)
117+
[lambda t: t.ttype is T.Keyword], 0), t1)
118+
self.assertEqual(x.token_matching(
119+
[lambda t: t.ttype is T.Punctuation], 0), t2)
120+
self.assertEqual(x.token_matching(
121+
[lambda t: t.ttype is T.Keyword], 1), None)
124122

125123

126124
class TestStream(unittest.TestCase):

0 commit comments

Comments (0)