Skip to content

Commit 81e408a

Browse files
committed
Remove redundant functions
1 parent e4c5625 commit 81e408a

2 files changed

Lines changed: 2 additions & 16 deletions

File tree

sqlparse/__init__.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@ def parsestream(stream, encoding=None):
3535
:returns: A generator of :class:`~sqlparse.sql.Statement` instances.
3636
"""
3737
stack = engine.FilterStack()
38-
stack.full_analyze()
38+
stack.enable_grouping()
3939
return stack.run(stream, encoding)
4040

4141

sqlparse/engine/__init__.py

Lines changed: 1 addition & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,6 @@
99
from sqlparse.engine import grouping
1010
from sqlparse.engine.filter import StatementFilter
1111

12-
# XXX remove this when cleanup is complete
13-
Filter = object
14-
1512

1613
class FilterStack(object):
1714

@@ -22,20 +19,9 @@ def __init__(self):
2219
self.split_statements = False
2320
self._grouping = False
2421

25-
def _flatten(self, stream):
26-
for token in stream:
27-
if token.is_group():
28-
for t in self._flatten(token.tokens):
29-
yield t
30-
else:
31-
yield token
32-
3322
def enable_grouping(self):
3423
self._grouping = True
3524

36-
def full_analyze(self):
37-
self.enable_grouping()
38-
3925
def run(self, sql, encoding=None):
4026
stream = lexer.tokenize(sql, encoding)
4127
# Process token stream
@@ -71,7 +57,7 @@ def _run1(stream):
7157

7258
def _run2(stream):
7359
for stmt in stream:
74-
stmt.tokens = list(self._flatten(stmt.tokens))
60+
stmt.tokens = list(stmt.flatten())
7561
for filter_ in self.postprocess:
7662
stmt = filter_.process(self, stmt)
7763
yield stmt

0 commit comments

Comments (0)