Skip to content

Commit f487632

Browse files
committed
PEP8'ified the code a bit.
1 parent 403b814 commit f487632

File tree

15 files changed

+106
-86
lines changed

15 files changed

+106
-86
lines changed

pytest.ini

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
[pytest]
2+
pep8ignore =
3+
extras/* ALL
4+
examples/* ALL
5+
docs/* ALL
6+
* E125 E127

sqlparse/__init__.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -61,7 +61,10 @@ def split(sql):
6161
stack.split_statements = True
6262
return [unicode(stmt) for stmt in stack.run(sql)]
6363

64+
6465
from sqlparse.engine.filter import StatementFilter
66+
67+
6568
def split2(stream):
6669
splitter = StatementFilter()
6770
return list(splitter.process(None, stream))

sqlparse/engine/grouping.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -155,7 +155,8 @@ def _consume_cycle(tl, i):
155155
def _next_token(tl, i):
156156
# chooses the next token. if two tokens are found then the
157157
# first is returned.
158-
t1 = tl.token_next_by_type(i, (T.String.Symbol, T.String.Single, T.Name))
158+
t1 = tl.token_next_by_type(
159+
i, (T.String.Symbol, T.String.Single, T.Name))
159160
t2 = tl.token_next_by_instance(i, sql.Function)
160161
if t1 and t2:
161162
i1 = tl.token_index(t1)

sqlparse/exceptions.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55

66
"""Exceptions used in this package."""
77

8+
89
class SQLParseError(Exception):
910
"""Base class for exceptions in this module."""
10-
11-

sqlparse/filters.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -329,8 +329,8 @@ def _process_parenthesis(self, tlist):
329329
self.indent += 1
330330
tlist.tokens.insert(0, self.nl())
331331
indented = True
332-
num_offset = self._get_offset(tlist.token_next_match(0,
333-
T.Punctuation, '('))
332+
num_offset = self._get_offset(
333+
tlist.token_next_match(0, T.Punctuation, '('))
334334
self.offset += num_offset
335335
self._process_default(tlist, stmts=not indented)
336336
if indented:
@@ -397,8 +397,8 @@ def process(self, stack, stmt):
397397
nl = '\n'
398398
else:
399399
nl = '\n\n'
400-
stmt.tokens.insert(0,
401-
sql.Token(T.Whitespace, nl))
400+
stmt.tokens.insert(
401+
0, sql.Token(T.Whitespace, nl))
402402
if self._last_stmt != stmt:
403403
self._last_stmt = stmt
404404

@@ -407,7 +407,7 @@ def process(self, stack, stmt):
407407
class RightMarginFilter:
408408

409409
keep_together = (
410-
# sql.TypeCast, sql.Identifier, sql.Alias,
410+
# sql.TypeCast, sql.Identifier, sql.Alias,
411411
)
412412

413413
def __init__(self, width=79):

sqlparse/functions.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,9 @@
66
Several utility functions to extract info from the SQL sentences
77
'''
88

9-
from sqlparse.filters import ColumnsSelect, Limit
9+
from sqlparse.filters import ColumnsSelect, Limit
1010
from sqlparse.pipeline import Pipeline
11-
from sqlparse.tokens import Keyword, Whitespace
11+
from sqlparse.tokens import Keyword, Whitespace
1212

1313

1414
def getlimit(stream):

sqlparse/keywords.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@
3636
'BOTH': tokens.Keyword,
3737
'BREADTH': tokens.Keyword,
3838

39-
# 'C': tokens.Keyword, # most likely this is an alias
39+
# 'C': tokens.Keyword, # most likely this is an alias
4040
'CACHE': tokens.Keyword,
4141
'CALL': tokens.Keyword,
4242
'CALLED': tokens.Keyword,
@@ -172,7 +172,7 @@
172172
'FULL': tokens.Keyword,
173173
'FUNCTION': tokens.Keyword,
174174

175-
# 'G': tokens.Keyword,
175+
# 'G': tokens.Keyword,
176176
'GENERAL': tokens.Keyword,
177177
'GENERATED': tokens.Keyword,
178178
'GET': tokens.Keyword,
@@ -219,7 +219,7 @@
219219
'ISOLATION': tokens.Keyword,
220220
'ITERATE': tokens.Keyword,
221221

222-
# 'K': tokens.Keyword,
222+
# 'K': tokens.Keyword,
223223
'KEY': tokens.Keyword,
224224
'KEY_MEMBER': tokens.Keyword,
225225
'KEY_TYPE': tokens.Keyword,
@@ -244,7 +244,7 @@
244244
'LOCK': tokens.Keyword,
245245
'LOWER': tokens.Keyword,
246246

247-
# 'M': tokens.Keyword,
247+
# 'M': tokens.Keyword,
248248
'MAP': tokens.Keyword,
249249
'MATCH': tokens.Keyword,
250250
'MAXVALUE': tokens.Keyword,
@@ -519,7 +519,7 @@
519519
'INT8': tokens.Name.Builtin,
520520
'SERIAL8': tokens.Name.Builtin,
521521
'TEXT': tokens.Name.Builtin,
522-
}
522+
}
523523

524524

525525
KEYWORDS_COMMON = {
@@ -562,4 +562,4 @@
562562
'MIN': tokens.Keyword,
563563
'MAX': tokens.Keyword,
564564
'DISTINCT': tokens.Keyword,
565-
}
565+
}

sqlparse/lexer.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
from sqlparse.keywords import KEYWORDS, KEYWORDS_COMMON
1919
from cStringIO import StringIO
2020

21+
2122
class include(str):
2223
pass
2324

@@ -158,7 +159,7 @@ class Lexer(object):
158159
stripall = False
159160
stripnl = False
160161
tabsize = 0
161-
flags = re.IGNORECASE|re.UNICODE
162+
flags = re.IGNORECASE | re.UNICODE
162163
bufsize = 4096
163164

164165
tokens = {

tests/test_filters.py

Lines changed: 8 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
import unittest
77

88
from sqlparse.filters import StripWhitespace, Tokens2Unicode
9-
from sqlparse.lexer import tokenize
9+
from sqlparse.lexer import tokenize
1010

1111

1212
class Test__StripWhitespace(unittest.TestCase):
@@ -49,17 +49,20 @@ class Test__StripWhitespace(unittest.TestCase):
4949
LIMIT 1"""
5050

5151
def test_StripWhitespace1(self):
52-
self.assertEqual(Tokens2Unicode(StripWhitespace(tokenize(self.sql))),
52+
self.assertEqual(
53+
Tokens2Unicode(StripWhitespace(tokenize(self.sql))),
5354
'INSERT INTO dir_entries(type)VALUES(:type);INSERT INTO '
5455
'directories(inode)VALUES(:inode)LIMIT 1')
5556

5657
def test_StripWhitespace2(self):
57-
self.assertEqual(Tokens2Unicode(StripWhitespace(tokenize(self.sql2))),
58+
self.assertEqual(
59+
Tokens2Unicode(StripWhitespace(tokenize(self.sql2))),
5860
'SELECT child_entry,asdf AS inode,creation FROM links WHERE '
5961
'parent_dir==:parent_dir AND name==:name LIMIT 1')
6062

6163
def test_StripWhitespace3(self):
62-
self.assertEqual(Tokens2Unicode(StripWhitespace(tokenize(self.sql3))),
64+
self.assertEqual(
65+
Tokens2Unicode(StripWhitespace(tokenize(self.sql3))),
6366
'SELECT 0 AS st_dev,0 AS st_uid,0 AS st_gid,dir_entries.type AS '
6467
'st_mode,dir_entries.inode AS st_ino,COUNT(links.child_entry)AS '
6568
'st_nlink,:creation AS st_ctime,dir_entries.access AS st_atime,'
@@ -72,4 +75,4 @@ def test_StripWhitespace3(self):
7275

7376
if __name__ == "__main__":
7477
#import sys;sys.argv = ['', 'Test.testName']
75-
unittest.main()
78+
unittest.main()

tests/test_format.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -91,7 +91,7 @@ def test_option(self):
9191
self.assertRaises(SQLParseError, sqlparse.format, 'foo',
9292
reindent=True, indent_width='foo')
9393
self.assertRaises(SQLParseError, sqlparse.format, 'foo',
94-
reindent=True, indent_width= -12)
94+
reindent=True, indent_width=-12)
9595

9696
def test_stmts(self):
9797
f = lambda sql: sqlparse.format(sql, reindent=True)

0 commit comments

Comments
 (0)