|
3 | 3 | import unittest |
4 | 4 | import types |
5 | 5 |
|
| 6 | +import sqlparse |
6 | 7 | from sqlparse import lexer |
| 8 | +from sqlparse import sql |
7 | 9 | from sqlparse.tokens import * |
8 | 10 |
|
9 | 11 |
|
@@ -38,3 +40,47 @@ def test_linebreaks(self): # issue1 |
38 | 40 | sql = 'foo\r\nbar\n' |
39 | 41 | tokens = lexer.tokenize(sql) |
40 | 42 | self.assertEqual(''.join(str(x[1]) for x in tokens), sql) |
| 43 | + |
| 44 | + |
class TestToken(unittest.TestCase):
    """Unit tests for the plain sql.Token type."""

    def test_str(self):
        # str() must yield the token's raw value unchanged.
        tok = sql.Token(None, 'FoO')
        self.assertEqual('FoO', str(tok))

    def test_repr(self):
        # Short values appear verbatim in the repr prefix.
        tok = sql.Token(Keyword, 'foo')
        expected = "<Keyword 'foo' at 0x"
        self.assertEqual(expected, repr(tok)[:len(expected)])
        # Long values are truncated with an ellipsis in the repr.
        tok = sql.Token(Keyword, '1234567890')
        expected = "<Keyword '123456...' at 0x"
        self.assertEqual(expected, repr(tok)[:len(expected)])

    def test_flatten(self):
        # flatten() on a leaf token is a generator yielding the token itself.
        tok = sql.Token(Keyword, 'foo')
        flattened = tok.flatten()
        self.assertEqual(types.GeneratorType, type(flattened))
        self.assertEqual([tok], list(flattened))
| 65 | + |
| 66 | + |
class TestTokenList(unittest.TestCase):
    """Unit tests for sql.TokenList lookup helpers."""

    def test_token_first(self):
        parsed = sqlparse.parse(' select foo')[0]
        # By default, leading whitespace is skipped.
        self.assertEqual('select', parsed.token_first().value)
        # With ignore_whitespace=False the whitespace token comes first.
        self.assertEqual(' ', parsed.token_first(ignore_whitespace=False).value)
        # An empty token list has no first token.
        self.assertEqual(None, sql.TokenList([]).token_first())

    def test_token_matching(self):
        kw_token = sql.Token(Keyword, 'foo')
        punct_token = sql.Token(Punctuation, ',')
        tlist = sql.TokenList([kw_token, punct_token])
        is_keyword = lambda t: t.ttype is Keyword
        is_punctuation = lambda t: t.ttype is Punctuation
        # Searching from index 0 finds each matching token.
        self.assertEqual(kw_token, tlist.token_matching(0, [is_keyword]))
        self.assertEqual(punct_token, tlist.token_matching(0, [is_punctuation]))
        # No keyword at or after index 1, so the lookup yields None.
        self.assertEqual(None, tlist.token_matching(1, [is_keyword]))
0 commit comments