# -*- coding: utf-8 -*-

import sys
import types
import unittest

import pytest

import sqlparse
from sqlparse import lexer
from sqlparse import sql
from sqlparse.tokens import *

class TestTokenize(unittest.TestCase):

    def test_simple(self):
        s = 'select * from foo;'
        stream = lexer.tokenize(s)
        # lexer.tokenize() is lazy: it yields (tokentype, value) pairs.
        self.assertTrue(isinstance(stream, types.GeneratorType))
        tokens = list(stream)
        self.assertEqual(len(tokens), 8)
        self.assertEqual(len(tokens[0]), 2)
        self.assertEqual(tokens[0], (Keyword.DML, u'select'))
        self.assertEqual(tokens[-1], (Punctuation, u';'))
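
    # Backtick-quoted identifiers (MySQL style) should survive as single
    # Name tokens, quotes included.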
    def test_backticks(self):
        s = '`foo`.`bar`'
        tokens = list(lexer.tokenize(s))
        self.assertEqual(len(tokens), 3)
        self.assertEqual(tokens[0], (Name, u'`foo`'))
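
    # Joining the token values back together must reproduce the input
    # exactly, whatever the line-ending convention (\n, \r, \r\n, or mixed).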
    def test_linebreaks(self):  # issue 1
        s = 'foo\nbar\n'
        tokens = lexer.tokenize(s)
        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
        s = 'foo\rbar\r'
        tokens = lexer.tokenize(s)
        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
        s = 'foo\r\nbar\r\n'
        tokens = lexer.tokenize(s)
        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
        s = 'foo\r\nbar\n'
        tokens = lexer.tokenize(s)
        self.assertEqual(''.join(str(x[1]) for x in tokens), s)
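
    # Identifiers that merely start with a keyword ("created_foo",
    # "enddate", "join_col") must not be split or classified as keywords.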
    def test_inline_keywords(self):  # issue 7
        s = "create created_foo"
        tokens = list(lexer.tokenize(s))
        self.assertEqual(len(tokens), 3)
        self.assertEqual(tokens[0][0], Keyword.DDL)
        self.assertEqual(tokens[2][0], Name)
        self.assertEqual(tokens[2][1], u'created_foo')
        s = "enddate"
        tokens = list(lexer.tokenize(s))
        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0][0], Name)
        s = "join_col"
        tokens = list(lexer.tokenize(s))
        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0][0], Name)
        s = "left join_col"
        tokens = list(lexer.tokenize(s))
        self.assertEqual(len(tokens), 3)
        self.assertEqual(tokens[2][0], Name)
        self.assertEqual(tokens[2][1], 'join_col')
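
    # A leading minus sign belongs to the numeric literal itself, not to a
    # separate operator token.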
    def test_negative_numbers(self):
        s = "values(-1)"
        tokens = list(lexer.tokenize(s))
        self.assertEqual(len(tokens), 4)
        self.assertEqual(tokens[2][0], Number.Integer)
        self.assertEqual(tokens[2][1], '-1')

    # Somehow this test fails on Python 3.2
    @pytest.mark.skipif(sys.version_info >= (3, 0),
                        reason='fails on Python 3')
    def test_tab_expansion(self):
        s = "\t"
        lex = lexer.Lexer()
        lex.tabsize = 5
        tokens = list(lex.get_tokens(s))
        self.assertEqual(tokens[0][1], " " * 5)
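

# Token is the smallest unit in sqlparse's parse tree: str() yields the raw
# value, while repr() also embeds a per-instance memory address, hence the
# prefix-only comparison in test_repr below.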
class TestToken(unittest.TestCase):

    def test_str(self):
        token = sql.Token(None, 'FoO')
        self.assertEqual(str(token), 'FoO')

    def test_repr(self):
        token = sql.Token(Keyword, 'foo')
        # The address suffix of repr() varies per run, so compare only the
        # stable prefix.
        tst = "<Keyword 'foo' at 0x"
        self.assertEqual(repr(token)[:len(tst)], tst)