forked from du-song/SublimeFormatSQL
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathfilter.py
More file actions
105 lines (86 loc) · 2.96 KB
/
filter.py
File metadata and controls
105 lines (86 loc) · 2.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
# -*- coding: utf-8 -*-
from sqlparse.sql import Statement, Token
from sqlparse import tokens as T
class TokenFilter(object):
    """Abstract base class for filters operating on a token stream.

    Subclasses implement :meth:`process`; keyword options given at
    construction time are kept in ``self.options``.
    """

    def __init__(self, **options):
        # Keep a plain dict of the configuration options for subclasses.
        self.options = dict(options)

    def process(self, stack, stream):
        """Process token stream."""
        raise NotImplementedError
class StatementFilter(TokenFilter):
    """Split a flat (ttype, value) token stream into Statement objects.

    A statement ends at a ';' seen while the split level is <= 0.  The
    split level tracks nesting opened by DECLARE sections, IF/FOR blocks
    inside CREATE bodies and PostgreSQL dollar-quoted strings, so that
    semicolons inside such regions do not terminate the statement.
    """

    def __init__(self):
        TokenFilter.__init__(self)
        # True while inside a DECLARE section of a CREATE statement.
        self._in_declare = False
        # True while inside a PostgreSQL $tag$ ... $tag$ quoted body.
        self._in_dbldollar = False
        # True once a CREATE... DDL keyword has been seen.
        self._is_create = False
        # Current nesting depth of BEGIN ... END blocks.
        self._begin_depth = 0

    def _reset(self):
        """Restore the per-statement parser state to its initial values."""
        self._in_declare = False
        self._in_dbldollar = False
        self._is_create = False
        self._begin_depth = 0

    def _change_splitlevel(self, ttype, value):
        """Return the split-level delta (+1, 0 or -1) caused by a token.

        Also updates the internal state flags as a side effect.
        """
        # PostgreSQL dollar quoting: a $...$ builtin toggles a region in
        # which no other token may change the split level.
        if (ttype == T.Name.Builtin
            and value.startswith('$') and value.endswith('$')):
            if self._in_dbldollar:
                self._in_dbldollar = False
                return -1
            else:
                self._in_dbldollar = True
                return 1
        elif self._in_dbldollar:
            return 0

        # ANSI: only keywords can affect the split level from here on.
        if ttype not in T.Keyword:
            return 0

        unified = value.upper()

        if unified == 'DECLARE' and self._is_create:
            self._in_declare = True
            return 1

        if unified == 'BEGIN':
            self._begin_depth += 1
            # BEGIN only tracks depth; the level itself is raised by
            # DECLARE / IF / FOR.  (The original had a dead
            # "if self._in_declare: return 0" here — both branches
            # returned 0, so it has been removed.)
            return 0

        if unified == 'END':
            # Should this respect a preceding BEGIN?
            # In CASE ... WHEN ... END this results in a split level -1.
            self._begin_depth = max(0, self._begin_depth - 1)
            return -1

        if ttype is T.Keyword.DDL and unified.startswith('CREATE'):
            self._is_create = True
            return 0

        if (unified in ('IF', 'FOR')
            and self._is_create and self._begin_depth > 0):
            return 1

        # Default: the token does not change the split level.
        return 0

    def process(self, stack, stream):
        """Process *stream*, yielding one Statement per SQL statement.

        ``stack`` is unused here but part of the filter interface.
        """
        splitlevel = 0
        stmt = None
        consume_ws = False
        stmt_tokens = []
        for ttype, value in stream:
            # Before appending the token: after a terminating ';' we
            # keep consuming trailing whitespace / single-line comments
            # into the current statement; the first other token flushes
            # it and starts a fresh one.
            if (consume_ws and ttype is not T.Whitespace
                and ttype is not T.Comment.Single):
                consume_ws = False
                stmt.tokens = stmt_tokens
                yield stmt
                self._reset()
                stmt = None
                splitlevel = 0

            if stmt is None:
                stmt = Statement()
                stmt_tokens = []

            splitlevel += self._change_splitlevel(ttype, value)

            # Append the token
            stmt_tokens.append(Token(ttype, value))

            # After appending the token: a top-level ';' marks the end
            # of the statement (flushed once trailing ws is consumed).
            if (splitlevel <= 0 and ttype is T.Punctuation
                and value == ';'):
                consume_ws = True

        # Flush the trailing (possibly unterminated) statement, if any.
        if stmt is not None:
            stmt.tokens = stmt_tokens
            yield stmt