7
7
8
8
"""SQL Lexer"""
9
9
import re
10
+
10
11
# This code is based on the SqlLexer in pygments.
11
12
# http://pygments.org/
12
13
# It's separated from the rest of pygments to increase performance
18
19
from sqlparse .utils import consume
19
20
20
21
21
class Lexer:
    """The Lexer supports configurable syntax.
    To add support for additional keywords, use the `add_keywords` method."""

    # Cached singleton instance; fixed typo (was `_default_intance`).
    # Private attribute — every reference lives in this class.
    _default_instance = None

    # Development notes:
    # - This class is prepared to be able to support additional SQL dialects
    #   in the future by adding additional functions that take the place of
    #   the function default_initialization()
    # - The lexer class uses an explicit singleton behavior with the
    #   instance-getter method get_default_instance(). This mechanism has
    #   the advantage that the call signature of the entry-points to the
    #   sqlparse library are not affected. Also, usage of sqlparse in third
    #   party code does not need to be adapted. On the other hand, singleton
    #   behavior is not thread safe, and the current implementation does not
    #   easily allow for multiple SQL dialects to be parsed in the same
    #   process. Such behavior can be supported in the future by passing a
    #   suitably initialized lexer object as an additional parameter to the
    #   entry-point functions (such as `parse`). Code will need to be written
    #   to pass down and utilize such an object. The current implementation
    #   is prepared to support this thread safe approach without the
    #   default_instance part needing to change interface.

    @classmethod
    def get_default_instance(cls):
        """Return the lexer instance used internally by the sqlparse
        core functions.

        Lazily creates and initializes the singleton on first use; every
        later call returns the same object.  NOTE(review): not thread
        safe — see the development notes above.
        """
        if cls._default_instance is None:
            cls._default_instance = cls()
            cls._default_instance.default_initialization()
        return cls._default_instance
54
+
36
55
def default_initialization (self ):
37
56
"""Initialize the lexer with default dictionaries.
38
57
Useful if you need to revert custom syntax settings."""
@@ -45,13 +64,10 @@ def default_initialization(self):
45
64
self .add_keywords (keywords .KEYWORDS_MSACCESS )
46
65
self .add_keywords (keywords .KEYWORDS )
47
66
48
- def __init__ (self ):
49
- self .default_initialization ()
50
-
51
67
def clear(self):
    """Reset the lexer to an empty syntax configuration.

    Handy when only a reduced set of syntax configurations should be
    loaded.  After this call, regexps and keyword dictionaries need to
    be loaded again before the lexer is functional.
    """
    # Drop both the token regexps and every keyword dictionary in one go.
    self._SQL_REGEX, self._keywords = [], []
@@ -73,7 +89,7 @@ def is_keyword(self, value):
73
89
"""Checks for a keyword.
74
90
75
91
If the given value is in one of the KEYWORDS_* dictionary
76
- it's considered a keyword. Otherwise tokens.Name is returned.
92
+ it's considered a keyword. Otherwise, tokens.Name is returned.
77
93
"""
78
94
val = value .upper ()
79
95
for kwdict in self ._keywords :
def tokenize(sql, encoding=None):
    """Tokenize *sql* using the :class:`Lexer` and return a 2-tuple stream
    of ``(token type, value)`` items.
    """
    # Delegate to the shared default lexer instance.
    lexer = Lexer.get_default_instance()
    return lexer.get_tokens(sql, encoding)
0 commit comments