sqlalchemy-iris 0.5.0b3__py3-none-any.whl → 0.6.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- intersystems_iris/_BufferReader.py +10 -0
- intersystems_iris/_BufferWriter.py +32 -0
- intersystems_iris/_ConnectionInformation.py +54 -0
- intersystems_iris/_ConnectionParameters.py +18 -0
- intersystems_iris/_Constant.py +38 -0
- intersystems_iris/_DBList.py +499 -0
- intersystems_iris/_Device.py +69 -0
- intersystems_iris/_GatewayContext.py +25 -0
- intersystems_iris/_GatewayException.py +4 -0
- intersystems_iris/_GatewayUtility.py +74 -0
- intersystems_iris/_IRIS.py +1294 -0
- intersystems_iris/_IRISConnection.py +516 -0
- intersystems_iris/_IRISEmbedded.py +85 -0
- intersystems_iris/_IRISGlobalNode.py +273 -0
- intersystems_iris/_IRISGlobalNodeView.py +25 -0
- intersystems_iris/_IRISIterator.py +143 -0
- intersystems_iris/_IRISList.py +360 -0
- intersystems_iris/_IRISNative.py +208 -0
- intersystems_iris/_IRISOREF.py +4 -0
- intersystems_iris/_IRISObject.py +424 -0
- intersystems_iris/_IRISReference.py +133 -0
- intersystems_iris/_InStream.py +149 -0
- intersystems_iris/_LegacyIterator.py +135 -0
- intersystems_iris/_ListItem.py +15 -0
- intersystems_iris/_ListReader.py +84 -0
- intersystems_iris/_ListWriter.py +157 -0
- intersystems_iris/_LogFileStream.py +115 -0
- intersystems_iris/_MessageHeader.py +51 -0
- intersystems_iris/_OutStream.py +25 -0
- intersystems_iris/_PrintStream.py +65 -0
- intersystems_iris/_PythonGateway.py +850 -0
- intersystems_iris/_SharedMemorySocket.py +87 -0
- intersystems_iris/__init__.py +79 -0
- intersystems_iris/__main__.py +7 -0
- intersystems_iris/dbapi/_Column.py +56 -0
- intersystems_iris/dbapi/_DBAPI.py +2295 -0
- intersystems_iris/dbapi/_Descriptor.py +46 -0
- intersystems_iris/dbapi/_IRISStream.py +63 -0
- intersystems_iris/dbapi/_Message.py +158 -0
- intersystems_iris/dbapi/_Parameter.py +138 -0
- intersystems_iris/dbapi/_ParameterCollection.py +133 -0
- intersystems_iris/dbapi/_ResultSetRow.py +314 -0
- intersystems_iris/dbapi/_SQLType.py +32 -0
- intersystems_iris/dbapi/__init__.py +0 -0
- intersystems_iris/dbapi/preparser/_PreParser.py +1658 -0
- intersystems_iris/dbapi/preparser/_Scanner.py +391 -0
- intersystems_iris/dbapi/preparser/_Token.py +81 -0
- intersystems_iris/dbapi/preparser/_TokenList.py +251 -0
- intersystems_iris/dbapi/preparser/__init__.py +0 -0
- intersystems_iris/pex/_BusinessHost.py +101 -0
- intersystems_iris/pex/_BusinessOperation.py +105 -0
- intersystems_iris/pex/_BusinessProcess.py +214 -0
- intersystems_iris/pex/_BusinessService.py +95 -0
- intersystems_iris/pex/_Common.py +228 -0
- intersystems_iris/pex/_Director.py +24 -0
- intersystems_iris/pex/_IRISBusinessOperation.py +5 -0
- intersystems_iris/pex/_IRISBusinessService.py +18 -0
- intersystems_iris/pex/_IRISInboundAdapter.py +5 -0
- intersystems_iris/pex/_IRISOutboundAdapter.py +17 -0
- intersystems_iris/pex/_InboundAdapter.py +57 -0
- intersystems_iris/pex/_Message.py +6 -0
- intersystems_iris/pex/_OutboundAdapter.py +46 -0
- intersystems_iris/pex/__init__.py +25 -0
- iris/__init__.py +25 -0
- iris/iris_site.py +13 -0
- iris/irisbuiltins.py +97 -0
- iris/irisloader.py +199 -0
- irisnative/_IRISNative.py +9 -0
- irisnative/__init__.py +10 -0
- {sqlalchemy_iris-0.5.0b3.dist-info → sqlalchemy_iris-0.6.0b1.dist-info}/METADATA +1 -1
- sqlalchemy_iris-0.6.0b1.dist-info/RECORD +83 -0
- sqlalchemy_iris-0.6.0b1.dist-info/top_level.txt +4 -0
- sqlalchemy_iris-0.5.0b3.dist-info/RECORD +0 -14
- sqlalchemy_iris-0.5.0b3.dist-info/top_level.txt +0 -1
- {sqlalchemy_iris-0.5.0b3.dist-info → sqlalchemy_iris-0.6.0b1.dist-info}/LICENSE +0 -0
- {sqlalchemy_iris-0.5.0b3.dist-info → sqlalchemy_iris-0.6.0b1.dist-info}/WHEEL +0 -0
- {sqlalchemy_iris-0.5.0b3.dist-info → sqlalchemy_iris-0.6.0b1.dist-info}/entry_points.txt +0 -0
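The bulk of the new content is the vendored InterSystems IRIS driver (the intersystems_iris, iris, and irisnative packages); the largest single file, intersystems_iris/dbapi/preparser/_PreParser.py (+1,658 lines), is shown in the hunk below. Purely as orientation, here is a rough, hypothetical sketch of how that preparser's entry point is driven; the no-argument _ParameterCollection() construction and the printed results are assumptions, not something this diff confirms:

import intersystems_iris.dbapi._ParameterCollection as _pc
import intersystems_iris.dbapi.preparser._PreParser as _pp

params = _pc._ParameterCollection()   # assumed: collects the '?' placeholders found while preparsing
preparser = _pp._PreParser()          # defaults: no delimited identifiers, not embedded
result = preparser.PreParse("SELECT name FROM Sample.Person WHERE id = ?", params)
print(result.p_eStmtType)             # expected: StatementType.QUERY
print(result.sResult)                 # '?' rewritten to :%qpar(n) placeholders by Resolve()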
@@ -0,0 +1,1658 @@
import functools
import enum
import intersystems_iris._IRISList
import intersystems_iris.dbapi._DBAPI
import intersystems_iris.dbapi._Parameter
import intersystems_iris.dbapi.preparser._Token
import intersystems_iris.dbapi.preparser._TokenList
import intersystems_iris.dbapi.preparser._Scanner
from intersystems_iris.dbapi._Parameter import ParameterMode
from intersystems_iris.dbapi.preparser._Token import TOKEN
from intersystems_iris.dbapi.preparser._Scanner import ParseToken

# May want to move to its own file eventually
# SQL Statement Types
class StatementType(enum.IntEnum):
    UPDATE = 0
    QUERY = 1
    CALL = 2
    SYNC_COMMIT = 3
    ASYNC_COMMIT = 4
    STREAMS_OFF = 5
    STREAMS_ON = 6
    CALLWITHRESULT = 7
    DDL_ALTER_DROP = 8
    DDL_OTHER = 9
    DIRECT_CALL_QUERY = 10
    DIRECT_CALL_UPDATE = 11
    PREPARED_CALL_QUERY = 12
    PREPARED_CALL_UPDATE = 13
    SQL_DIALECT = 14
    STMT_USE = 15

class PreParseResult(object):
    '''
    A simple structure, returned by _PreParser.PreParse(), containing the parsed statement and statement type
    '''
    def __init__(self):
        self.sResult = ""
        self.p_eStmtType = None

class _PreParser(object):
    """
    This is the interface to the SQL PreParser. A string of SQL and a list of parameters (bound parameters or empty) is input.
    The processed string is returned along with a count of parameters found and a classification of the statement type.
    """
    # Class properties
    # Table for keyword lookups (used when tokenizing the statement)
    s_KeywordTable = {}
    s_KeywordTable["AND"] = TOKEN.OP
    s_KeywordTable["BETWEEN"] = TOKEN.OP
    s_KeywordTable["CHAR"] = TOKEN.DATATYPE
    s_KeywordTable["CHARACTER"] = TOKEN.DATATYPE
    s_KeywordTable["DEC"] = TOKEN.DATATYPE
    s_KeywordTable["DECIMAL"] = TOKEN.DATATYPE
    s_KeywordTable["FLOAT"] = TOKEN.DATATYPE
    s_KeywordTable["IS"] = TOKEN.IS
    s_KeywordTable["LIKE"] = TOKEN.OP
    s_KeywordTable["LONGVARBINARY"] = TOKEN.DATATYPE
    s_KeywordTable["LONGVARCHAR"] = TOKEN.DATATYPE
    s_KeywordTable["NCHAR"] = TOKEN.DATATYPE
    s_KeywordTable["NOT["] = TOKEN.OP
    s_KeywordTable["NOT"] = TOKEN.NOT
    s_KeywordTable["NULL"] = TOKEN.NULL
    s_KeywordTable["NUMBER"] = TOKEN.DATATYPE
    s_KeywordTable["NUMERIC"] = TOKEN.DATATYPE
    s_KeywordTable["NVARCHAR"] = TOKEN.DATATYPE
    s_KeywordTable["RAW"] = TOKEN.DATATYPE
    s_KeywordTable["STARTSWITH"] = TOKEN.OP
    s_KeywordTable["THEN"] = TOKEN.THEN
    s_KeywordTable["ELSE"] = TOKEN.ELSE
    s_KeywordTable["VARBINARY"] = TOKEN.DATATYPE
    s_KeywordTable["VARCHAR"] = TOKEN.DATATYPE
    s_KeywordTable["VARCHAR2"] = TOKEN.DATATYPE
    s_KeywordTable["VARYING"] = TOKEN.DATATYPE
    s_KeywordTable["_"] = TOKEN.NOT
    s_KeywordTable["%SQLUPPER"] = TOKEN.STRFUNCTION
    s_KeywordTable["%STRING"] = TOKEN.STRFUNCTION
    s_KeywordTable["%SQLSTRING"] = TOKEN.STRFUNCTION
    s_KeywordTable["%TRUNCATE"] = TOKEN.STRFUNCTION
    s_KeywordTable["TRUNCATE"] = TOKEN.STRFUNCTION

    # Table for statement type lookups
    s_StatementTable = {}
    s_StatementTable["ALTER"] = StatementType.DDL_ALTER_DROP
    s_StatementTable["CREATE"] = StatementType.DDL_OTHER
    s_StatementTable["DROP"] = StatementType.DDL_ALTER_DROP
    s_StatementTable["GRANT"] = StatementType.DDL_OTHER
    s_StatementTable["REVOKE"] = StatementType.DDL_OTHER
    s_StatementTable["%CHECKPRIV"] = StatementType.DDL_OTHER
    s_StatementTable["TRAIN"] = StatementType.DDL_OTHER
    s_StatementTable["VALIDATE"] = StatementType.DDL_OTHER
    s_StatementTable["TUNE"] = StatementType.DDL_OTHER
    s_StatementTable["VALIDATE"] = StatementType.DDL_OTHER

    s_StatementTable["USE"] = StatementType.STMT_USE
    s_StatementTable["EXPLAIN"] = StatementType.CALLWITHRESULT

    # Table for common statement type lookups (SELECT,DELETE,UPDATE,INSERT)
    s_ParsedStatements = {}
    s_ParsedStatements["SELECT"] = StatementType.QUERY
    s_ParsedStatements["INSERT"] = StatementType.UPDATE
    s_ParsedStatements["DELETE"] = StatementType.UPDATE
    s_ParsedStatements["UPDATE"] = StatementType.UPDATE

    # Table for statements to cache, beyond those in s_ParsedStatements
    # TODO: change this to be a set
    # Since the server now tells us whether to cache, this may be superfluous
    s_TransactionStatements = {}
    s_TransactionStatements["COMMIT"] = True
    s_TransactionStatements["ROLLBACK"] = True
    s_TransactionStatements["START"] = True
    s_TransactionStatements["%INTRANSACTION"] = True
    s_TransactionStatements["%INTRANS"] = True
    s_TransactionStatements["%BEGTRANS"] = True

    # keywords for replacing parameters
    s_replaceparm = " SELECT TOP WHERE ON AND OR NOT BETWEEN %STARTSWITH LIKE CASE WHEN ELSE THEN"

    # keywords that should be output all upper case after preparsing
    s_ReservedKeywords = set()
    s_ReservedKeywords.add("%AFTERHAVING")
    s_ReservedKeywords.add("%ALLINDEX")
    s_ReservedKeywords.add("%ALPHAUP")
    s_ReservedKeywords.add("%ALTER")
    s_ReservedKeywords.add("%BEGTRANS")
    s_ReservedKeywords.add("%CHECKPRIV")
    s_ReservedKeywords.add("%CLASSNAME")
    s_ReservedKeywords.add("%CLASSPARAMETER")
    s_ReservedKeywords.add("%DBUGFULL")
    s_ReservedKeywords.add("%DELDATA")
    s_ReservedKeywords.add("%DESCRIPTION")
    s_ReservedKeywords.add("%EXACT")
    s_ReservedKeywords.add("%EXTERNAL")
    s_ReservedKeywords.add("%FILE")
    s_ReservedKeywords.add("%FIRSTTABLE")
    s_ReservedKeywords.add("%FLATTEN")
    s_ReservedKeywords.add("%FOREACH")
    s_ReservedKeywords.add("%FULL")
    s_ReservedKeywords.add("%ID")
    s_ReservedKeywords.add("%IDADDED")
    s_ReservedKeywords.add("%IGNOREINDEX")
    s_ReservedKeywords.add("%IGNOREINDICES")
    s_ReservedKeywords.add("%INLIST")
    s_ReservedKeywords.add("%INORDER")
    s_ReservedKeywords.add("%INTERNAL")
    s_ReservedKeywords.add("%INTEXT")
    s_ReservedKeywords.add("%INTRANS")
    s_ReservedKeywords.add("%INTRANSACTION")
    s_ReservedKeywords.add("%KEY")
    s_ReservedKeywords.add("%MATCHES")
    s_ReservedKeywords.add("%MCODE")
    s_ReservedKeywords.add("%MERGE")
    s_ReservedKeywords.add("%MINUS")
    s_ReservedKeywords.add("%MVR")
    s_ReservedKeywords.add("%NOCHECK")
    s_ReservedKeywords.add("%NODELDATA")
    s_ReservedKeywords.add("%NOFLATTEN")
    s_ReservedKeywords.add("%NOFPLAN")
    s_ReservedKeywords.add("%NOINDEX")
    s_ReservedKeywords.add("%NOLOCK")
    s_ReservedKeywords.add("%NOMERGE")
    s_ReservedKeywords.add("%NOPARALLEL")
    s_ReservedKeywords.add("%NOREDUCE")
    s_ReservedKeywords.add("%NORUNTIME")
    s_ReservedKeywords.add("%NOSVSO")
    s_ReservedKeywords.add("%NOTOPOPT")
    s_ReservedKeywords.add("%NOTRIGGER")
    s_ReservedKeywords.add("%NOUNIONOROPT")
    s_ReservedKeywords.add("%NUMROWS")
    s_ReservedKeywords.add("%ODBCIN")
    s_ReservedKeywords.add("%ODBCOUT")
    s_ReservedKeywords.add("%PARALLEL")
    s_ReservedKeywords.add("%PLUS")
    s_ReservedKeywords.add("%PROFILE")
    s_ReservedKeywords.add("%PROFILE_ALL")
    s_ReservedKeywords.add("%PUBLICROWID")
    s_ReservedKeywords.add("%ROUTINE")
    s_ReservedKeywords.add("%ROWCOUNT")
    s_ReservedKeywords.add("%RUNTIMEIN")
    s_ReservedKeywords.add("%RUNTIMEOUT")
    s_ReservedKeywords.add("%STARTSWITH")
    s_ReservedKeywords.add("%STARTTABLE")
    s_ReservedKeywords.add("%SQLSTRING")
    s_ReservedKeywords.add("%SQLUPPER")
    s_ReservedKeywords.add("%STRING")
    s_ReservedKeywords.add("%TABLENAME")
    s_ReservedKeywords.add("%TRUNCATE")
    s_ReservedKeywords.add("%UPPER")
    s_ReservedKeywords.add("%VALUE")
    s_ReservedKeywords.add("%VID")
    s_ReservedKeywords.add("ABSOLUTE")
    s_ReservedKeywords.add("ADD")
    s_ReservedKeywords.add("ALL")
    s_ReservedKeywords.add("ALLOCATE")
    s_ReservedKeywords.add("ALTER")
    s_ReservedKeywords.add("AND")
    s_ReservedKeywords.add("ANY")
    s_ReservedKeywords.add("ARE")
    s_ReservedKeywords.add("AS")
    s_ReservedKeywords.add("ASC")
    s_ReservedKeywords.add("ASSERTION")
    s_ReservedKeywords.add("AT")
    s_ReservedKeywords.add("AUTHORIZATION")
    s_ReservedKeywords.add("AVG")
    s_ReservedKeywords.add("BEGIN")
    s_ReservedKeywords.add("BETWEEN")
    s_ReservedKeywords.add("BIT")
    s_ReservedKeywords.add("BIT_LENGTH")
    s_ReservedKeywords.add("BOTH")
    s_ReservedKeywords.add("BY")
    s_ReservedKeywords.add("CASCADE")
    s_ReservedKeywords.add("CASE")
    s_ReservedKeywords.add("CAST")
    s_ReservedKeywords.add("CHAR")
    s_ReservedKeywords.add("CHARACTER")
    s_ReservedKeywords.add("CHARACTER_LENGTH")
    s_ReservedKeywords.add("CHAR_LENGTH")
    s_ReservedKeywords.add("CHECK")
    s_ReservedKeywords.add("CLOSE")
    s_ReservedKeywords.add("COALESCE")
    s_ReservedKeywords.add("COLLATE")
    s_ReservedKeywords.add("COMMIT")
    s_ReservedKeywords.add("CONNECT")
    s_ReservedKeywords.add("CONNECTION")
    s_ReservedKeywords.add("CONSTRAINT")
    s_ReservedKeywords.add("CONSTRAINTS")
    s_ReservedKeywords.add("CONTINUE")
    s_ReservedKeywords.add("CONVERT")
    s_ReservedKeywords.add("CORRESPONDING")
    s_ReservedKeywords.add("COUNT")
    s_ReservedKeywords.add("CREATE")
    s_ReservedKeywords.add("CROSS")
    s_ReservedKeywords.add("CURRENT")
    s_ReservedKeywords.add("CURRENT_DATE")
    s_ReservedKeywords.add("CURRENT_TIME")
    s_ReservedKeywords.add("CURRENT_TIMESTAMP")
    s_ReservedKeywords.add("CURRENT_USER")
    s_ReservedKeywords.add("CURSOR")
    s_ReservedKeywords.add("DATE")
    s_ReservedKeywords.add("DEALLOCATE")
    s_ReservedKeywords.add("DEC")
    s_ReservedKeywords.add("DECIMAL")
    s_ReservedKeywords.add("DECLARE")
    s_ReservedKeywords.add("DEFAULT")
    s_ReservedKeywords.add("DEFERRABLE")
    s_ReservedKeywords.add("DEFERRED")
    s_ReservedKeywords.add("DELETE")
    s_ReservedKeywords.add("DESC")
    s_ReservedKeywords.add("DESCRIBE")
    s_ReservedKeywords.add("DESCRIPTOR")
    s_ReservedKeywords.add("DIAGNOSTICS")
    s_ReservedKeywords.add("DISCONNECT")
    s_ReservedKeywords.add("DISTINCT")
    s_ReservedKeywords.add("DOMAIN")
    s_ReservedKeywords.add("DOUBLE")
    s_ReservedKeywords.add("DROP")
    s_ReservedKeywords.add("ELSE")
    s_ReservedKeywords.add("END")
    s_ReservedKeywords.add("ENDEXEC")
    s_ReservedKeywords.add("ESCAPE")
    s_ReservedKeywords.add("EXCEPT")
    s_ReservedKeywords.add("EXCEPTION")
    s_ReservedKeywords.add("EXEC")
    s_ReservedKeywords.add("EXECUTE")
    s_ReservedKeywords.add("EXISTS")
    s_ReservedKeywords.add("EXTERNAL")
    s_ReservedKeywords.add("EXTRACT")
    s_ReservedKeywords.add("FALSE")
    s_ReservedKeywords.add("FETCH")
    s_ReservedKeywords.add("FIRST")
    s_ReservedKeywords.add("FLOAT")
    s_ReservedKeywords.add("FOR")
    s_ReservedKeywords.add("FOREIGN")
    s_ReservedKeywords.add("FOUND")
    s_ReservedKeywords.add("FROM")
    s_ReservedKeywords.add("FULL")
    s_ReservedKeywords.add("GET")
    s_ReservedKeywords.add("GLOBAL")
    s_ReservedKeywords.add("GO")
    s_ReservedKeywords.add("GOTO")
    s_ReservedKeywords.add("GRANT")
    s_ReservedKeywords.add("GROUP")
    s_ReservedKeywords.add("HAVING")
    s_ReservedKeywords.add("HOUR")
    s_ReservedKeywords.add("IDENTITY")
    s_ReservedKeywords.add("IMMEDIATE")
    s_ReservedKeywords.add("IN")
    s_ReservedKeywords.add("INDICATOR")
    s_ReservedKeywords.add("INITIALLY")
    s_ReservedKeywords.add("INNER")
    s_ReservedKeywords.add("INPUT")
    s_ReservedKeywords.add("INSENSITIVE")
    s_ReservedKeywords.add("INSERT")
    s_ReservedKeywords.add("INT")
    s_ReservedKeywords.add("INTEGER")
    s_ReservedKeywords.add("INTERSECT")
    s_ReservedKeywords.add("INTERVAL")
    s_ReservedKeywords.add("INTO")
    s_ReservedKeywords.add("IS")
    s_ReservedKeywords.add("ISOLATION")
    s_ReservedKeywords.add("JOIN")
    s_ReservedKeywords.add("LANGUAGE")
    s_ReservedKeywords.add("LAST")
    s_ReservedKeywords.add("LEADING")
    s_ReservedKeywords.add("LEFT")
    s_ReservedKeywords.add("LEVEL")
    s_ReservedKeywords.add("LIKE")
    s_ReservedKeywords.add("LOCAL")
    s_ReservedKeywords.add("LOWER")
    s_ReservedKeywords.add("MATCH")
    s_ReservedKeywords.add("MAX")
    s_ReservedKeywords.add("MIN")
    s_ReservedKeywords.add("MINUTE")
    s_ReservedKeywords.add("MODULE")
    s_ReservedKeywords.add("NAMES")
    s_ReservedKeywords.add("NATIONAL")
    s_ReservedKeywords.add("NATURAL")
    s_ReservedKeywords.add("NCHAR")
    s_ReservedKeywords.add("NEXT")
    s_ReservedKeywords.add("NO")
    s_ReservedKeywords.add("NOT")
    s_ReservedKeywords.add("NULL")
    s_ReservedKeywords.add("NULLIF")
    s_ReservedKeywords.add("NUMERIC")
    s_ReservedKeywords.add("OCTET_LENGTH")
    s_ReservedKeywords.add("OF")
    s_ReservedKeywords.add("ON")
    s_ReservedKeywords.add("ONLY")
    s_ReservedKeywords.add("OPEN")
    s_ReservedKeywords.add("OPTION")
    s_ReservedKeywords.add("OR")
    s_ReservedKeywords.add("OUTER")
    s_ReservedKeywords.add("OUTPUT")
    s_ReservedKeywords.add("OVERLAPS")
    s_ReservedKeywords.add("PAD")
    s_ReservedKeywords.add("PARTIAL")
    s_ReservedKeywords.add("PREPARE")
    s_ReservedKeywords.add("PRESERVE")
    s_ReservedKeywords.add("PRIMARY")
    s_ReservedKeywords.add("PRIOR")
    s_ReservedKeywords.add("PRIVILEGES")
    s_ReservedKeywords.add("PROCEDURE")
    s_ReservedKeywords.add("PUBLIC")
    s_ReservedKeywords.add("READ")
    s_ReservedKeywords.add("REAL")
    s_ReservedKeywords.add("REFERENCES")
    s_ReservedKeywords.add("RELATIVE")
    s_ReservedKeywords.add("RESTRICT")
    s_ReservedKeywords.add("REVOKE")
    s_ReservedKeywords.add("RIGHT")
    s_ReservedKeywords.add("ROLE")
    s_ReservedKeywords.add("ROLLBACK")
    s_ReservedKeywords.add("ROWS")
    s_ReservedKeywords.add("SCHEMA")
    s_ReservedKeywords.add("SCROLL")
    s_ReservedKeywords.add("SECOND")
    s_ReservedKeywords.add("SECTION")
    s_ReservedKeywords.add("SELECT")
    s_ReservedKeywords.add("SESSION_USER")
    s_ReservedKeywords.add("SET")
    s_ReservedKeywords.add("SHARD")
    s_ReservedKeywords.add("SMALLINT")
    s_ReservedKeywords.add("SOME")
    s_ReservedKeywords.add("SPACE")
    s_ReservedKeywords.add("SQLERROR")
    s_ReservedKeywords.add("SQLSTATE")
    s_ReservedKeywords.add("STATISTICS")
    s_ReservedKeywords.add("SUBSTRING")
    s_ReservedKeywords.add("SUM")
    s_ReservedKeywords.add("SYSDATE")
    s_ReservedKeywords.add("SYSTEM_USER")
    s_ReservedKeywords.add("TABLE")
    s_ReservedKeywords.add("TEMPORARY")
    s_ReservedKeywords.add("THEN")
    s_ReservedKeywords.add("TIME")
    s_ReservedKeywords.add("TIMEZONE_HOUR")
    s_ReservedKeywords.add("TIMEZONE_MINUTE")
    s_ReservedKeywords.add("TO")
    s_ReservedKeywords.add("TOP")
    s_ReservedKeywords.add("TRAILING")
    s_ReservedKeywords.add("TRANSACTION")
    s_ReservedKeywords.add("TRIM")
    s_ReservedKeywords.add("TRUE")
    s_ReservedKeywords.add("UNION")
    s_ReservedKeywords.add("UNIQUE")
    s_ReservedKeywords.add("UPDATE")
    s_ReservedKeywords.add("UPPER")
    s_ReservedKeywords.add("USER")
    s_ReservedKeywords.add("USING")
    s_ReservedKeywords.add("VALUES")
    s_ReservedKeywords.add("VARCHAR")
    s_ReservedKeywords.add("VARYING")
    s_ReservedKeywords.add("WHEN")
    s_ReservedKeywords.add("WHENEVER")
    s_ReservedKeywords.add("WHERE")
    s_ReservedKeywords.add("WITH")
    s_ReservedKeywords.add("WORK")
    s_ReservedKeywords.add("WRITE")

    # Supported SQL Dialects
    SQL_DIALECT_DEFAULT = 0
    SQL_DIALECT_MSSQL = 1
    SQL_DIALECT_SYBASE = 2

    # methods
    def CacheOnServerGet(self):
        return self.m_CacheOnServer

    def CacheOnServerSet(self, b):
        b = bool(b)

        self.m_CacheOnServer = b

    def ParamInfoGet(self):
        return self.m_ParamInfo

    def ParamInfoSet(self, s):
        if not isinstance(s, intersystems_iris._ListWriter._ListWriter):
            raise TypeError("s must be a _ListWriter")

        self.m_ParamInfo = s

    # Build a PreParser
    def __init__(self, p_bDelimitedIdentifiers = False, addRID = 0, embedded = False):
        p_bDelimitedIdentifiers = bool(p_bDelimitedIdentifiers)
        try:
            addRID = int(addRID)
        except (TypeError, ValueError):
            raise TypeError("addRID must be an integer")

        self.m_addRowID = addRID
        self.m_ExecParamCount = 0
        self.m_ParamInfo = intersystems_iris._IRISList._IRISList()

        # flags for delimited identifier use
        self.m_bDelimitedIdentifiers = p_bDelimitedIdentifiers
        self.m_bBracketSubstitution = False

        # flag for when statements are cached on the server
        # potentially irrelevant now because server tells us directly whether it cached the statement
        self.CacheOnServerSet(False)

        # List for tokenizer
        self.m_Tokens = None

        # The source scanner
        self.m_Scanner = None

        # flag for when Named Parameters are used
        self.hasNamedParameters = False

        # use to pass UndefinedCount value from methods
        self.m_nUndefinedCount = 0

        self.embedded = embedded

    # Preparse an SQL string returning output statement, parameters, parameter count and statement type
    def PreParse(self, query, p_Parameters):

        t_query = query
        while True:
            # First tokenize the input
            self.Tokenize(t_query)
            # Convert WITH Clause, can be recursive
            found_with, t_query = self.With(t_query)
            if not found_with:
                break

        found_insert, t_query = self.InsertMultiValues(t_query)
        if found_insert:
            self.Tokenize(t_query)

        # Resolve the tokens and determine output
        return self.Resolve(t_query, p_Parameters)

    def With(self, query):
        try:
            found = False
            new_query = ''
            with_statements = {}

            def _query(find_end_paren=True):
                sub_query = ''
                open_parens = 0
                while tokens.MoveNext():
                    token = tokens.Current()

                    if token.TokenType is TOKEN.OPEN_PAREN:
                        open_parens += 1
                    elif token.TokenType is TOKEN.CLOSE_PAREN:
                        open_parens -= 1

                    sub_query += token.Lexeme
                    sub_query += ' '

                    if token.TokenType is TOKEN.ID and (token.UpperEquals('FROM') or token.UpperEquals('JOIN')):
                        assert tokens.MoveNext() and tokens.Current().TokenType is TOKEN.ID
                        table_name = tokens.Current().Lexeme
                        table_name_upper = tokens.Current().UpperLexeme
                        if table_name_upper in with_statements:
                            sub_query += with_statements[table_name_upper]
                            sub_query += ' AS '
                        sub_query += table_name
                        sub_query += ' '

                    if find_end_paren and open_parens == 0:
                        break
                return sub_query

            tokens = self.m_Tokens.GetEnumerator()
            while tokens.MoveNext():
                token = tokens.Current()

                if token.TokenType is TOKEN.ID and token.UpperEquals("WITH"):
                    found = True
                    break
                else:
                    new_query += token.Lexeme
                    new_query += ' '

            if not found:
                return False, query

            while True:
                assert tokens.MoveNext() and tokens.Current().TokenType is TOKEN.ID
                with_name = tokens.Current().UpperLexeme
                assert tokens.MoveNext() and tokens.Current().TokenType is TOKEN.ID and tokens.Current().UpperContains('AS')
                assert tokens.MoveNext() and tokens.Current().TokenType is TOKEN.OPEN_PAREN

                tokens.MovePrevious()
                with_statements[with_name] = _query()

                if not tokens.MoveNext() or tokens.Current().TokenType is not TOKEN.COMMA:
                    tokens.MovePrevious()
                    break

            assert tokens.MoveNext()
            tokens.MovePrevious()
            new_query += _query(False)

            return found, new_query
        except:
            return False, query

    def InsertMultiValues(self, query):
        try:
            new_query = ''
            values_list = []

            tokens = self.m_Tokens.GetEnumerator()
            while tokens.MoveNext() and not tokens.Current().UpperEquals("INSERT"):
                new_query += tokens.Current().Lexeme + ' '
            if not tokens.MoveNext() or not tokens.Current().UpperEquals("INTO"):
                return False, query
            new_query += 'INSERT INTO '
            while tokens.MoveNext() and not tokens.Current().UpperEquals("VALUES"):
                new_query += tokens.Current().Lexeme + ' '

            values = ''
            while tokens.MoveNext():
                assert tokens.Current().TokenType is TOKEN.OPEN_PAREN
                open_parens = 1
                while tokens.MoveNext() or open_parens > 0:
                    token = tokens.Current()
                    if token.TokenType is TOKEN.OPEN_PAREN:
                        open_parens += 1
                    elif token.TokenType is TOKEN.CLOSE_PAREN:
                        open_parens -= 1
                        if open_parens == 0:
                            break
                    values += token.Lexeme
                    values += ' '
                values_list.append(values)
                values = ''
                if not tokens.MoveNext() or tokens.Current().TokenType is not TOKEN.COMMA:
                    break

            if len(values_list) <= 1:
                return False, query

            new_query += ' SELECT '
            new_query += ' UNION ALL SELECT '.join(values_list)

            return True, new_query
        except:
            return False, query

    # Parse a statement
    def Tokenize(self, p_strInput):
        # Get a scanner on the sql string
        self.m_Scanner = intersystems_iris.dbapi.preparser._Scanner._Scanner(p_strInput)
        # Create a new token list
        self.m_Tokens = intersystems_iris.dbapi.preparser._TokenList._TokenList()
        # Scan the input string and break into tokens
        tokenize_switcher = {
            ParseToken.tokEOS: self.Tokenize_eos,
            ParseToken.tokDOT: self.Tokenize_dot,
            ParseToken.tokDIGIT: self.Tokenize_digit,
            ParseToken.tokMINUS: self.Tokenize_minus,
            ParseToken.tokPLUS: self.Tokenize_plus,
            ParseToken.tokLBRACK: self.Tokenize_lbrack,
            ParseToken.tokDQUOTE: self.Tokenize_quote,
            ParseToken.tokSQUOTE: self.Tokenize_quote,
            ParseToken.tokSLASH: self.Tokenize_slash,
            ParseToken.tokQUEST: functools.partial(self.Tokenize_single, token = TOKEN.QUESTION_MARK, char = "?"),
            ParseToken.tokATSIGN: self.Tokenize_atsign,
            ParseToken.tokLPARN: functools.partial(self.Tokenize_single, token = TOKEN.OPEN_PAREN, char = "("),
            ParseToken.tokRPARN: functools.partial(self.Tokenize_single, token = TOKEN.CLOSE_PAREN, char = ")"),
            ParseToken.tokCOMMA: functools.partial(self.Tokenize_single, token = TOKEN.COMMA, char = ","),
            ParseToken.tokCOLON: self.Tokenize_colon,
            ParseToken.tokLETTER: self.Tokenize_identifier,
            ParseToken.tokPERCENT: self.Tokenize_identifier,
            ParseToken.tokDOLLAR: self.Tokenize_identifier,
            ParseToken.tokUSCORE: self.Tokenize_identifier,
            ParseToken.tokPOUND: self.Tokenize_identifier,
            ParseToken.tokLESS: functools.partial(self.Tokenize_op, check_tokens = [ParseToken.tokEQUAL, ParseToken.tokGREAT]),
            ParseToken.tokEXCLA: self.Tokenize_op,
            ParseToken.tokGREAT: self.Tokenize_op,
            ParseToken.tokASTER: self.Tokenize_op,
            ParseToken.tokEQUAL: functools.partial(self.Tokenize_op, check_tokens = [ParseToken.tokASTER]),
            ParseToken.tokVBAR: self.Tokenize_vbar,
            ParseToken.tokLBRACE: self.Tokenize_lbrace
        }
        while self.m_Scanner.CurrentTokenGet() != ParseToken.tokEOS:
            self.m_Scanner.SkipWhitespace()

            tokenize_func = tokenize_switcher.get(self.m_Scanner.CurrentTokenGet(), self.Tokenize_default)
            tokenize_func()

    # generic function for when a token consists of a single character
    def Tokenize_single(self, token, char):
        self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(token, char))
        # Skip this character
        self.m_Scanner.NextToken()

    # default behavior for an unknown character or ParseToken
    def Tokenize_default(self, token = TOKEN.UNKNOWN):
        self.m_Scanner.BeginLexeme()
        self.m_Scanner.NextToken() # One character unknown
        self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(token, self.m_Scanner.EndLexeme(), self.m_Scanner.EndUpperLexeme()))

    # end of source, do nothing
    def Tokenize_eos(self):
        pass

    # if dot is part of a decimal, parse a number, otherwise default behavior
    def Tokenize_dot(self):
        if ParseToken.tokDIGIT != self.m_Scanner.PeekNextToken():
            self.Tokenize_default()
        else:
            self.Tokenize_digit()

    # either the beginning of hex data, or a number
    def Tokenize_digit(self):
        (t_strNumber, goodParse) = self.m_Scanner.Hex()
        if goodParse:
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.HEX, t_strNumber))
            return

        (t_strNumber, goodParse) = self.m_Scanner.Number()
        if not goodParse:
            raise Exception("Invalid Numeric Constant")
        self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.CONSTANT, t_strNumber))

    def Tokenize_minus(self):
        nextToken = self.m_Scanner.PeekNextToken()
        if nextToken == ParseToken.tokMINUS:
            # Continuation sequence, skip to next line
            self.m_Scanner.Skip(2)
            self.m_Scanner.BeginLexeme()
            self.m_Scanner.SkipToEndOfLine() # Skip '--' to end of line
            # DVU m_Tokens.Append(new _Token(TOKEN.UNKNOWN, "/*" + m_Scanner.EndLexeme() + "*/"))
            return
        elif nextToken == ParseToken.tokGREAT:
            # -> operator
            self.m_Scanner.BeginLexeme()
            self.m_Scanner.Skip(2) # Skip '->'
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.OP, self.m_Scanner.EndLexeme()))
            return
        self.Tokenize_plus("-")

    def Tokenize_plus(self, op_char = "+"):
        # RULE: Per Aviel, Preparser.txt. A numeric constant may include a preceding "+" or "-",
        # but only if the token before the +/- is an OP or LPAR, otherwise the +/- might be
        # a monadic operator and should be considered an OP.
        t_eToken = self.m_Tokens.Last().GetValue().TokenTypeGet() if self.m_Tokens.Last() is not None else TOKEN.UNKNOWN
        if t_eToken in [TOKEN.OP, TOKEN.OPEN_PAREN, TOKEN.COMMA] and (self.m_Scanner.PeekNextToken() == ParseToken.tokDIGIT or (self.m_Scanner.PeekNextToken() == ParseToken.tokDOT and self.m_Scanner.PeekNextNextToken() == ParseToken.tokDIGIT)):
            # Scan in number
            (t_strNumber, goodParse) = self.m_Scanner.Number()
            if not goodParse:
                # TO DO: Replace with ParseException
                raise Exception("Invalid Numeric Constant")
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.CONSTANT, t_strNumber))
        else:
            self.Tokenize_single(TOKEN.OP, op_char)

    def Tokenize_lbrack(self):
        if self.m_bBracketSubstitution:
            if not self.m_bDelimitedIdentifiers:
                raise Exception("Delimited identifiers must be enabled on the server to support brackets")
            (t_strString, t_eToken) = self.m_Scanner.ParseBrackets(self.m_bDelimitedIdentifiers)
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(t_eToken, t_strString))
            return
        self.Tokenize_default()

    # quotes indicate a string
    def Tokenize_quote(self):
        (t_strString, t_eToken) = self.m_Scanner.String(self.m_bDelimitedIdentifiers)
        self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(t_eToken, t_strString))

    def Tokenize_slash(self):
        if self.m_Scanner.PeekNextToken() == ParseToken.tokASTER:
            # scan in the comment
            self.m_Scanner.BeginLexeme()
            # Skip '/' '*'
            self.m_Scanner.Skip(2)
            # Scan in the comment, returns true if successful scan
            if not self.m_Scanner.Comment():
                # Ran off end of statement
                # TO DO: Replace with ParseException?
                raise Exception("Unexpected End-Of-Statement")
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.UNKNOWN, self.m_Scanner.EndLexeme(), self.m_Scanner.EndUpperLexeme()))
        else:
            self.Tokenize_default(TOKEN.OP) # '/' operator

    # '@' used for named parameters
    def Tokenize_atsign(self):
        self.m_Scanner.NextToken()
        if self.m_Scanner.CurrentTokenGet() == ParseToken.tokDIGIT:
            raise Exception(("Parameter Name error, First value cannot be a digit: " + self.m_Scanner.CurrentChar()))
        t_strID = self.m_Scanner.Identifier()
        if t_strID == "":
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.QUESTION_MARK, "?"))
        else:
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.ATSIGN, "@" + t_strID))

    # ':' indicates variables
    def Tokenize_colon(self):
        # Skip ':'
        self.m_Scanner.NextToken()
        # Scan in a variable
        t_strVariable = self.m_Scanner.Variable()
        t_strVariable = ":" + t_strVariable
        self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.VAR, t_strVariable))

    def Tokenize_identifier(self):
        # Initially, the token is an ID
        t_eToken = TOKEN.ID
        # Scan in an identifier
        t_strID = self.m_Scanner.Identifier()
        # Get an uppercase version for lookups
        t_strIDUpper = self.m_Scanner.EndUpperLexeme()
        # Do a table lookup to identify token
        if t_strIDUpper in self.s_KeywordTable:
            # Found it, replace ID with specific type
            t_eToken = self.s_KeywordTable[t_strIDUpper]
            if (t_eToken == TOKEN.NOT):
                t_strID = self.m_Scanner.checkForNotPredicates()
                t_strIDUpper = t_strID.upper()
        self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(t_eToken, t_strID, t_strIDUpper))

    # used for various operators
    def Tokenize_op(self, check_tokens = [ParseToken.tokEQUAL]):
        self.m_Scanner.BeginLexeme()
        if self.m_Scanner.PeekNextToken() in check_tokens:
            # Check for composite operators (e.g. <=, >=, !=, etc.)
            self.m_Scanner.NextToken()
        self.m_Scanner.NextToken()
        self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.OP, self.m_Scanner.EndLexeme()))

    # either || operator, or unknown
    def Tokenize_vbar(self):
        self.m_Scanner.BeginLexeme()
        t_eToken = TOKEN.OP
        if self.m_Scanner.PeekNextToken() == ParseToken.tokVBAR:
            self.m_Scanner.Skip(2)
        else:
            self.m_Scanner.NextToken()
            t_eToken = TOKEN.UNKNOWN
        self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(t_eToken, self.m_Scanner.EndLexeme(), self.m_Scanner.EndUpperLexeme()))

    def Tokenize_lbrace(self):
        self.m_Scanner.NextToken() # Skip '{'
        # Create a checkpoint
        t_CP = self.m_Scanner.CreateCheckPoint()
        self.m_Scanner.SkipWhitespace()
        # Scan in a potential keyword
        t_strKeyword = self.m_Scanner.Keyword()
        if t_strKeyword in ["d", "ds", "t", "ts"]:
            # Recognized dts token
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.UNKNOWN, "{"))
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.DTS, t_strKeyword))
        else:
            # wasn't a dts keyword, restore to check point
            self.m_Scanner.RestoreCheckPoint(t_CP)
            self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.UNKNOWN, "{"))

    # Resolve parameters and perform appropriate substitutions
    def Resolve(self, p_strInput, p_Parameters):
        pOut = PreParseResult()
        pOut.p_eStmtType = StatementType.UPDATE
        if self.ParamInfoGet() == None:
            self.ParamInfoSet(intersystems_iris._IRISList._IRISList())
        else:
            self.ParamInfoGet().clear() # reset buffer
        # Get an enumerator on the token collection
        t_Enum = self.m_Tokens.GetEnumerator()
        for i in range(1):
            # If Parameter list is not empty prior then we have bound parameters
            # from a previous parse (or user inputted?)
            t_bBoundParameters = (len(p_Parameters._params_list) > 0)
            if self.m_Tokens.Count() < 2:
                pOut.sResult = p_strInput
                break # Resolved
            # Make first token current (we know we have at least 2 tokens)
            t_Enum.MoveNext()
            t_str = t_Enum.Current().UpperLexeme
            # TODO: comments are not skipped when the enumerator is reset later in the algorithm; does this need to be fixed? Is this worth fixing?
            while TOKEN.UNKNOWN == t_Enum.Current().TokenTypeGet() and t_str.startswith("/*"):
                t_Enum.MoveNext() # skip comments
                t_str = t_Enum.Current().UpperLexeme
            # Determine statement types that need further processing
            if t_str in self.s_ParsedStatements:
                pOut.p_eStmtType = self.s_ParsedStatements[t_str]
                self.CacheOnServerSet(True)
            else:
                if t_str in self.s_StatementTable:
                    pOut.p_eStmtType = self.s_StatementTable[t_str]
                    # Copy the whole statement to the output
                    if self.m_bBracketSubstitution and self.m_bDelimitedIdentifiers:
                        t_Enum.Reset()
                        while t_Enum.MoveNext():
                            pOut.sResult += t_Enum.Current().Lexeme + " "
                    else:
                        # Copy the whole statement to the output and ignore tokenizing
                        # syntax can fail if not exact
                        pOut.sResult += p_strInput
                    if t_str == "EXPLAIN" and pOut.p_eStmtType == StatementType.CALLWITHRESULT:
                        pQuery = p_strInput
                        pAlt = "ShowPlan"
                        pStat = "0"
                        pQuery = pQuery[(pQuery.upper().find("EXPLAIN") + len("EXPLAIN")):] # slice off "EXPLAIN"
                        while t_Enum.MoveNext():
                            if t_Enum.Current().UpperLexeme == "ALT":
                                pAlt = "ShowPlanAlt"
                                pQuery = pQuery[(pQuery.upper().find("ALT") + len("ALT")):] # slice off "ALT"
                            elif t_Enum.Current().UpperLexeme == "STAT":
                                pStat = "1"
                                pQuery = pQuery[(pQuery.upper().find("STAT") + len("STAT")):] # slice off "STAT"
                            else:
                                p_Parameters._clear()
                                p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(pQuery, ParameterMode.REPLACED_LITERAL))
                                p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(pStat, ParameterMode.REPLACED_LITERAL))
                                p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(pAlt, ParameterMode.REPLACED_LITERAL))
                                p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter("", ParameterMode.REPLACED_LITERAL))
                                pOut.sResult = "select %SYSTEM . QUERY_PLAN ( :%qpar(1) , :%qpar(2) , :%qpar(3) , :%qpar(4) ) as Plan"
                                pOut.p_eStmtType = StatementType.QUERY
                                self.m_ParamInfo.add(4)
                                self.m_ParamInfo.add('c')
                                self.m_ParamInfo.add(2)
                                self.m_ParamInfo.add('c')
                                self.m_ParamInfo.add(1)
                                self.m_ParamInfo.add('c')
                                self.m_ParamInfo.add(1)
                                self.m_ParamInfo.add('c')
                                self.m_ParamInfo.add(1)
                                self.CacheOnServerSet(False)
                                return pOut
                    break # Resolved
                else:
                    if t_str in self.s_TransactionStatements:
                        self.CacheOnServerSet(True)
                    else:
                        if t_str.startswith("("):
                            if t_Enum.MoveNext():
                                t_str = t_Enum.Current().UpperLexeme
                                if t_str == "SELECT":
                                    pOut.p_eStmtType = self.s_ParsedStatements[t_str]
                                t_Enum.MovePrevious()
                                self.CacheOnServerSet(True)
                        if self.m_Tokens.First().GetValue().UpperEquals("SET"):
                            # Resolve "SET TRANSACTION" and "SET OPTION"
                            t_NewEnum = self.m_Tokens.GetEnumerator()
                            t_NewEnum.MoveNext() # "SET" is current
                            bMoveNext = t_NewEnum.MoveNext() # token after "SET" is current (if any)
                            if bMoveNext and t_NewEnum.Current().UpperEquals("TRANSACTION"):
                                self.CacheOnServerSet(True)
                            if 5 == self.m_Tokens.Count():
                                if bMoveNext and t_NewEnum.Current().UpperEquals("OPTION"):
                                    bMoveNext = t_NewEnum.MoveNext()
                                    if bMoveNext and t_NewEnum.Current().UpperEquals("BLOB_SUPPORT"):
                                        bMoveNext = t_NewEnum.MoveNext()
                                        if bMoveNext and t_NewEnum.Current().UpperEquals("="):
                                            bMoveNext = t_NewEnum.MoveNext()
                                            if bMoveNext and t_NewEnum.Current().UpperEquals("1"):
                                                pOut.p_eStmtType = StatementType.STREAMS_ON
                                            elif bMoveNext and t_NewEnum.Current().UpperEquals("0"):
                                                pOut.p_eStmtType = StatementType.STREAMS_OFF
                                            else:
                                                raise Exception("BLOB_SUPPORT must be 0 or 1")
                                        else:
                                            raise Exception("Expected '=' after BLOB_SUPPORT")
                                    elif bMoveNext and t_NewEnum.Current().UpperEquals("SYNCHRONOUS_COMMIT"):
                                        bMoveNext = t_NewEnum.MoveNext()
                                        if bMoveNext and t_NewEnum.Current().UpperEquals("="):
                                            bMoveNext = t_NewEnum.MoveNext()
                                            if bMoveNext and t_NewEnum.Current().UpperEquals("1"):
                                                pOut.p_eStmtType = StatementType.SYNC_COMMIT
                                            elif bMoveNext and t_NewEnum.Current().UpperEquals("0"):
                                                pOut.p_eStmtType = StatementType.ASYNC_COMMIT
                                            else:
                                                raise Exception("SYNCHRONOUS_COMMIT must be 0 or 1")
                                        else:
                                            raise Exception("Expected '=' after SYNCHRONOUS_COMMIT")
                                    else:
                                        # aren't there other options beyond BLOB_SUPPORT and SYNCHRONOUS_COMMIT?
                                        raise Exception("Unknown SET OPTION")
                            t_Enum.Reset()
                            while t_Enum.MoveNext():
                                pOut.sResult += t_Enum.Current().Lexeme + " "
                            break # Resolved
                        # check for Exec and Call statements
                        if (not self.CacheOnServerGet()) and self.Exec(pOut, p_Parameters):
                            self.CacheOnServerSet(True)
                            break
                        self.m_nUndefinedCount = 0
                        if (not self.CacheOnServerGet()) and self.Call(pOut, p_Parameters):
                            self.CacheOnServerSet(True)
                            break

            pOut.sResult = ""
            t_Enum.Reset()

            self.t_nOpenParen = 0 # keeps track of number of open parentheses
            self.t_nOrdinal = 0 # keeps track of where in p_Parameters new parameters will be inserted
            self.t_nRound = 0 # keeps track of which argument of ROUND is being parsed
            self.t_nRoundNested = 0 # keeps track of any nested parentheses inside of a ROUND argument

            self.orderbyToken = None
            self.lastToken = None # previous token that was resolved (not counting things like parentheses and commas)

            t_bQuitLoop = False # currently nothing meaningful is done with this
            bFirstElement = True
            resolve_switcher = {
                TOKEN.QUESTION_MARK: self.Resolve_question_mark,
                TOKEN.ATSIGN: self.Resolve_atsign,
                TOKEN.HEX: self.Resolve_hex,
                TOKEN.ID: functools.partial(self.Resolve_id, stmtType = pOut.p_eStmtType),
                TOKEN.STRFUNCTION: self.Resolve_strfunction,
                TOKEN.DATATYPE: self.Resolve_datatype,
                TOKEN.OPEN_PAREN: self.Resolve_open_paren,
                TOKEN.CLOSE_PAREN: self.Resolve_close_paren,
                TOKEN.OP: self.Resolve_op,
                TOKEN.CONSTANT: self.Resolve_constant,
                # TOKEN.NULL: self.Resolve_null,
                TOKEN.COMMA: self.Resolve_comma
            }
            while (not t_bQuitLoop) and t_Enum.MoveNext():
                t_Token = t_Enum.Current()
                if bFirstElement:
                    bFirstElement = False
                    if t_Token.UpperEquals("{"):
                        raise Exception("'{' encountered at the beginning of the statement") # , "37000", 37000)

                resolve_func = resolve_switcher.get(t_Token.TokenTypeGet(), None)
                if resolve_func is not None:
                    t_bQuitLoop = resolve_func(p_Parameters, t_Enum, t_Token, t_bBoundParameters)

                if t_Token.TokenTypeGet() not in [TOKEN.COMMA, TOKEN.OPEN_PAREN, TOKEN.CLOSE_PAREN]:
                    self.lastToken = t_Token

            # now that we've resolved every token, need to replace parameters with ":%qpar" syntax
            t_Enum.Reset()
            t_nParamIndex = 1
            t_count = 0

            bExecute = False
            while t_Enum.MoveNext():
                t_count += 1
                t_Token = t_Enum.Current()

                # exclude an initial "EXECUTE" from the final preparsed statement
                if t_Token.UpperEquals("EXECUTE"):
                    bExecute = True
                if (2 == t_count) and (bExecute):
                    if t_Token.UpperEquals("SELECT"):
                        pOut.p_eStmtType = StatementType.QUERY
                        pOut.sResult = ""
                    elif t_Token.UpperEquals("UPDATE") or t_Token.UpperEquals("INSERT"):
                        pOut.p_eStmtType = StatementType.UPDATE
                        pOut.sResult = ""

                if TOKEN.QUESTION_MARK == t_Token.TokenTypeGet() or TOKEN.ATSIGN == t_Token.TokenTypeGet():
                    pOut.sResult += "?" if self.embedded else ":%qpar({0})".format(t_nParamIndex)
                    t_nParamIndex += 1
                    if t_count < t_Enum.Count():
                        pOut.sResult += ' '
                else:
                    pOut.sResult += t_Token.Lexeme
                    if t_count < t_Enum.Count():
                        pOut.sResult += ' '
                    if t_Token.UpperEquals("SELECT"):
                        pOut.sResult = self.appendRowId(pOut.sResult)
                    if t_Token.UpperEquals("ORDER"):
                        haveMore = t_Enum.MoveNext()
                        if haveMore:
                            pOut.sResult += t_Enum.Current().Lexeme
                            if t_count < t_Enum.Count():
                                pOut.sResult += ' '
                            if t_Enum.Current().UpperEquals("BY"):
                                pOut.sResult = self.appendIdAdded(pOut.sResult)
            # create paramInfo $list to be passed to server
            length = 0
            if len(p_Parameters._params_list) > 0:
                item = p_Parameters._params_list[0]
                if isinstance(item, list) or isinstance(item, tuple):
                    length = len(item)
                else:
                    length = len(p_Parameters._params_list)
            self.m_ParamInfo.add(length - self.m_ExecParamCount) #len(p_Parameters._params_list)
            if length - self.m_ExecParamCount > 0:
                t_Enum.Reset()
                nParamIndex = 1
                p_Parameters._user_index = [-1]
                while t_Enum.MoveNext():
                    if TOKEN.QUESTION_MARK == t_Enum.Current().TokenTypeGet() or TOKEN.ATSIGN == t_Enum.Current().TokenTypeGet():
                        if t_Enum.Current().m_replaced:
                            self.m_ParamInfo.add('c')
                        else:
                            self.m_ParamInfo.add('?')
                            p_Parameters._add_user_param(None)
                            p_Parameters._user_index.append(nParamIndex - 1)
                        self.m_ParamInfo.add(t_Enum.Current().m_format)
                        nParamIndex += 1
                        if nParamIndex == length + 1:
                            break
        return pOut

# '?' represents a parameter; adds a parameter to p_Parameters if none were provided
|
1040
|
+
def Resolve_question_mark(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
|
1041
|
+
self.t_nOrdinal += 1
|
1042
|
+
if not t_bBoundParameters:
|
1043
|
+
p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter("?", ParameterMode.INPUT, '?'))
|
1044
|
+
return False
|
1045
|
+
|
1046
|
+
# "@" used for named parameters
|
1047
|
+
def Resolve_atsign(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
|
1048
|
+
self.hasNamedParameters = True
|
1049
|
+
self.t_nOrdinal += 1
|
1050
|
+
if (not t_bBoundParameters) or len(p_Parameters._params_list) == 0:
|
1051
|
+
p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.UNKNOWN))
|
1052
|
+
else:
|
1053
|
+
if not matchUpParam(p_Parameters, t_Token.Lexeme, len(p_Parameters._params_list)):
|
1054
|
+
p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.UNKNOWN))
|
1055
|
+
return False
|
1056
|
+
|
1057
|
+
# replaces a hex literal with a parameter
|
1058
|
+
def Resolve_hex(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
|
1059
|
+
self.t_nOrdinal += 1
|
1060
|
+
cp = intersystems_iris.dbapi._Parameter._Parameter(bytes.fromhex(t_Token.Lexeme[2:]), ParameterMode.REPLACED_LITERAL, '?', type = intersystems_iris.dbapi._DBAPI.SQLType.BINARY)
|
1061
|
+
p_Parameters._params_list.append(cp)
|
1062
|
+
t_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
|
1063
|
+
return False
|
1064
|
+
|
1065
|
+
def Resolve_id(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters, stmtType):
|
1066
|
+
if self.orderbyToken is not None and t_Enum.Current().UpperEquals("UNION"):
|
1067
|
+
self.orderbyToken = None
|
1068
|
+
if self.lastToken is not None and self.lastToken == self.orderbyToken:
|
1069
|
+
self.orderbyToken = t_Token
|
1070
|
+
self.lastToken = t_Token
|
1071
|
+
return False
|
1072
|
+
# ORDER follows parameters, quit early
|
1073
|
+
if t_Token.UpperEquals("ORDER"):
|
1074
|
+
t_NewEnum = t_Enum.Clone()
|
1075
|
+
if t_NewEnum.MoveNext():
|
1076
|
+
t_NewToken = t_NewEnum.Current()
|
1077
|
+
if t_NewToken.UpperEquals("BY"):
|
1078
|
+
self.orderbyToken = t_NewToken
|
1079
|
+
if self.t_nOpenParen == 0:
|
1080
|
+
return False
|
1081
|
+
else:
|
1082
|
+
while t_Enum.MoveNext():
|
1083
|
+
if t_Enum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
|
1084
|
+
self.t_nOpenParen -= 1
|
1085
|
+
break
|
1086
|
+
elif (TOKEN.ID == t_Enum.Current().TokenTypeGet()) and (t_Enum.Current().UpperEquals("UNION")):
|
1087
|
+
break
|
1088
|
+
# JSON_TABLE should have no literal substitution
|
1089
|
+
if t_Token.UpperContains("JSON_") or t_Token.UpperContains("_JSON"):
|
1090
|
+
startParen = self.t_nOpenParen
|
1091
|
+
while t_Enum.MoveNext():
|
1092
|
+
if t_Enum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
|
1093
|
+
self.t_nOpenParen += 1
|
1094
|
+
if t_Enum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
|
1095
|
+
self.t_nOpenParen -= 1
|
1096
|
+
if self.t_nOpenParen == startParen:
|
1097
|
+
break
|
1098
|
+
# ROUND special handling for second parameter
|
1099
|
+
if t_Token.UpperEquals("ROUND"):
|
1100
|
+
if stmtType == StatementType.QUERY and self.t_nRound == 0:
|
1101
|
+
self.t_nRound = 1
|
1102
|
+
# DATEPART with first parameter sent as is, not a literal
|
1103
|
+
if t_Token.UpperEquals("DATEPART") or t_Token.UpperEquals("TIMESTAMPADD") or t_Token.UpperEquals("TIMESTAMPDIFF"):
|
1104
|
+
if t_Enum.MoveNext():
|
1105
|
+
if t_Enum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
|
1106
|
+
while t_Enum.MoveNext():
|
1107
|
+
if t_Enum.Current().TokenTypeGet() == TOKEN.CONSTANT:
|
1108
|
+
t_Enum.Current().TokenTypeSet(TOKEN.ID)
|
1109
|
+
break
|
1110
|
+
if t_Enum.Current().TokenTypeGet() in [TOKEN.COMMA, TOKEN.CLOSE_PAREN]:
|
1111
|
+
break
|
1112
|
+
else:
|
1113
|
+
t_Enum.MovePrevious()
|
1114
|
+
return False
|
1115
|
+
|
1116
|
+
# I honestly have no idea why this method does what it does
|
1117
|
+
def Resolve_strfunction(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
|
1118
|
+
parenLevel = 0
|
1119
|
+
t_TokenLast = None
|
1120
|
+
inOrderBy = False
|
1121
|
+
while t_Enum.MoveNext():
|
1122
|
+
if t_TokenLast is not None and t_TokenLast.UpperLexeme == "ORDER":
|
1123
|
+
if t_Enum.Current().UpperLexeme == "BY":
|
1124
|
+
inOrderBy = True
|
1125
|
+
t_TokenLast = t_Enum.Current()
|
1126
|
+
if parenLevel == 1 and t_Enum.Current().TokenTypeGet() == TOKEN.COMMA:
|
1127
|
+
while t_Enum.MoveNext():
|
1128
|
+
if t_Enum.Current().TokenTypeGet() == TOKEN.CONSTANT:
|
1129
|
+
t_Enum.Current().TokenTypeSet(TOKEN.ID)
|
1130
|
+
if parenLevel == 1:
|
1131
|
+
break
|
1132
|
+
elif t_Enum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
|
1133
|
+
parenLevel += 1
|
1134
|
+
elif t_Enum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
|
1135
|
+
parenLevel -= 1
|
1136
|
+
if parenLevel == 1:
|
1137
|
+
break
|
1138
|
+
elif t_Enum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
|
1139
|
+
parenLevel += 1
|
1140
|
+
elif t_Enum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
|
1141
|
+
if parenLevel == 1:
|
1142
|
+
break
|
1143
|
+
parenLevel -= 1
|
1144
|
+
elif t_Enum.Current().TokenTypeGet() == TOKEN.CONSTANT:
|
1145
|
+
bSubstitute = not inOrderBy
|
1146
|
+
if parenLevel > 1:
|
1147
|
+
t_Enum.MovePrevious()
|
1148
|
+
if TOKEN.OPEN_PAREN == t_Enum.Current().TokenTypeGet():
|
1149
|
+
t_Enum.MoveNext()
|
1150
|
+
t_Enum.MoveNext()
|
1151
|
+
if TOKEN.CLOSE_PAREN == t_Enum.Current().TokenTypeGet():
|
1152
|
+
bSubstitute = False
|
1153
|
+
t_Enum.MovePrevious()
|
1154
|
+
else:
|
1155
|
+
t_Enum.MoveNext()
|
1156
|
+
if bSubstitute:
|
1157
|
+
t_Token = t_Enum.Current()
|
1158
|
+
self.t_nOrdinal = self.DynamicVariable(t_bBoundParameters, t_Token, self.t_nOrdinal, p_Parameters)
|
1159
|
+
if parenLevel == 0:
|
1160
|
+
break
|
1161
|
+
return False
|
1162
|
+
|
1163
|
+
    # Skips over the data type's arguments (if any)
    def Resolve_datatype(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
        t_NewEnum = t_Enum.Clone()
        if t_NewEnum.MoveNext():
            t_NewToken = t_NewEnum.Current()
            if TOKEN.OPEN_PAREN == t_NewToken.TokenTypeGet():
                while t_NewEnum.MoveNext():
                    t_NewToken = t_NewEnum.Current()
                    if t_NewToken.TokenTypeGet() == TOKEN.CLOSE_PAREN:
                        break
                t_Enum = t_NewEnum
        return False

    # generally just increments t_nOpenParen (and t_nRoundNested, when relevant),
    # but also checks for "((CONSTANT))" syntax (this is a way you can get the preparser to not replace a constant with a parameter)
    def Resolve_open_paren(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
        self.t_nOpenParen += 1
        t_NewEnum = t_Enum.Clone()
        if self.t_nRound > 0:
            self.t_nRoundNested += 1
        if t_NewEnum.MoveNext():
            t_NewToken = t_NewEnum.Current()
            if TOKEN.OPEN_PAREN == t_NewToken.TokenTypeGet():
                self.t_nOpenParen += 1
                if t_NewEnum.MoveNext():
                    t_NewToken = t_NewEnum.Current()
                    bCurlyBrace = (t_NewToken.Lexeme == "{")
                    if TOKEN.CONSTANT == t_NewToken.TokenTypeGet() or bCurlyBrace:
                        if t_NewEnum.MoveNext():
                            t_NewToken = t_NewEnum.Current()
                            if bCurlyBrace:
                                while t_NewToken.Lexeme != "}":
                                    if not t_NewEnum.MoveNext():
                                        bCurlyBrace = False
                                        break
                                    t_NewToken = t_NewEnum.Current()
                                bCurlyBrace = False
                                if not t_NewEnum.MoveNext():
                                    return False
                                t_NewToken = t_NewEnum.Current()
                            if TOKEN.CLOSE_PAREN == t_NewToken.TokenTypeGet():
                                self.t_nOpenParen -= 1
                                if t_NewEnum.MoveNext():
                                    t_NewToken = t_NewEnum.Current()
                                    if TOKEN.CLOSE_PAREN == t_NewToken.TokenTypeGet():
                                        self.t_nOpenParen -= 1
                                        t_Enum = t_NewEnum
                                        if self.t_nRound > 0:
                                            self.t_nRoundNested -= 1
        return False

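The ((CONSTANT)) escape hatch mentioned in the comment is easiest to see side by side; a minimal, made-up illustration of the intended contrast (not output captured from the package):

# Illustrative only: how the double-paren escape hatch described above is meant to be used.
substituted = "SELECT name FROM t WHERE k = 5"       # 5 would normally be rewritten to ? and bound
kept_inline = "SELECT name FROM t WHERE k = ((5))"   # the extra parentheses keep 5 in the statement text
print(substituted)
print(kept_inline)
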
    # decrements t_nOpenParen (and t_nRoundNested, when relevant)
    def Resolve_close_paren(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
        if self.t_nRound > 0:
            self.t_nRoundNested -= 1
        self.t_nOpenParen -= 1
        return False

    # skips over "(CONSTANT)" after an operator (another way to get the preparser to not replace a constant with a parameter)
    def Resolve_op(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
        t_NewEnum = t_Enum.Clone()
        if t_NewEnum.MoveNext():
            t_NewToken = t_NewEnum.Current()
            if TOKEN.OPEN_PAREN == t_NewToken.TokenTypeGet():
                if t_NewEnum.MoveNext() and t_NewEnum.Current().TokenTypeGet() == TOKEN.CONSTANT:
                    if t_NewEnum.MoveNext() and t_NewEnum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
                        t_Enum = t_NewEnum
        return False

    def Resolve_constant(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
        # the second argument (and beyond?) of ROUND should not be replaced with a parameter
        if (self.t_nRound == 2) and (self.t_nRoundNested == 1):
            t_Enum.MoveNext()
            if TOKEN.COMMA != t_Enum.Current().TokenTypeGet():
                self.t_nRound = 0
                self.t_nRoundNested = 0
            t_Enum.MovePrevious()
            return False
        # Detect and Skip IN clause
        if self.lastToken is not None:
            if self.lastToken == self.orderbyToken:
                self.orderbyToken = t_Token
                self.lastToken = t_Token
                return False
        t_NewEnum = t_Enum.Clone()

        # not 100% sure what this block does
        if t_NewEnum.MoveNext() and t_NewEnum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
            t_NewEnum.MovePrevious()
        if t_NewEnum.MovePrevious() and (t_NewEnum.Current().Lexeme[0] == '-'):
            t_NewEnum.MovePrevious()
        if t_Enum.Current() is not None and t_NewEnum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
            t_NewEnum.MovePrevious()
        if t_Enum.Current() is not None:
            if TOKEN.ID != t_NewEnum.Current().TokenTypeGet() or (t_NewEnum.Current().UpperLexeme in _PreParser.s_replaceparm):
                t_Enum.MoveNext()
                return False

        # determine format the constant will be sent to the server in (stored in paramInfo at the end of Resolve())
        if t_Enum.Current() is not None:
            c = t_Enum.Current().Lexeme
            if c[0] == '\'' or c[0] == '"':
                if c[-1] != c[0]:
                    raise Exception("unmatched quote in " + t_Enum.Current().Lexeme)
                t_Enum.Current().m_format = intersystems_iris.dbapi.preparser._Token._Token.CAST_CHAR
            else:
                isInt = True
                for ii in range(len(c)):
                    if c[ii] in ['.', 'e', 'E']:
                        isInt = False
                        break
                if isInt:
                    if (21 < len(c)) or ((c[0] == '-') and (20 < len(c))):
                        t_Enum.Current().m_format = intersystems_iris.dbapi.preparser._Token._Token.CAST_CHAR
                    else:
                        t_Enum.Current().m_format = intersystems_iris.dbapi.preparser._Token._Token.CAST_INT
                else:
                    t_Enum.Current().m_format = intersystems_iris.dbapi.preparser._Token._Token.CAST_NUM
        self.t_nOrdinal = self.DynamicVariable(t_bBoundParameters, t_Token, self.t_nOrdinal, p_Parameters)
        return False

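The format decision above is compact but easy to misread; here is a standalone restatement of the same rules as a small helper. The CAST_* strings mirror the names of the token constants, and the helper itself is illustrative rather than part of the package:

# Standalone sketch of the classification rules used by Resolve_constant.
def classify_literal(lexeme: str) -> str:
    if lexeme[0] in ("'", '"'):
        if lexeme[-1] != lexeme[0]:
            raise ValueError("unmatched quote in " + lexeme)
        return "CAST_CHAR"
    if any(ch in lexeme for ch in (".", "e", "E")):
        return "CAST_NUM"
    # very long integers (more than 21 characters, or more than 20 with a leading minus) are sent as text
    if len(lexeme) > 21 or (lexeme[0] == "-" and len(lexeme) > 20):
        return "CAST_CHAR"
    return "CAST_INT"

assert classify_literal("'abc'") == "CAST_CHAR"
assert classify_literal("42") == "CAST_INT"
assert classify_literal("3.14") == "CAST_NUM"
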
    # not sure why this does what it does
    def Resolve_null(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
        t_NewEnum = t_Enum.Clone()
        if t_NewEnum.MovePrevious():
            t_PreviousToken = t_NewEnum.Current()
            if t_PreviousToken.TokenTypeGet() not in [TOKEN.NOT, TOKEN.IS, TOKEN.THEN, TOKEN.COMMA, TOKEN.OPEN_PAREN, TOKEN.ELSE]:
                self.t_nOrdinal = self.Null(t_bBoundParameters, t_Token, self.t_nOrdinal, p_Parameters)
        return False

    def Resolve_comma(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
        if (self.t_nRoundNested == 1) and (self.t_nRound == 1):
            self.t_nRound += 1
        return False

    # no idea what this does, I don't think it's used anywhere, but I kept it in just in case
    @classmethod
    def GetHexVal(cls, hex):
        """ generated source for method GetHexVal """
        val = ord(hex)  # character code of the hex digit
        return val - (48 if val < 58 else (55 if val < 97 else 87))

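The magic numbers are ASCII offsets: ord('0') is 48, ord('A') - 10 is 55, and ord('a') - 10 is 87, so the expression maps a single hex digit character to its numeric value. A quick self-contained check:

# Verifies the offset arithmetic used by GetHexVal against int(ch, 16).
for ch, expected in (("0", 0), ("9", 9), ("A", 10), ("f", 15)):
    val = ord(ch)
    assert val - (48 if val < 58 else (55 if val < 97 else 87)) == expected == int(ch, 16)
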
    # not sure why this does what it does
    def Null(self, p_bBoundParameters, p_Token, p_nOrdinal, p_Parameters):
        p_nOrdinal += 1
        t_Parameter = intersystems_iris.dbapi._Parameter._Parameter(mode = ParameterMode.DEFAULT_PARAMETER, paramType = 'c')
        if p_bBoundParameters:
            p_Parameters._params_list.insert(p_nOrdinal - 1, t_Parameter)
        else:
            p_Parameters._params_list.append(t_Parameter)
        p_Token.Lexeme = "?"
        p_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
        p_Token.m_replaced = True
        return p_nOrdinal

    # matches a named parameter in the SQL statement with a param in the list of parameters
    # param - list of parameters (p_Parameters from Resolve())
    # paramName - name of the parameter to be matched
    # numParam - len(param)
    def matchUpParam(self, param, paramName, numParam):
        match = False
        if not self.hasNamedParameters or (paramName == None or paramName == "" or paramName[0] != '@'):
            return False
        for i in range(len(param._params_list)):
            if (param._params_list[i].name.upper() == paramName.upper()) or (("@" + param._params_list[i].name.upper()) == paramName.upper()):
                match = True
                if i != numParam:
                    cp = param._params_list[i]
                    cporig = cp
                    if not cporig.parsermatched:
                        del param._params_list[i:(i+1)]
                    else:
                        cp = cporig.Clone()
                        cp.name = cporig.name + str(numParam)
                        cp.mode = ParameterMode.UNKNOWN
                        if cporig.matchedParameterList == None:
                            cporig.matchedParameterList = []
                        cporig.matchedParameterList.append(cp)
                    cp.parsermatched = True
                    param._params_list.insert(numParam, cp)
                else:
                    param._params_list[i].parsermatched = True
                break
        return match

    # I don't 100% follow this function, but I'm pretty sure it spends most of its time trying to isolate a return parameter, if any, then preparses as normal (?)
    def Call(self, pOut, p_Parameters):
        t_bRet = False
        pOut.p_eStmtType = StatementType.UPDATE
        pOut.sResult = ""
        for i in range(1):
            t_Enum = self.m_Tokens.GetEnumerator()
            t_Enum.MoveNext()
            t_str = t_Enum.Current().UpperLexeme
            while (TOKEN.UNKNOWN == t_Enum.Current().TokenTypeGet()) and t_str.startswith("/*"):
                t_Enum.MoveNext() # skip comments
                t_str = t_Enum.Current().UpperLexeme
            t_Token = t_Enum.Current()
            if t_Token.Lexeme[0] == '{':
                t_Enum.MoveNext()
                t_Token = t_Enum.Current()
            returnParam = None
            # expects either "? = ..." or one of "CALL", "EXEC", "EXECUTE"
            if t_Token.TokenTypeGet() == TOKEN.QUESTION_MARK:
                returnParam = intersystems_iris.dbapi._Parameter._Parameter("?", ParameterMode.RETURN_VALUE, '?')
                if not t_Enum.MoveNext() or t_Enum.Current().Lexeme[0] != '=':
                    break
                if not t_Enum.MoveNext():
                    break
            elif not (t_Enum.Current().UpperEquals("CALL") or t_Enum.Current().UpperEquals("EXEC") or t_Enum.Current().UpperEquals("EXECUTE")):
                return False

            # not really sure what to make of the next couple blocks of code
            # feels like they should maybe be in another elif block, not their own if block
            if t_Token.TokenTypeGet() == TOKEN.ATSIGN:
                self.hasNamedParameters = True
                returnParam = intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.RETURN_VALUE)
                if not t_Enum.MoveNext() or t_Enum.Current().Lexeme[0] != '=':
                    break
                if not t_Enum.MoveNext():
                    break
            if t_Enum.Current().UpperEquals("CALL") or t_Enum.Current().UpperEquals("EXEC") or t_Enum.Current().UpperEquals("EXECUTE"):
                if not t_Enum.MoveNext():
                    break
            else:
                if TOKEN.STRFUNCTION == t_Enum.Current().TokenTypeGet():
                    break

            pOut.sResult += t_Enum.Current().Lexeme
            t_Token = t_Enum.Current()
            if t_Token.UpperEquals("SELECT") or t_Token.UpperEquals("UPDATE") or t_Token.UpperEquals("INSERT"):
                pOut.sResult = ""
                break
            if not t_Enum.MoveNext():
                break
            t_Token = t_Enum.Current()
            if t_Token.UpperEquals("SELECT") or t_Token.UpperEquals("UPDATE") or t_Token.UpperEquals("INSERT"):
                pOut.sResult = ""
                break
            t_bQuitLoop = False
            while t_Token.Lexeme[0] == '.':
                pOut.sResult += '.'
                if not t_Enum.MoveNext():
                    t_bQuitLoop = True
                    break
                t_Token = t_Enum.Current()
                if t_Token.TokenTypeGet() == TOKEN.ID:
                    pOut.sResult += t_Token.Lexeme
                if not t_Enum.MoveNext():
                    t_bQuitLoop = True
                    break
                t_Token = t_Enum.Current()
            t_bBoundParameters = (len(p_Parameters._params_list) > 0)
            t_nOrdinal = 0
            if returnParam is not None:
                t_nOrdinal += 1
                if not t_bBoundParameters:
                    p_Parameters._params_list.insert(0, returnParam)
                else:
                    if not self.matchUpParam(p_Parameters, returnParam.GetName(), t_nOrdinal - 1):
                        if p_Parameters._params_list[0].mode != ParameterMode.RETURN_VALUE:
                            p_Parameters._params_list.insert(0, returnParam)
            if not t_bQuitLoop:
                t_eLastToken = TOKEN.UNKNOWN
                call_switcher = {
                    TOKEN.QUESTION_MARK: self.Call_question_mark,
                    TOKEN.ATSIGN: self.Call_atsign,
                    TOKEN.HEX: self.Call_hex,
                    TOKEN.CONSTANT: functools.partial(self.Call_constant_id, t_Enum = t_Enum),
                    TOKEN.ID: functools.partial(self.Call_constant_id, t_Enum = t_Enum),
                    TOKEN.NULL: self.Call_null,
                    TOKEN.COMMA: functools.partial(self.Call_comma_paren, t_eLastToken = t_eLastToken),
                    TOKEN.CLOSE_PAREN: functools.partial(self.Call_comma_paren, t_eLastToken = t_eLastToken)
                }
                while True:
                    t_Token = t_Enum.Current()
                    call_func = call_switcher.get(t_Token.TokenTypeGet(), self.Call_default)
                    (t_nOrdinal, t_eLastToken) = call_func(p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters)

                    if not t_Enum.MoveNext():
                        break
            pOut.p_eStmtType = StatementType.CALL if (returnParam == None) else StatementType.CALLWITHRESULT
            t_bRet = True
        return t_bRet

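One behavioural detail of the call_switcher built above is worth spelling out: functools.partial captures the value of a keyword argument when the partial is constructed, not when it is later called, so the t_eLastToken bound into Call_comma_paren is whatever the variable held at dictionary-build time. A minimal, package-independent demonstration:

# functools.partial freezes keyword values at construction time.
import functools

def show(x, captured=None):
    return captured

captured = "initial"
f = functools.partial(show, captured=captured)
captured = "updated"
assert f(1) == "initial"  # the partial still sees the value bound when it was built
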
    def Call_default(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
        return (t_nOrdinal, t_Token.TokenTypeGet())

    def Call_question_mark(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
        if not t_bBoundParameters:
            p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter("?", ParameterMode.INPUT, '?'))
        return (t_nOrdinal + 1, TOKEN.QUESTION_MARK)

    def Call_atsign(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
        self.hasNamedParameters = True
        if (not t_bBoundParameters) or len(p_Parameters._params_list) == 0:
            p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.UNKNOWN))
        else:
            if not self.matchUpParam(p_Parameters, t_Token.Lexeme, t_nOrdinal):
                p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.UNKNOWN))
        return (t_nOrdinal + 1, TOKEN.ATSIGN)

    def Call_hex(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
        cp = intersystems_iris.dbapi._Parameter._Parameter(bytes.fromhex(t_Token.Lexeme[2:]), ParameterMode.REPLACED_LITERAL, '?')
        p_Parameters._params_list.append(cp)
        t_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
        return (t_nOrdinal + 1, TOKEN.QUESTION_MARK)

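Call_hex strips the 0x prefix and hands the remaining digits to bytes.fromhex, so a hex literal travels as a binary parameter value. A tiny illustration with a made-up token text:

# Illustrative only: how a 0x... literal becomes a bytes parameter value.
lexeme = "0x48656c6c6f"             # hypothetical token text, not taken from the package
value = bytes.fromhex(lexeme[2:])   # drop the "0x" prefix, as Call_hex does
assert value == b"Hello"
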
    def Call_constant_id(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters, t_Enum):
        t_NewEnum = t_Enum.Clone()
        if t_NewEnum.MovePrevious():
            t_PreviousToken = t_NewEnum.Current()
            if t_PreviousToken.TokenTypeGet() == TOKEN.OP:
                t_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
                return (self.DynamicVariable(t_bBoundParameters, intersystems_iris.dbapi.preparser._Token._Token(TOKEN.CONSTANT, t_PreviousToken.Lexeme, t_PreviousToken.UpperLexeme), t_nOrdinal, p_Parameters),
                        TOKEN.QUESTION_MARK)
        return (self.DynamicVariable(t_bBoundParameters, t_Token, t_nOrdinal, p_Parameters), TOKEN.QUESTION_MARK)

    def Call_null(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
        return (self.Null(t_bBoundParameters, t_Token, t_nOrdinal, p_Parameters), t_Token.TokenTypeGet())

    def Call_comma_paren(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters, t_eLastToken):
        if TOKEN.COMMA == t_eLastToken or TOKEN.OPEN_PAREN == t_eLastToken:
            t_Parameter = intersystems_iris.dbapi._Parameter._Parameter(mode = ParameterMode.DEFAULT_PARAMETER, paramType = 'd')
            t_nOrdinal += 1
            self.m_nUndefinedCount += 1
            if t_bBoundParameters:
                p_Parameters._params_list.insert(t_nOrdinal - 1, t_Parameter)
            else:
                p_Parameters._params_list.append(t_Parameter)
        return (t_nOrdinal, t_Token.TokenTypeGet())

    # No idea why this function does what it does
    def Exec(self, pOut, p_Parameters):
        t_bRet = False
        pOut.p_eStmtType = StatementType.UPDATE
        t_Enum = self.m_Tokens.GetEnumerator()
        for i in range(1):
            t_Enum.MoveNext()
            t_Token = t_Enum.Current()
            if not t_Token.UpperEquals("EXEC") and not t_Token.UpperEquals("EXECUTE"):
                break
            pOut.p_eStmtType = StatementType.CALL
            t_Enum.MoveNext()
            t_str = t_Enum.Current().UpperLexeme
            while (TOKEN.UNKNOWN == t_Enum.Current().TokenTypeGet()) and t_str.startswith("/*"):
                t_Enum.MoveNext() # skip comments
                t_str = t_Enum.Current().UpperLexeme
            t_Token = t_Enum.Current()
            if (t_Token.UpperEquals("SELECT")) or (t_Token.UpperEquals("UPDATE")) or (t_Token.UpperEquals("INSERT")):
                break
            t_bRet = True
            t_bHasReturnType = False
            if '@' == t_Token.Lexeme[0]:
                t_bHasReturnType = True
                if not t_Enum.MoveNext():
                    break
                if not t_Enum.MoveNext():
                    break
                t_Token = t_Enum.Current()
                if t_Token.Lexeme != "=":
                    break
                if not t_Enum.MoveNext():
                    break
                t_Token = t_Enum.Current()
            pOut.sResult += t_Token.Lexeme
            if not t_Enum.MoveNext():
                return True
            t_Token = t_Enum.Current()
            t_bQuitLoop = False
            while t_Token.Lexeme[0] == '.':
                pOut.sResult += '.'
                if not t_Enum.MoveNext():
                    t_bQuitLoop = True
                    break
                t_Token = t_Enum.Current()
                if t_Token.TokenTypeGet() == TOKEN.ID:
                    pOut.sResult += t_Token.Lexeme
                if not t_Enum.MoveNext():
                    t_bQuitLoop = True
                    break
                t_Token = t_Enum.Current()
            if t_bQuitLoop:
                break
            t_nOrdinal = 0
            while True:
                t_Token = t_Enum.Current()
                if TOKEN.COMMA == t_Token.TokenTypeGet():
                    if not t_Enum.MoveNext():
                        break
                    t_Token = t_Enum.Current()
                if t_Token.UpperEquals("WITH RECOMPILE"): # Shouldn't it be impossible for this to be a single token?
                    break
                t_strParameterName = ""
                if t_Token.Lexeme[0] == '@':
                    if t_Enum.MoveNext():
                        t_strParameterName = t_Enum.Current().Lexeme
                    bMoveNext = t_Enum.MoveNext()
                    if (not bMoveNext) or (t_Enum.Current().Lexeme != "="):
                        if not bMoveNext:
                            t_bQuitLoop = True
                        t_Param = intersystems_iris.dbapi._Parameter._Parameter(mode = ParameterMode.INPUT_OUTPUT, name = t_Token.Lexeme[1:], execParam = True)
                        self.m_ExecParamCount += 1
                        p_Parameters._params_list.append(t_Param)
                        continue
                    t_Enum.MoveNext()
                t_Token = t_Enum.Current()
                t_Enum.MoveNext()
                if t_Token.TokenTypeGet() not in [TOKEN.OPEN_PAREN, TOKEN.CLOSE_PAREN, TOKEN.QUESTION_MARK, TOKEN.UNKNOWN]:
                    if t_Token is not None:
                        if t_Token.Lexeme[0] == '-':
                            if not t_Enum.MoveNext():
                                t_bQuitLoop = True
                            else:
                                t_Token = t_Enum.Current()
                                t_NewToken = intersystems_iris.dbapi.preparser._Token._Token(TOKEN.CONSTANT, "-" + t_Token.Lexeme, "-" + t_Token.UpperLexeme)
                                t_nOrdinal = self.DynamicVariable(False, t_NewToken, t_nOrdinal, p_Parameters)
                        else:
                            t_nOrdinal = self.DynamicVariable(False, t_Token, t_nOrdinal, p_Parameters)
                        if not t_bQuitLoop:
                            t_Parameter = p_Parameters._params_list[-1]
                            t_Parameter.name = t_strParameterName
                            t_Parameter.execParam = True
                            self.m_ExecParamCount += 1
                if not t_Enum.MoveNext():
                    break
                if t_bQuitLoop:
                    break
            if t_bHasReturnType:
                pOut.p_eStmtType = StatementType.CALLWITHRESULT
                t_ReturnParam = intersystems_iris.dbapi._Parameter._Parameter(mode = ParameterMode.UNKNOWN, execParam = True)
                self.m_ExecParamCount += 1
                p_Parameters._params_list.insert(0, t_ReturnParam)
            else:
                pOut.p_eStmtType = StatementType.CALL
            t_bRet = True
        if 0 == self.m_ExecParamCount:
            return False
        return t_bRet

    # creates Parameter object for replaced literals
    def DynamicVariable(self, p_bBoundParameters, p_Token, p_nOrdinal, p_Parameters):
        p_nOrdinal += 1
        t_str = p_Token.Lexeme
        t_c = t_str[0]
        if t_c in ["'", "\""]:
            # Remove leading and trailing quotes
            t_str = t_str[1:-1]
            # Condense doubled quotes to a single quote
            t_i = 0
            while t_i < len(t_str) - 1:
                if (t_str[t_i] == t_c) and (t_str[t_i + 1] == t_c):
                    t_str = t_str[:t_i] + t_str[(t_i + 1):]
                t_i += 1
        else:
            if 'e' in t_str or 'E' in t_str:
                # Normalize number
                try:
                    t_double = float(t_str)
                    t_str = str(t_double)
                except ValueError:
                    # wasn't able to parse, leave as is
                    pass
            else:
                p = 0
                if t_str[p] == '+':
                    t_str = t_str[1:]
                if t_str[p] == '-':
                    p += 1
                while (p < len(t_str)) and (t_str[p] == '0'):
                    t_str = t_str[:p] + t_str[(p + 1):]
                if '.' in t_str:
                    while t_str[-1] == '0':
                        t_str = t_str[:-1]
                    if t_str[-1] == '.':
                        t_str = t_str[:-1]
                if p >= len(t_str):
                    t_str = "0"
        if p_bBoundParameters:
            p_Parameters._params_list.insert(p_nOrdinal - 1, intersystems_iris.dbapi._Parameter._Parameter(t_str, ParameterMode.REPLACED_LITERAL))
        else:
            p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_str, ParameterMode.REPLACED_LITERAL))
        p_Token.Lexeme = "?"
        p_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
        p_Token.m_replaced = True
        return p_nOrdinal

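DynamicVariable normalises the literal text before it is shipped as a parameter: quoted strings lose their outer quotes and have doubled quotes condensed, exponent-form numbers are run through float(), and plain numbers lose redundant signs and zeros. A standalone sketch of those rules under the same assumptions (an illustrative helper, not the package API, and slightly simplified):

# Illustrative sketch of the normalisation DynamicVariable applies to replaced literals.
def normalize_literal(text: str) -> str:
    quote = text[0]
    if quote in ("'", '"'):
        body = text[1:-1]
        return body.replace(quote * 2, quote)        # '' -> ' inside a quoted string
    if "e" in text or "E" in text:
        try:
            return str(float(text))                  # 1E2 -> 100.0
        except ValueError:
            return text
    sign, digits = ("-", text[1:]) if text[0] == "-" else ("", text.lstrip("+"))
    digits = digits.lstrip("0")
    if "." in digits:
        digits = digits.rstrip("0").rstrip(".")
    return sign + digits if digits else "0"

assert normalize_literal("'it''s'") == "it's"
assert normalize_literal("007.50") == "7.5"
assert normalize_literal("1E2") == "100.0"
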
    def appendRowId(self, sb):
        if self.m_addRowID != 0:
            return sb + "%ID ,"
        return sb

    def appendIdAdded(self, sb):
        if self.m_addRowID == 2:
            return sb + "%IDADDED "
        return sb