iris-pex-embedded-python 3.4.0b14__py3-none-any.whl → 3.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of iris-pex-embedded-python might be problematic.

Files changed (86)
  1. iop/_async_request.py +3 -1
  2. iop/_business_host.py +2 -1
  3. iop/_business_service.py +6 -0
  4. iop/_cli.py +9 -1
  5. iop/_common.py +11 -1
  6. iop/_debugpy.py +145 -0
  7. iop/_director.py +40 -40
  8. iop/_iris.py +7 -0
  9. iop/_log_manager.py +9 -9
  10. iop/_serialization.py +27 -19
  11. iop/_utils.py +10 -10
  12. iop/cls/IOP/Common.cls +3 -1
  13. iop/cls/IOP/Utils.cls +59 -9
  14. {iris_pex_embedded_python-3.4.0b14.dist-info → iris_pex_embedded_python-3.4.1.dist-info}/METADATA +2 -1
  15. iris_pex_embedded_python-3.4.1.dist-info/RECORD +80 -0
  16. {iris_pex_embedded_python-3.4.0b14.dist-info → iris_pex_embedded_python-3.4.1.dist-info}/WHEEL +1 -1
  17. iris_pex_embedded_python-3.4.1.dist-info/top_level.txt +2 -0
  18. intersystems_iris/_BufferReader.py +0 -10
  19. intersystems_iris/_BufferWriter.py +0 -32
  20. intersystems_iris/_ConnectionInformation.py +0 -56
  21. intersystems_iris/_ConnectionParameters.py +0 -18
  22. intersystems_iris/_Constant.py +0 -38
  23. intersystems_iris/_DBList.py +0 -506
  24. intersystems_iris/_Device.py +0 -69
  25. intersystems_iris/_GatewayContext.py +0 -25
  26. intersystems_iris/_GatewayException.py +0 -4
  27. intersystems_iris/_GatewayUtility.py +0 -74
  28. intersystems_iris/_IRIS.py +0 -1294
  29. intersystems_iris/_IRISConnection.py +0 -516
  30. intersystems_iris/_IRISEmbedded.py +0 -85
  31. intersystems_iris/_IRISGlobalNode.py +0 -273
  32. intersystems_iris/_IRISGlobalNodeView.py +0 -25
  33. intersystems_iris/_IRISIterator.py +0 -143
  34. intersystems_iris/_IRISList.py +0 -360
  35. intersystems_iris/_IRISNative.py +0 -208
  36. intersystems_iris/_IRISOREF.py +0 -9
  37. intersystems_iris/_IRISObject.py +0 -424
  38. intersystems_iris/_IRISReference.py +0 -133
  39. intersystems_iris/_InStream.py +0 -149
  40. intersystems_iris/_LegacyIterator.py +0 -135
  41. intersystems_iris/_ListItem.py +0 -15
  42. intersystems_iris/_ListReader.py +0 -84
  43. intersystems_iris/_ListWriter.py +0 -161
  44. intersystems_iris/_LogFileStream.py +0 -115
  45. intersystems_iris/_MessageHeader.py +0 -51
  46. intersystems_iris/_OutStream.py +0 -25
  47. intersystems_iris/_PrintStream.py +0 -65
  48. intersystems_iris/_PythonGateway.py +0 -850
  49. intersystems_iris/_SharedMemorySocket.py +0 -87
  50. intersystems_iris/__init__.py +0 -79
  51. intersystems_iris/__main__.py +0 -7
  52. intersystems_iris/dbapi/_Column.py +0 -56
  53. intersystems_iris/dbapi/_DBAPI.py +0 -2631
  54. intersystems_iris/dbapi/_Descriptor.py +0 -46
  55. intersystems_iris/dbapi/_IRISStream.py +0 -65
  56. intersystems_iris/dbapi/_Message.py +0 -158
  57. intersystems_iris/dbapi/_Parameter.py +0 -171
  58. intersystems_iris/dbapi/_ParameterCollection.py +0 -141
  59. intersystems_iris/dbapi/_ResultSetRow.py +0 -361
  60. intersystems_iris/dbapi/_SQLType.py +0 -32
  61. intersystems_iris/dbapi/__init__.py +0 -0
  62. intersystems_iris/dbapi/preparser/_PreParser.py +0 -1674
  63. intersystems_iris/dbapi/preparser/_Scanner.py +0 -391
  64. intersystems_iris/dbapi/preparser/_Token.py +0 -81
  65. intersystems_iris/dbapi/preparser/_TokenList.py +0 -251
  66. intersystems_iris/dbapi/preparser/__init__.py +0 -0
  67. intersystems_iris/pex/_BusinessHost.py +0 -101
  68. intersystems_iris/pex/_BusinessOperation.py +0 -105
  69. intersystems_iris/pex/_BusinessProcess.py +0 -214
  70. intersystems_iris/pex/_BusinessService.py +0 -95
  71. intersystems_iris/pex/_Common.py +0 -228
  72. intersystems_iris/pex/_Director.py +0 -24
  73. intersystems_iris/pex/_IRISBusinessOperation.py +0 -5
  74. intersystems_iris/pex/_IRISBusinessService.py +0 -18
  75. intersystems_iris/pex/_IRISInboundAdapter.py +0 -5
  76. intersystems_iris/pex/_IRISOutboundAdapter.py +0 -17
  77. intersystems_iris/pex/_InboundAdapter.py +0 -57
  78. intersystems_iris/pex/_Message.py +0 -6
  79. intersystems_iris/pex/_OutboundAdapter.py +0 -46
  80. intersystems_iris/pex/__init__.py +0 -25
  81. iris_pex_embedded_python-3.4.0b14.dist-info/RECORD +0 -143
  82. iris_pex_embedded_python-3.4.0b14.dist-info/top_level.txt +0 -4
  83. irisnative/_IRISNative.py +0 -9
  84. irisnative/__init__.py +0 -10
  85. {iris_pex_embedded_python-3.4.0b14.dist-info → iris_pex_embedded_python-3.4.1.dist-info}/entry_points.txt +0 -0
  86. {iris_pex_embedded_python-3.4.0b14.dist-info → iris_pex_embedded_python-3.4.1.dist-info}/licenses/LICENSE +0 -0
intersystems_iris/dbapi/preparser/_PreParser.py
@@ -1,1674 +0,0 @@
1
- import re
2
- import functools
3
- import enum
4
- import intersystems_iris._IRISList
5
- import intersystems_iris.dbapi._DBAPI
6
- import intersystems_iris.dbapi._Parameter
7
- import intersystems_iris.dbapi.preparser._Token
8
- import intersystems_iris.dbapi.preparser._TokenList
9
- import intersystems_iris.dbapi.preparser._Scanner
10
- from intersystems_iris.dbapi._Parameter import ParameterMode
11
- from intersystems_iris.dbapi.preparser._Token import TOKEN
12
- from intersystems_iris.dbapi.preparser._Scanner import ParseToken
13
-
14
- class MultiValuesInsert(Exception):
15
-
16
- def __init__(self, *args: object, query: str, rows: int, params=None) -> None:
17
- super().__init__(*args)
18
- self.query = query
19
- self.rows = rows
20
- self.params = params
21
-
22
-
23
- # May want to move to its own file eventually
24
- # SQL Statement Types
25
- class StatementType(enum.IntEnum):
26
- UPDATE = 0
27
- QUERY = 1
28
- CALL = 2
29
- SYNC_COMMIT = 3
30
- ASYNC_COMMIT = 4
31
- STREAMS_OFF = 5
32
- STREAMS_ON = 6
33
- CALLWITHRESULT = 7
34
- DDL_ALTER_DROP = 8
35
- DDL_OTHER = 9
36
- DIRECT_CALL_QUERY = 10
37
- DIRECT_CALL_UPDATE = 11
38
- PREPARED_CALL_QUERY = 12
39
- PREPARED_CALL_UPDATE = 13
40
- SQL_DIALECT = 14
41
- STMT_USE = 15
42
-
43
- class PreParseResult(object):
44
- '''
45
- A simple structure, returned by _PreParser.PreParse(), containing the parsed statement and statement type
46
- '''
47
- def __init__(self):
48
- self.sResult = ""
49
- self.p_eStmtType = None
50
-
51
- class _PreParser(object):
52
- """
53
- This is the interface to the SQL PreParser. A string of SQL and a list of parameters (bound parameters or empty) is input.
54
- The processed string is returned along with a count of parameters found and a classification of the statement type
55
- """
56
- # Class properties
57
- # Table for keyword lookups (used when tokenizing the statement)
58
- s_KeywordTable = {}
59
- s_KeywordTable["AND"] = TOKEN.OP
60
- s_KeywordTable["BETWEEN"] = TOKEN.OP
61
- s_KeywordTable["CHAR"] = TOKEN.DATATYPE
62
- s_KeywordTable["CHARACTER"] = TOKEN.DATATYPE
63
- s_KeywordTable["DEC"] = TOKEN.DATATYPE
64
- s_KeywordTable["DECIMAL"] = TOKEN.DATATYPE
65
- s_KeywordTable["FLOAT"] = TOKEN.DATATYPE
66
- s_KeywordTable["IS"] = TOKEN.IS
67
- s_KeywordTable["LIKE"] = TOKEN.OP
68
- s_KeywordTable["LONGVARBINARY"] = TOKEN.DATATYPE
69
- s_KeywordTable["LONGVARCHAR"] = TOKEN.DATATYPE
70
- s_KeywordTable["NCHAR"] = TOKEN.DATATYPE
71
- s_KeywordTable["NOT["] = TOKEN.OP
72
- s_KeywordTable["NOT"] = TOKEN.NOT
73
- s_KeywordTable["NULL"] = TOKEN.NULL
74
- s_KeywordTable["NUMBER"] = TOKEN.DATATYPE
75
- s_KeywordTable["NUMERIC"] = TOKEN.DATATYPE
76
- s_KeywordTable["NVARCHAR"] = TOKEN.DATATYPE
77
- s_KeywordTable["RAW"] = TOKEN.DATATYPE
78
- s_KeywordTable["STARTSWITH"] = TOKEN.OP
79
- s_KeywordTable["THEN"] = TOKEN.THEN
80
- s_KeywordTable["ELSE"] = TOKEN.ELSE
81
- s_KeywordTable["VARBINARY"] = TOKEN.DATATYPE
82
- s_KeywordTable["VARCHAR"] = TOKEN.DATATYPE
83
- s_KeywordTable["VARCHAR2"] = TOKEN.DATATYPE
84
- s_KeywordTable["VARYING"] = TOKEN.DATATYPE
85
- s_KeywordTable["_"] = TOKEN.NOT
86
- s_KeywordTable["%SQLUPPER"] = TOKEN.STRFUNCTION
87
- s_KeywordTable["%STRING"] = TOKEN.STRFUNCTION
88
- s_KeywordTable["%SQLSTRING"] = TOKEN.STRFUNCTION
89
- s_KeywordTable["%TRUNCATE"] = TOKEN.STRFUNCTION
90
- s_KeywordTable["TRUNCATE"] = TOKEN.STRFUNCTION
91
-
92
- # Table for statement type lookups
93
- s_StatementTable = {}
94
- s_StatementTable["ALTER"] = StatementType.DDL_ALTER_DROP
95
- s_StatementTable["CREATE"] = StatementType.DDL_OTHER
96
- s_StatementTable["DROP"] = StatementType.DDL_ALTER_DROP
97
- s_StatementTable["GRANT"] = StatementType.DDL_OTHER
98
- s_StatementTable["REVOKE"] = StatementType.DDL_OTHER
99
- s_StatementTable["%CHECKPRIV"] = StatementType.DDL_OTHER
100
- s_StatementTable["TRAIN"] = StatementType.DDL_OTHER
101
- s_StatementTable["VALIDATE"] = StatementType.DDL_OTHER
102
- s_StatementTable["TUNE"] = StatementType.DDL_OTHER
103
- s_StatementTable["VALIDATE"] = StatementType.DDL_OTHER
104
-
105
- s_StatementTable["USE"] = StatementType.STMT_USE
106
- s_StatementTable["EXPLAIN"] = StatementType.CALLWITHRESULT
107
-
108
- # Table for common statement type lookups (SELECT,DELETE,UPDATE,INSERT)
109
- s_ParsedStatements = {}
110
- s_ParsedStatements["SELECT"] = StatementType.QUERY
111
- s_ParsedStatements["INSERT"] = StatementType.UPDATE
112
- s_ParsedStatements["DELETE"] = StatementType.UPDATE
113
- s_ParsedStatements["UPDATE"] = StatementType.UPDATE
114
-
115
- # Table for statements to cache, beyond those in s_ParsedStatements
116
- # TODO: change this to be a set
117
- # Since the server now tells us whether to cache, this may be superfluous
118
- s_TransactionStatements = {}
119
- s_TransactionStatements["COMMIT"] = True
120
- s_TransactionStatements["ROLLBACK"] = True
121
- s_TransactionStatements["START"] = True
122
- s_TransactionStatements["%INTRANSACTION"] = True
123
- s_TransactionStatements["%INTRANS"] = True
124
- s_TransactionStatements["%BEGTRANS"] = True
125
-
126
- # keywords for replacing parameters
127
- s_replaceparm = " SELECT TOP WHERE ON AND OR NOT BETWEEN %STARTSWITH LIKE CASE WHEN ELSE THEN"
128
-
129
- # keywords that should be output all upper case after preparsing
130
- s_ReservedKeywords = set()
131
- s_ReservedKeywords.add("%AFTERHAVING")
132
- s_ReservedKeywords.add("%ALLINDEX")
133
- s_ReservedKeywords.add("%ALPHAUP")
134
- s_ReservedKeywords.add("%ALTER")
135
- s_ReservedKeywords.add("%BEGTRANS")
136
- s_ReservedKeywords.add("%CHECKPRIV")
137
- s_ReservedKeywords.add("%CLASSNAME")
138
- s_ReservedKeywords.add("%CLASSPARAMETER")
139
- s_ReservedKeywords.add("%DBUGFULL")
140
- s_ReservedKeywords.add("%DELDATA")
141
- s_ReservedKeywords.add("%DESCRIPTION")
142
- s_ReservedKeywords.add("%EXACT")
143
- s_ReservedKeywords.add("%EXTERNAL")
144
- s_ReservedKeywords.add("%FILE")
145
- s_ReservedKeywords.add("%FIRSTTABLE")
146
- s_ReservedKeywords.add("%FLATTEN")
147
- s_ReservedKeywords.add("%FOREACH")
148
- s_ReservedKeywords.add("%FULL")
149
- s_ReservedKeywords.add("%ID")
150
- s_ReservedKeywords.add("%IDADDED")
151
- s_ReservedKeywords.add("%IGNOREINDEX")
152
- s_ReservedKeywords.add("%IGNOREINDICES")
153
- s_ReservedKeywords.add("%INLIST")
154
- s_ReservedKeywords.add("%INORDER")
155
- s_ReservedKeywords.add("%INTERNAL")
156
- s_ReservedKeywords.add("%INTEXT")
157
- s_ReservedKeywords.add("%INTRANS")
158
- s_ReservedKeywords.add("%INTRANSACTION")
159
- s_ReservedKeywords.add("%KEY")
160
- s_ReservedKeywords.add("%MATCHES")
161
- s_ReservedKeywords.add("%MCODE")
162
- s_ReservedKeywords.add("%MERGE")
163
- s_ReservedKeywords.add("%MINUS")
164
- s_ReservedKeywords.add("%MVR")
165
- s_ReservedKeywords.add("%NOCHECK")
166
- s_ReservedKeywords.add("%NODELDATA")
167
- s_ReservedKeywords.add("%NOFLATTEN")
168
- s_ReservedKeywords.add("%NOFPLAN")
169
- s_ReservedKeywords.add("%NOINDEX")
170
- s_ReservedKeywords.add("%NOLOCK")
171
- s_ReservedKeywords.add("%NOMERGE")
172
- s_ReservedKeywords.add("%NOPARALLEL")
173
- s_ReservedKeywords.add("%NOREDUCE")
174
- s_ReservedKeywords.add("%NORUNTIME")
175
- s_ReservedKeywords.add("%NOSVSO")
176
- s_ReservedKeywords.add("%NOTOPOPT")
177
- s_ReservedKeywords.add("%NOTRIGGER")
178
- s_ReservedKeywords.add("%NOUNIONOROPT")
179
- s_ReservedKeywords.add("%NUMROWS")
180
- s_ReservedKeywords.add("%ODBCIN")
181
- s_ReservedKeywords.add("%ODBCOUT")
182
- s_ReservedKeywords.add("%PARALLEL")
183
- s_ReservedKeywords.add("%PLUS")
184
- s_ReservedKeywords.add("%PROFILE")
185
- s_ReservedKeywords.add("%PROFILE_ALL")
186
- s_ReservedKeywords.add("%PUBLICROWID")
187
- s_ReservedKeywords.add("%ROUTINE")
188
- s_ReservedKeywords.add("%ROWCOUNT")
189
- s_ReservedKeywords.add("%RUNTIMEIN")
190
- s_ReservedKeywords.add("%RUNTIMEOUT")
191
- s_ReservedKeywords.add("%STARTSWITH")
192
- s_ReservedKeywords.add("%STARTTABLE")
193
- s_ReservedKeywords.add("%SQLSTRING")
194
- s_ReservedKeywords.add("%SQLUPPER")
195
- s_ReservedKeywords.add("%STRING")
196
- s_ReservedKeywords.add("%TABLENAME")
197
- s_ReservedKeywords.add("%TRUNCATE")
198
- s_ReservedKeywords.add("%UPPER")
199
- s_ReservedKeywords.add("%VALUE")
200
- s_ReservedKeywords.add("%VID")
201
- s_ReservedKeywords.add("ABSOLUTE")
202
- s_ReservedKeywords.add("ADD")
203
- s_ReservedKeywords.add("ALL")
204
- s_ReservedKeywords.add("ALLOCATE")
205
- s_ReservedKeywords.add("ALTER")
206
- s_ReservedKeywords.add("AND")
207
- s_ReservedKeywords.add("ANY")
208
- s_ReservedKeywords.add("ARE")
209
- s_ReservedKeywords.add("AS")
210
- s_ReservedKeywords.add("ASC")
211
- s_ReservedKeywords.add("ASSERTION")
212
- s_ReservedKeywords.add("AT")
213
- s_ReservedKeywords.add("AUTHORIZATION")
214
- s_ReservedKeywords.add("AVG")
215
- s_ReservedKeywords.add("BEGIN")
216
- s_ReservedKeywords.add("BETWEEN")
217
- s_ReservedKeywords.add("BIT")
218
- s_ReservedKeywords.add("BIT_LENGTH")
219
- s_ReservedKeywords.add("BOTH")
220
- s_ReservedKeywords.add("BY")
221
- s_ReservedKeywords.add("CASCADE")
222
- s_ReservedKeywords.add("CASE")
223
- s_ReservedKeywords.add("CAST")
224
- s_ReservedKeywords.add("CHAR")
225
- s_ReservedKeywords.add("CHARACTER")
226
- s_ReservedKeywords.add("CHARACTER_LENGTH")
227
- s_ReservedKeywords.add("CHAR_LENGTH")
228
- s_ReservedKeywords.add("CHECK")
229
- s_ReservedKeywords.add("CLOSE")
230
- s_ReservedKeywords.add("COALESCE")
231
- s_ReservedKeywords.add("COLLATE")
232
- s_ReservedKeywords.add("COMMIT")
233
- s_ReservedKeywords.add("CONNECT")
234
- s_ReservedKeywords.add("CONNECTION")
235
- s_ReservedKeywords.add("CONSTRAINT")
236
- s_ReservedKeywords.add("CONSTRAINTS")
237
- s_ReservedKeywords.add("CONTINUE")
238
- s_ReservedKeywords.add("CONVERT")
239
- s_ReservedKeywords.add("CORRESPONDING")
240
- s_ReservedKeywords.add("COUNT")
241
- s_ReservedKeywords.add("CREATE")
242
- s_ReservedKeywords.add("CROSS")
243
- s_ReservedKeywords.add("CURRENT")
244
- s_ReservedKeywords.add("CURRENT_DATE")
245
- s_ReservedKeywords.add("CURRENT_TIME")
246
- s_ReservedKeywords.add("CURRENT_TIMESTAMP")
247
- s_ReservedKeywords.add("CURRENT_USER")
248
- s_ReservedKeywords.add("CURSOR")
249
- s_ReservedKeywords.add("DATE")
250
- s_ReservedKeywords.add("DEALLOCATE")
251
- s_ReservedKeywords.add("DEC")
252
- s_ReservedKeywords.add("DECIMAL")
253
- s_ReservedKeywords.add("DECLARE")
254
- s_ReservedKeywords.add("DEFAULT")
255
- s_ReservedKeywords.add("DEFERRABLE")
256
- s_ReservedKeywords.add("DEFERRED")
257
- s_ReservedKeywords.add("DELETE")
258
- s_ReservedKeywords.add("DESC")
259
- s_ReservedKeywords.add("DESCRIBE")
260
- s_ReservedKeywords.add("DESCRIPTOR")
261
- s_ReservedKeywords.add("DIAGNOSTICS")
262
- s_ReservedKeywords.add("DISCONNECT")
263
- s_ReservedKeywords.add("DISTINCT")
264
- s_ReservedKeywords.add("DOMAIN")
265
- s_ReservedKeywords.add("DOUBLE")
266
- s_ReservedKeywords.add("DROP")
267
- s_ReservedKeywords.add("ELSE")
268
- s_ReservedKeywords.add("END")
269
- s_ReservedKeywords.add("ENDEXEC")
270
- s_ReservedKeywords.add("ESCAPE")
271
- s_ReservedKeywords.add("EXCEPT")
272
- s_ReservedKeywords.add("EXCEPTION")
273
- s_ReservedKeywords.add("EXEC")
274
- s_ReservedKeywords.add("EXECUTE")
275
- s_ReservedKeywords.add("EXISTS")
276
- s_ReservedKeywords.add("EXTERNAL")
277
- s_ReservedKeywords.add("EXTRACT")
278
- s_ReservedKeywords.add("FALSE")
279
- s_ReservedKeywords.add("FETCH")
280
- s_ReservedKeywords.add("FIRST")
281
- s_ReservedKeywords.add("FLOAT")
282
- s_ReservedKeywords.add("FOR")
283
- s_ReservedKeywords.add("FOREIGN")
284
- s_ReservedKeywords.add("FOUND")
285
- s_ReservedKeywords.add("FROM")
286
- s_ReservedKeywords.add("FULL")
287
- s_ReservedKeywords.add("GET")
288
- s_ReservedKeywords.add("GLOBAL")
289
- s_ReservedKeywords.add("GO")
290
- s_ReservedKeywords.add("GOTO")
291
- s_ReservedKeywords.add("GRANT")
292
- s_ReservedKeywords.add("GROUP")
293
- s_ReservedKeywords.add("HAVING")
294
- s_ReservedKeywords.add("HOUR")
295
- s_ReservedKeywords.add("IDENTITY")
296
- s_ReservedKeywords.add("IMMEDIATE")
297
- s_ReservedKeywords.add("IN")
298
- s_ReservedKeywords.add("INDICATOR")
299
- s_ReservedKeywords.add("INITIALLY")
300
- s_ReservedKeywords.add("INNER")
301
- s_ReservedKeywords.add("INPUT")
302
- s_ReservedKeywords.add("INSENSITIVE")
303
- s_ReservedKeywords.add("INSERT")
304
- s_ReservedKeywords.add("INT")
305
- s_ReservedKeywords.add("INTEGER")
306
- s_ReservedKeywords.add("INTERSECT")
307
- s_ReservedKeywords.add("INTERVAL")
308
- s_ReservedKeywords.add("INTO")
309
- s_ReservedKeywords.add("IS")
310
- s_ReservedKeywords.add("ISOLATION")
311
- s_ReservedKeywords.add("JOIN")
312
- s_ReservedKeywords.add("LANGUAGE")
313
- s_ReservedKeywords.add("LAST")
314
- s_ReservedKeywords.add("LEADING")
315
- s_ReservedKeywords.add("LEFT")
316
- s_ReservedKeywords.add("LEVEL")
317
- s_ReservedKeywords.add("LIKE")
318
- s_ReservedKeywords.add("LOCAL")
319
- s_ReservedKeywords.add("LOWER")
320
- s_ReservedKeywords.add("MATCH")
321
- s_ReservedKeywords.add("MAX")
322
- s_ReservedKeywords.add("MIN")
323
- s_ReservedKeywords.add("MINUTE")
324
- s_ReservedKeywords.add("MODULE")
325
- s_ReservedKeywords.add("NAMES")
326
- s_ReservedKeywords.add("NATIONAL")
327
- s_ReservedKeywords.add("NATURAL")
328
- s_ReservedKeywords.add("NCHAR")
329
- s_ReservedKeywords.add("NEXT")
330
- s_ReservedKeywords.add("NO")
331
- s_ReservedKeywords.add("NOT")
332
- s_ReservedKeywords.add("NULL")
333
- s_ReservedKeywords.add("NULLIF")
334
- s_ReservedKeywords.add("NUMERIC")
335
- s_ReservedKeywords.add("OCTET_LENGTH")
336
- s_ReservedKeywords.add("OF")
337
- s_ReservedKeywords.add("ON")
338
- s_ReservedKeywords.add("ONLY")
339
- s_ReservedKeywords.add("OPEN")
340
- s_ReservedKeywords.add("OPTION")
341
- s_ReservedKeywords.add("OR")
342
- s_ReservedKeywords.add("OUTER")
343
- s_ReservedKeywords.add("OUTPUT")
344
- s_ReservedKeywords.add("OVERLAPS")
345
- s_ReservedKeywords.add("PAD")
346
- s_ReservedKeywords.add("PARTIAL")
347
- s_ReservedKeywords.add("PREPARE")
348
- s_ReservedKeywords.add("PRESERVE")
349
- s_ReservedKeywords.add("PRIMARY")
350
- s_ReservedKeywords.add("PRIOR")
351
- s_ReservedKeywords.add("PRIVILEGES")
352
- s_ReservedKeywords.add("PROCEDURE")
353
- s_ReservedKeywords.add("PUBLIC")
354
- s_ReservedKeywords.add("READ")
355
- s_ReservedKeywords.add("REAL")
356
- s_ReservedKeywords.add("REFERENCES")
357
- s_ReservedKeywords.add("RELATIVE")
358
- s_ReservedKeywords.add("RESTRICT")
359
- s_ReservedKeywords.add("REVOKE")
360
- s_ReservedKeywords.add("RIGHT")
361
- s_ReservedKeywords.add("ROLE")
362
- s_ReservedKeywords.add("ROLLBACK")
363
- s_ReservedKeywords.add("ROWS")
364
- s_ReservedKeywords.add("SCHEMA")
365
- s_ReservedKeywords.add("SCROLL")
366
- s_ReservedKeywords.add("SECOND")
367
- s_ReservedKeywords.add("SECTION")
368
- s_ReservedKeywords.add("SELECT")
369
- s_ReservedKeywords.add("SESSION_USER")
370
- s_ReservedKeywords.add("SET")
371
- s_ReservedKeywords.add("SHARD")
372
- s_ReservedKeywords.add("SMALLINT")
373
- s_ReservedKeywords.add("SOME")
374
- s_ReservedKeywords.add("SPACE")
375
- s_ReservedKeywords.add("SQLERROR")
376
- s_ReservedKeywords.add("SQLSTATE")
377
- s_ReservedKeywords.add("STATISTICS")
378
- s_ReservedKeywords.add("SUBSTRING")
379
- s_ReservedKeywords.add("SUM")
380
- s_ReservedKeywords.add("SYSDATE")
381
- s_ReservedKeywords.add("SYSTEM_USER")
382
- s_ReservedKeywords.add("TABLE")
383
- s_ReservedKeywords.add("TEMPORARY")
384
- s_ReservedKeywords.add("THEN")
385
- s_ReservedKeywords.add("TIME")
386
- s_ReservedKeywords.add("TIMEZONE_HOUR")
387
- s_ReservedKeywords.add("TIMEZONE_MINUTE")
388
- s_ReservedKeywords.add("TO")
389
- s_ReservedKeywords.add("TOP")
390
- s_ReservedKeywords.add("TRAILING")
391
- s_ReservedKeywords.add("TRANSACTION")
392
- s_ReservedKeywords.add("TRIM")
393
- s_ReservedKeywords.add("TRUE")
394
- s_ReservedKeywords.add("UNION")
395
- s_ReservedKeywords.add("UNIQUE")
396
- s_ReservedKeywords.add("UPDATE")
397
- s_ReservedKeywords.add("UPPER")
398
- s_ReservedKeywords.add("USER")
399
- s_ReservedKeywords.add("USING")
400
- s_ReservedKeywords.add("VALUES")
401
- s_ReservedKeywords.add("VARCHAR")
402
- s_ReservedKeywords.add("VARYING")
403
- s_ReservedKeywords.add("WHEN")
404
- s_ReservedKeywords.add("WHENEVER")
405
- s_ReservedKeywords.add("WHERE")
406
- s_ReservedKeywords.add("WITH")
407
- s_ReservedKeywords.add("WORK")
408
- s_ReservedKeywords.add("WRITE")
409
-
410
- # Supported SQL Dialects
411
- SQL_DIALECT_DEFAULT = 0
412
- SQL_DIALECT_MSSQL = 1
413
- SQL_DIALECT_SYBASE = 2
414
-
415
- # methods
416
- def CacheOnServerGet(self):
417
- return self.m_CacheOnServer
418
-
419
- def CacheOnServerSet(self, b):
420
- b = bool(b)
421
-
422
- self.m_CacheOnServer = b
423
-
424
- def ParamInfoGet(self):
425
- return self.m_ParamInfo
426
-
427
- def ParamInfoSet(self, s):
428
- if not isinstance(s, intersystems_iris._ListWriter._ListWriter):
429
- raise TypeError("s must be a _ListWriter")
430
-
431
- self.m_ParamInfo = s
432
-
433
- # Build a PreParser
434
- def __init__(self, p_bDelimitedIdentifiers = False, addRID = 0, embedded = False):
435
- p_bDelimitedIdentifiers = bool(p_bDelimitedIdentifiers)
436
- try:
437
- addRID = int(addRID)
438
- except (TypeError, ValueError):
439
- raise TypeError("addRID must be an integer")
440
-
441
- self.m_addRowID = addRID
442
- self.m_ExecParamCount = 0
443
- self.m_ParamInfo = intersystems_iris._IRISList._IRISList()
444
-
445
- # flags for delimited identifier use
446
- self.m_bDelimitedIdentifiers = p_bDelimitedIdentifiers
447
- self.m_bBracketSubstitution = False
448
-
449
- # flag for when statements are cached on the server
450
- # potentially irrelevant now because server tells us directly whether it cached the statement
451
- self.CacheOnServerSet(False)
452
-
453
- # List for tokenizer
454
- self.m_Tokens = None
455
-
456
- # The source scanner
457
- self.m_Scanner = None
458
-
459
- # flag for when Named Parameters are used
460
- self.hasNamedParameters = False
461
-
462
- # use to pass UndefinedCount value from methods
463
- self.m_nUndefinedCount = 0
464
-
465
- self.embedded = embedded
466
-
467
- # Preparse an SQL string returning output statement, parameters, parameter count and statement type
468
- def PreParse(self, query, p_Parameters):
469
-
470
- t_query = query
471
- while True:
472
- # First tokenize the input
473
- self.Tokenize(t_query)
474
- # Convert WITH Clause, can be recursive
475
- found_with, t_query = self.With(t_query)
476
- if not found_with:
477
- break
478
-
479
- found_insert, t_query = self.InsertMultiValues(t_query)
480
- if found_insert:
481
- self.Tokenize(t_query)
482
-
483
- # Resolve the tokens and determine output
484
- return self.Resolve(t_query, p_Parameters)
485
-
486
- def With(self, query):
487
- try:
488
- found = False
489
- new_query = ''
490
- with_statements = {}
491
-
492
- def _query(find_end_paren=True):
493
- sub_query = ''
494
- open_parens = 0
495
- while tokens.MoveNext():
496
- token = tokens.Current()
497
-
498
- if token.TokenType is TOKEN.OPEN_PAREN:
499
- open_parens += 1
500
- elif token.TokenType is TOKEN.CLOSE_PAREN:
501
- open_parens -= 1
502
-
503
- sub_query += token.Lexeme
504
- sub_query += ' '
505
-
506
- if token.TokenType is TOKEN.ID and (token.UpperEquals('FROM') or token.UpperEquals('JOIN')):
507
- assert tokens.MoveNext() and tokens.Current().TokenType is TOKEN.ID
508
- table_name = tokens.Current().Lexeme
509
- table_name_upper = tokens.Current().UpperLexeme
510
- if table_name_upper in with_statements:
511
- sub_query += with_statements[table_name_upper]
512
- sub_query += ' AS '
513
- sub_query += table_name
514
- sub_query += ' '
515
-
516
- if find_end_paren and open_parens == 0:
517
- break
518
- return sub_query
519
-
520
- tokens = self.m_Tokens.GetEnumerator()
521
- while tokens.MoveNext():
522
- token = tokens.Current()
523
-
524
- if token.TokenType is TOKEN.ID and token.UpperEquals("WITH"):
525
- found = True
526
- break
527
- else:
528
- new_query += token.Lexeme
529
- new_query += ' '
530
-
531
- if not found:
532
- return False, query
533
-
534
- while True:
535
- assert tokens.MoveNext() and tokens.Current().TokenType is TOKEN.ID
536
- with_name = tokens.Current().UpperLexeme
537
- assert tokens.MoveNext() and tokens.Current().TokenType is TOKEN.ID and tokens.Current().UpperContains('AS')
538
- assert tokens.MoveNext() and tokens.Current().TokenType is TOKEN.OPEN_PAREN
539
-
540
- tokens.MovePrevious()
541
- with_statements[with_name] = _query()
542
-
543
- if not tokens.MoveNext() or tokens.Current().TokenType is not TOKEN.COMMA:
544
- tokens.MovePrevious()
545
- break
546
-
547
- assert tokens.MoveNext()
548
- tokens.MovePrevious()
549
- new_query += _query(False)
550
-
551
- return found, new_query
552
- except:
553
- return False, query
554
-
555
- def InsertMultiValues(self, query):
556
- new_query = ''
557
- values_list = []
558
-
559
- tokens = self.m_Tokens.GetEnumerator()
560
- while tokens.MoveNext() and not tokens.Current().UpperEquals("INSERT"):
561
- new_query += tokens.Current().Lexeme + ' '
562
- if not tokens.MoveNext() or not tokens.Current().UpperEquals("INTO"):
563
- return False, query
564
- new_query += 'INSERT INTO '
565
- while tokens.MoveNext() and not tokens.Current().UpperEquals("VALUES"):
566
- new_query += tokens.Current().Lexeme + ' '
567
-
568
- values = ''
569
- params = []
570
- while tokens.MoveNext():
571
- assert tokens.Current().TokenType is TOKEN.OPEN_PAREN
572
- open_parens = 1
573
- while tokens.MoveNext() or open_parens > 0:
574
- token = tokens.Current()
575
- if token.TokenType is TOKEN.OPEN_PAREN:
576
- open_parens += 1
577
- elif token.TokenType is TOKEN.CLOSE_PAREN:
578
- open_parens -= 1
579
- if open_parens == 0:
580
- break
581
- if token.TokenType is TOKEN.CONSTANT:
582
- values += '?'
583
- param = token.Lexeme
584
- if len(param) > 0 and param[0] == "'" and param[0] == param[-1]:
585
- param = param[1: -1]
586
- params += [param]
587
- else:
588
- values += token.Lexeme
589
- values += ' '
590
- values_list.append(values)
591
- values = ''
592
- if not tokens.MoveNext() or tokens.Current().TokenType is not TOKEN.COMMA:
593
- break
594
-
595
- if len(values_list) <= 1:
596
- return False, query
597
-
598
- new_query += f" VALUES ({values_list[0]})"
599
- raise MultiValuesInsert(query=new_query, rows=len(values_list), params=params)
600
-
601
- # Parse a statement
602
- def Tokenize(self, p_strInput):
603
- # Get a scanner on the sql string
604
- self.m_Scanner = intersystems_iris.dbapi.preparser._Scanner._Scanner(p_strInput)
605
- # Create a new token list
606
- self.m_Tokens = intersystems_iris.dbapi.preparser._TokenList._TokenList()
607
- # Scan the input string and break into tokens
608
- tokenize_switcher = {
609
- ParseToken.tokEOS: self.Tokenize_eos,
610
- ParseToken.tokDOT: self.Tokenize_dot,
611
- ParseToken.tokDIGIT: self.Tokenize_digit,
612
- ParseToken.tokMINUS: self.Tokenize_minus,
613
- ParseToken.tokPLUS: self.Tokenize_plus,
614
- ParseToken.tokLBRACK: self.Tokenize_lbrack,
615
- ParseToken.tokDQUOTE: self.Tokenize_quote,
616
- ParseToken.tokSQUOTE: self.Tokenize_quote,
617
- ParseToken.tokSLASH: self.Tokenize_slash,
618
- ParseToken.tokQUEST: functools.partial(self.Tokenize_single, token = TOKEN.QUESTION_MARK, char = "?"),
619
- ParseToken.tokATSIGN: self.Tokenize_atsign,
620
- ParseToken.tokLPARN: functools.partial(self.Tokenize_single, token = TOKEN.OPEN_PAREN, char = "("),
621
- ParseToken.tokRPARN: functools.partial(self.Tokenize_single, token = TOKEN.CLOSE_PAREN, char = ")"),
622
- ParseToken.tokCOMMA: functools.partial(self.Tokenize_single, token = TOKEN.COMMA, char = ","),
623
- ParseToken.tokCOLON: self.Tokenize_colon,
624
- ParseToken.tokLETTER: self.Tokenize_identifier,
625
- ParseToken.tokPERCENT: self.Tokenize_identifier,
626
- ParseToken.tokDOLLAR: self.Tokenize_identifier,
627
- ParseToken.tokUSCORE: self.Tokenize_identifier,
628
- ParseToken.tokPOUND: self.Tokenize_identifier,
629
- ParseToken.tokLESS: functools.partial(self.Tokenize_op, check_tokens = [ParseToken.tokEQUAL, ParseToken.tokGREAT]),
630
- ParseToken.tokEXCLA: self.Tokenize_op,
631
- ParseToken.tokGREAT: self.Tokenize_op,
632
- ParseToken.tokASTER: self.Tokenize_op,
633
- ParseToken.tokEQUAL: functools.partial(self.Tokenize_op, check_tokens = [ParseToken.tokASTER]),
634
- ParseToken.tokVBAR: self.Tokenize_vbar,
635
- ParseToken.tokLBRACE: self.Tokenize_lbrace
636
- }
637
- while self.m_Scanner.CurrentTokenGet() != ParseToken.tokEOS:
638
- self.m_Scanner.SkipWhitespace()
639
-
640
- tokenize_func = tokenize_switcher.get(self.m_Scanner.CurrentTokenGet(), self.Tokenize_default)
641
- tokenize_func()
642
-
643
- # generic function for when a token consists of a single character
644
- def Tokenize_single(self, token, char):
645
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(token, char))
646
- # Skip this character
647
- self.m_Scanner.NextToken()
648
-
649
- # default behavior for an unknown character or ParseToken
650
- def Tokenize_default(self, token = TOKEN.UNKNOWN):
651
- self.m_Scanner.BeginLexeme()
652
- self.m_Scanner.NextToken() # One character unknown
653
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(token, self.m_Scanner.EndLexeme(), self.m_Scanner.EndUpperLexeme()))
654
-
655
- # end of source, do nothing
656
- def Tokenize_eos(self):
657
- pass
658
-
659
- # if dot is part of a decimal, parse a number, otherwise default behavior
660
- def Tokenize_dot(self):
661
- if ParseToken.tokDIGIT != self.m_Scanner.PeekNextToken():
662
- self.Tokenize_default()
663
- else:
664
- self.Tokenize_digit()
665
-
666
- # either the beginning of hex data, or a number
667
- def Tokenize_digit(self):
668
- (t_strNumber, goodParse) = self.m_Scanner.Hex()
669
- if goodParse:
670
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.HEX, t_strNumber))
671
- return
672
-
673
- (t_strNumber, goodParse) = self.m_Scanner.Number()
674
- if not goodParse:
675
- raise Exception("Invalid Numeric Constant")
676
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.CONSTANT, t_strNumber))
677
-
678
- def Tokenize_minus(self):
679
- nextToken = self.m_Scanner.PeekNextToken()
680
- if nextToken == ParseToken.tokMINUS:
681
- # Continuation sequence, skip to next line
682
- self.m_Scanner.Skip(2)
683
- self.m_Scanner.BeginLexeme()
684
- self.m_Scanner.SkipToEndOfLine() # Skip '--' to end of line
685
- # DVU m_Tokens.Append(new _Token(TOKEN.UNKNOWN, "/*" + m_Scanner.EndLexeme() + "*/"))
686
- return
687
- elif nextToken == ParseToken.tokGREAT:
688
- # -> operator
689
- self.m_Scanner.BeginLexeme()
690
- self.m_Scanner.Skip(2) # Skip '->'
691
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.OP, self.m_Scanner.EndLexeme()))
692
- return
693
- self.Tokenize_plus("-")
694
-
695
- def Tokenize_plus(self, op_char = "+"):
696
- # RULE: Per Aviel, Preparser.txt. A numeric constant may include a preceding "+" or "-" ,
697
- # but only if the token before the +/- is an OP or LPAR, otherwise the +/- might be
698
- # a monadic operator and should be considered an OP.
699
- t_eToken = self.m_Tokens.Last().GetValue().TokenTypeGet() if self.m_Tokens.Last() is not None else TOKEN.UNKNOWN
700
- if t_eToken in [TOKEN.OP, TOKEN.OPEN_PAREN, TOKEN.COMMA] and (self.m_Scanner.PeekNextToken() == ParseToken.tokDIGIT or (self.m_Scanner.PeekNextToken() == ParseToken.tokDOT and self.m_Scanner.PeekNextNextToken() == ParseToken.tokDIGIT)):
701
- # Scan in number
702
- (t_strNumber, goodParse) = self.m_Scanner.Number()
703
- if not goodParse:
704
- # TO DO: Replace with ParseException
705
- raise Exception("Invalid Numeric Constant")
706
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.CONSTANT, t_strNumber))
707
- else:
708
- self.Tokenize_single(TOKEN.OP, op_char)
709
-
710
- def Tokenize_lbrack(self):
711
- if self.m_bBracketSubstitution:
712
- if not self.m_bDelimitedIdentifiers:
713
- raise Exception("Delimited identifiers must be enabled on the server to support brackets")
714
- (t_strString, t_eToken) = self.m_Scanner.ParseBrackets(self.m_bDelimitedIdentifiers)
715
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(t_eToken, t_strString))
716
- return
717
- self.Tokenize_default()
718
-
719
- # quotes indicate a string
720
- def Tokenize_quote(self):
721
- (t_strString, t_eToken) = self.m_Scanner.String(self.m_bDelimitedIdentifiers)
722
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(t_eToken, t_strString))
723
-
724
- def Tokenize_slash(self):
725
- if self.m_Scanner.PeekNextToken() == ParseToken.tokASTER:
726
- # scan in the comment
727
- self.m_Scanner.BeginLexeme()
728
- # Skip '/' '*'
729
- self.m_Scanner.Skip(2)
730
- # Scan in the comment, returns true if successful scan
731
- if not self.m_Scanner.Comment():
732
- # Ran off end of statement
733
- # TO DO: Replace with ParseException?
734
- raise Exception("Unexpected End-Of-Statement")
735
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.UNKNOWN, self.m_Scanner.EndLexeme(), self.m_Scanner.EndUpperLexeme()))
736
- else:
737
- self.Tokenize_default(TOKEN.OP) # '/' operator
738
-
739
- # '@' used for named parameters
740
- def Tokenize_atsign(self):
741
- self.m_Scanner.NextToken()
742
- if self.m_Scanner.CurrentTokenGet() == ParseToken.tokDIGIT:
743
- raise Exception(("Parameter Name error, First value cannot be a digit: " + self.m_Scanner.CurrentChar()))
744
- t_strID = self.m_Scanner.Identifier()
745
- if t_strID == "":
746
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.QUESTION_MARK, "?"))
747
- else:
748
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.ATSIGN, "@" + t_strID))
749
-
750
- # ':' indicates variables
751
- def Tokenize_colon(self):
752
- # Skip ':'
753
- self.m_Scanner.NextToken()
754
- # Scan in a variable
755
- t_strVariable = self.m_Scanner.Variable()
756
- t_strVariable = ":" + t_strVariable
757
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.VAR, t_strVariable))
758
-
759
- def Tokenize_identifier(self):
760
- # Initially, the token is an ID
761
- t_eToken = TOKEN.ID
762
- # Scan in an identifier
763
- t_strID = self.m_Scanner.Identifier()
764
- # Get an uppercase version for lookups
765
- t_strIDUpper = self.m_Scanner.EndUpperLexeme()
766
- # Do a table lookup to identify token
767
- if t_strIDUpper in self.s_KeywordTable:
768
- # Found it, replace ID with specific type
769
- t_eToken = self.s_KeywordTable[t_strIDUpper]
770
- if (t_eToken == TOKEN.NOT):
771
- t_strID = self.m_Scanner.checkForNotPredicates()
772
- t_strIDUpper = t_strID.upper()
773
- if t_strID == '%s':
774
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.QUESTION_MARK, "?"))
775
- else:
776
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(t_eToken, t_strID, t_strIDUpper))
777
-
778
- # used for various operators
779
- def Tokenize_op(self, check_tokens = [ParseToken.tokEQUAL]):
780
- self.m_Scanner.BeginLexeme()
781
- if self.m_Scanner.PeekNextToken() in check_tokens:
782
- # Check for composite operators (e.g. <=, >=, !=, etc.)
783
- self.m_Scanner.NextToken()
784
- self.m_Scanner.NextToken()
785
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.OP, self.m_Scanner.EndLexeme()))
786
-
787
- # either || operator, or unknown
788
- def Tokenize_vbar(self):
789
- self.m_Scanner.BeginLexeme()
790
- t_eToken = TOKEN.OP
791
- if self.m_Scanner.PeekNextToken() == ParseToken.tokVBAR:
792
- self.m_Scanner.Skip(2)
793
- else:
794
- self.m_Scanner.NextToken()
795
- t_eToken = TOKEN.UNKNOWN
796
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(t_eToken, self.m_Scanner.EndLexeme(), self.m_Scanner.EndUpperLexeme()))
797
-
798
- def Tokenize_lbrace(self):
799
- self.m_Scanner.NextToken() # Skip '{'
800
- # Create a checkpoint
801
- t_CP = self.m_Scanner.CreateCheckPoint()
802
- self.m_Scanner.SkipWhitespace()
803
- # Scan in a potential keyword
804
- t_strKeyword = self.m_Scanner.Keyword()
805
- if t_strKeyword in ["d", "ds", "t", "ts"]:
806
- # Recognized dts token
807
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.UNKNOWN, "{"))
808
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.DTS, t_strKeyword))
809
- else:
810
- # wasn't a dts keyword, restore to check point
811
- self.m_Scanner.RestoreCheckPoint(t_CP)
812
- self.m_Tokens.Append(intersystems_iris.dbapi.preparser._Token._Token(TOKEN.UNKNOWN, "{"))
813
-
814
- # Resolve parameters and perform appropriate substitutions
815
- def Resolve(self, p_strInput, p_Parameters):
816
- pOut = PreParseResult()
817
- pOut.p_eStmtType = StatementType.UPDATE
818
- if self.ParamInfoGet() == None:
819
- self.ParamInfoSet(intersystems_iris._IRISList._IRISList())
820
- else:
821
- self.ParamInfoGet().clear() # reset buffer
822
- # Get an enumerator on the token collection
823
- t_Enum = self.m_Tokens.GetEnumerator()
824
- for i in range(1):
825
- # If Parameter list is not empty prior then we have bound parameters
826
- # from a previous parse (or user inputted?)
827
- t_bBoundParameters = (len(p_Parameters._params_list) > 0)
828
- if self.m_Tokens.Count() < 2:
829
- pOut.sResult = p_strInput
830
- break # Resolved
831
- # Make first token current (we know we have at least 2 tokens)
832
- t_Enum.MoveNext()
833
- t_str = t_Enum.Current().UpperLexeme
834
- # TODO: comments are not skipped when the enumerator is reset later in the algorithm; does this need to be fixed? Is this worth fixing?
835
- while TOKEN.UNKNOWN == t_Enum.Current().TokenTypeGet() and t_str.startswith("/*"):
836
- t_Enum.MoveNext() # skip comments
837
- t_str = t_Enum.Current().UpperLexeme
838
- # Determine statement types that need further processing
839
- if t_str in self.s_ParsedStatements:
840
- pOut.p_eStmtType = self.s_ParsedStatements[t_str]
841
- self.CacheOnServerSet(True)
842
- else:
843
- if t_str in self.s_StatementTable:
844
- pOut.p_eStmtType = self.s_StatementTable[t_str]
845
- # Copy the whole statement to the output
846
- if self.m_bBracketSubstitution and self.m_bDelimitedIdentifiers:
847
- t_Enum.Reset()
848
- while t_Enum.MoveNext():
849
- pOut.sResult += t_Enum.Current().Lexeme + " "
850
- else:
851
- # Copy the whole statement to the output and ignore tokenizing
852
- # syntax can fail if not exact
853
- pOut.sResult += p_strInput
854
- if t_str == "EXPLAIN" and pOut.p_eStmtType == StatementType.CALLWITHRESULT:
855
- pQuery = p_strInput
856
- pAlt = "ShowPlan"
857
- pStat = "0"
858
- pQuery = pQuery[(pQuery.upper().find("EXPLAIN") + len("EXPLAIN")):] # slice off "EXPLAIN"
859
- while t_Enum.MoveNext():
860
- if t_Enum.Current().UpperLexeme == "ALT":
861
- pAlt = "ShowPlanAlt"
862
- pQuery = pQuery[(pQuery.upper().find("ALT") + len("ALT")):] # slice off "ALT"
863
- elif t_Enum.Current().UpperLexeme == "STAT":
864
- pStat = "1"
865
- pQuery = pQuery[(pQuery.upper().find("STAT") + len("STAT")):] # slice off "STAT"
866
- else:
867
- p_Parameters._clear()
868
- p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(pQuery, ParameterMode.REPLACED_LITERAL))
869
- p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(pStat, ParameterMode.REPLACED_LITERAL))
870
- p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(pAlt, ParameterMode.REPLACED_LITERAL))
871
- p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter("", ParameterMode.REPLACED_LITERAL))
872
- pOut.sResult = "select %SYSTEM . QUERY_PLAN ( :%qpar(1) , :%qpar(2) , :%qpar(3) , :%qpar(4) ) as Plan"
873
- pOut.p_eStmtType = StatementType.QUERY
874
- self.m_ParamInfo.add(4)
875
- self.m_ParamInfo.add('c')
876
- self.m_ParamInfo.add(2)
877
- self.m_ParamInfo.add('c')
878
- self.m_ParamInfo.add(1)
879
- self.m_ParamInfo.add('c')
880
- self.m_ParamInfo.add(1)
881
- self.m_ParamInfo.add('c')
882
- self.m_ParamInfo.add(1)
883
- self.CacheOnServerSet(False)
884
- return pOut
885
- break # Resolved
886
- else:
887
- if t_str in self.s_TransactionStatements:
888
- self.CacheOnServerSet(True)
889
- else:
890
- if t_str.startswith("("):
891
- if t_Enum.MoveNext():
892
- t_str = t_Enum.Current().UpperLexeme
893
- if t_str == "SELECT":
894
- pOut.p_eStmtType = self.s_ParsedStatements[t_str]
895
- t_Enum.MovePrevious()
896
- self.CacheOnServerSet(True)
897
- if self.m_Tokens.First().GetValue().UpperEquals("SET"):
898
- # Resolve "SET TRANSACTION" and "SET OPTION"
899
- t_NewEnum = self.m_Tokens.GetEnumerator()
900
- t_NewEnum.MoveNext() # "SET" is current
901
- bMoveNext = t_NewEnum.MoveNext() # token after "SET" is current (if any)
902
- if bMoveNext and t_NewEnum.Current().UpperEquals("TRANSACTION"):
903
- self.CacheOnServerSet(True)
904
- if 5 == self.m_Tokens.Count():
905
- if bMoveNext and t_NewEnum.Current().UpperEquals("OPTION"):
906
- bMoveNext = t_NewEnum.MoveNext()
907
- if bMoveNext and t_NewEnum.Current().UpperEquals("BLOB_SUPPORT"):
908
- bMoveNext = t_NewEnum.MoveNext()
909
- if bMoveNext and t_NewEnum.Current().UpperEquals("="):
910
- bMoveNext = t_NewEnum.MoveNext()
911
- if bMoveNext and t_NewEnum.Current().UpperEquals("1"):
912
- pOut.p_eStmtType = StatementType.STREAMS_ON
913
- elif bMoveNext and t_NewEnum.Current().UpperEquals("0"):
914
- pOut.p_eStmtType = StatementType.STREAMS_OFF
915
- else:
916
- raise Exception("BLOB_SUPPORT must be 0 or 1")
917
- else:
918
- raise Exception("Expected '=' after BLOB_SUPPORT")
919
- elif bMoveNext and t_NewEnum.Current().UpperEquals("SYNCHRONOUS_COMMIT"):
920
- bMoveNext = t_NewEnum.MoveNext()
921
- if bMoveNext and t_NewEnum.Current().UpperEquals("="):
922
- bMoveNext = t_NewEnum.MoveNext()
923
- if bMoveNext and t_NewEnum.Current().UpperEquals("1"):
924
- pOut.p_eStmtType = StatementType.SYNC_COMMIT
925
- elif bMoveNext and t_NewEnum.Current().UpperEquals("0"):
926
- pOut.p_eStmtType = StatementType.ASYNC_COMMIT
927
- else:
928
- raise Exception("SYNCHRONOUS_COMMIT must be 0 or 1")
929
- else:
930
- raise Exception("Expected '=' after SYNCHRONOUS_COMMIT")
931
- else:
932
- # aren't there other options beyond BLOB_SUPPORT and SYNCHRONOUS_COMMIT?
933
- raise Exception("Unknown SET OPTION")
934
- t_Enum.Reset()
935
- while t_Enum.MoveNext():
936
- pOut.sResult += t_Enum.Current().Lexeme + " "
937
- break # Resolved
938
- # check for Exec and Call statements
939
- if (not self.CacheOnServerGet()) and self.Exec(pOut, p_Parameters):
940
- self.CacheOnServerSet(True)
941
- break
942
- self.m_nUndefinedCount = 0
943
- if (not self.CacheOnServerGet()) and self.Call(pOut, p_Parameters):
944
- self.CacheOnServerSet(True)
945
- break
946
-
947
- pOut.sResult = ""
948
- t_Enum.Reset()
949
-
950
- self.t_nOpenParen = 0 # keeps track of number of open parentheses
951
- self.t_nOrdinal = 0 # keeps track of where in p_Parameters new parameters will be inserted
952
- self.t_nRound = 0 # keeps track of which argument of ROUND is being parsed
953
- self.t_nRoundNested = 0 # keeps track of any nested parentheses inside of a ROUND argument
954
-
955
- self.orderbyToken = None
956
- self.lastToken = None # previous token that was resolved (not counting things like parentheses and commas)
957
-
958
- t_bQuitLoop = False # currently nothing meaningful is done with this
959
- bFirstElement = True
960
- resolve_switcher = {
961
- TOKEN.QUESTION_MARK: self.Resolve_question_mark,
962
- TOKEN.ATSIGN: self.Resolve_atsign,
963
- TOKEN.HEX: self.Resolve_hex,
964
- TOKEN.ID: functools.partial(self.Resolve_id, stmtType = pOut.p_eStmtType),
965
- TOKEN.STRFUNCTION: self.Resolve_strfunction,
966
- TOKEN.DATATYPE: self.Resolve_datatype,
967
- TOKEN.OPEN_PAREN: self.Resolve_open_paren,
968
- TOKEN.CLOSE_PAREN: self.Resolve_close_paren,
969
- TOKEN.OP: self.Resolve_op,
970
- TOKEN.CONSTANT: self.Resolve_constant,
971
- # TOKEN.NULL: self.Resolve_null,
972
- TOKEN.COMMA: self.Resolve_comma
973
- }
974
- while (not t_bQuitLoop) and t_Enum.MoveNext():
975
- t_Token = t_Enum.Current()
976
- if bFirstElement:
977
- bFirstElement = False
978
- if t_Token.UpperEquals("{"):
979
- raise Exception("'{' encountered at the beginning of the statement") # , "37000", 37000)
980
-
981
- resolve_func = resolve_switcher.get(t_Token.TokenTypeGet(), None)
982
- if resolve_func is not None:
983
- t_bQuitLoop = resolve_func(p_Parameters, t_Enum, t_Token, t_bBoundParameters)
984
-
985
- if t_Token.TokenTypeGet() not in [TOKEN.COMMA, TOKEN.OPEN_PAREN, TOKEN.CLOSE_PAREN]:
986
- self.lastToken = t_Token
987
-
988
- # now that we've resolved every token, need to replace parameters with ":%qpar" syntax
989
- t_Enum.Reset()
990
- t_nParamIndex = 1
991
- t_count = 0
992
-
993
- bExecute = False
994
- while t_Enum.MoveNext():
995
- t_count += 1
996
- t_Token = t_Enum.Current()
997
-
998
- # exclude an initial "EXECUTE" from the final preparsed statement
999
- if t_Token.UpperEquals("EXECUTE"):
1000
- bExecute = True
1001
- if (2 == t_count) and (bExecute):
1002
- if t_Token.UpperEquals("SELECT"):
1003
- pOut.p_eStmtType = StatementType.QUERY
1004
- pOut.sResult = ""
1005
- elif t_Token.UpperEquals("UPDATE") or t_Token.UpperEquals("INSERT"):
1006
- pOut.p_eStmtType = StatementType.UPDATE
1007
- pOut.sResult = ""
1008
-
1009
- if TOKEN.QUESTION_MARK == t_Token.TokenTypeGet() or TOKEN.ATSIGN == t_Token.TokenTypeGet():
1010
- pOut.sResult += "?" if self.embedded else ":%qpar({0})".format(t_nParamIndex)
1011
- t_nParamIndex += 1
1012
- if t_count < t_Enum.Count():
1013
- pOut.sResult += ' '
1014
- else:
1015
- pOut.sResult += t_Token.Lexeme
1016
- if t_count < t_Enum.Count():
1017
- pOut.sResult += ' '
1018
- if t_Token.UpperEquals("SELECT"):
1019
- pOut.sResult = self.appendRowId(pOut.sResult)
1020
- if t_Token.UpperEquals("ORDER"):
1021
- haveMore = t_Enum.MoveNext()
1022
- if haveMore:
1023
- pOut.sResult += t_Enum.Current().Lexeme
1024
- if t_count < t_Enum.Count():
1025
- pOut.sResult += ' '
1026
- if t_Enum.Current().UpperEquals("BY"):
1027
- pOut.sResult = self.appendIdAdded(pOut.sResult)
1028
- # create paramInfo $list to be passed to server
1029
- length = 0
1030
- if len(p_Parameters._params_list) > 0:
1031
- item = p_Parameters._params_list[0]
1032
- if isinstance(item, list) or isinstance(item, tuple):
1033
- length = len(item)
1034
- else:
1035
- length = len(p_Parameters._params_list)
1036
- self.m_ParamInfo.add(length - self.m_ExecParamCount) #len(p_Parameters._params_list)
1037
- if length - self.m_ExecParamCount > 0:
1038
- t_Enum.Reset()
1039
- nParamIndex = 1
1040
- p_Parameters._user_index = [-1]
1041
- while t_Enum.MoveNext():
1042
- if TOKEN.QUESTION_MARK == t_Enum.Current().TokenTypeGet() or TOKEN.ATSIGN == t_Enum.Current().TokenTypeGet():
1043
- if t_Enum.Current().m_replaced:
1044
- self.m_ParamInfo.add('c')
1045
- else:
1046
- self.m_ParamInfo.add('?')
1047
- p_Parameters._add_user_param(None)
1048
- p_Parameters._user_index.append(nParamIndex - 1)
1049
- self.m_ParamInfo.add(t_Enum.Current().m_format)
1050
- nParamIndex += 1
1051
- if nParamIndex == length + 1:
1052
- break
1053
- return pOut
1054
-
1055
- # '?' represents a parameter; adds a parameter to p_Parameters if none were provided
1056
- def Resolve_question_mark(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
1057
- self.t_nOrdinal += 1
1058
- if not t_bBoundParameters:
1059
- p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter("?", ParameterMode.INPUT, '?'))
1060
- return False
1061
-
1062
- # "@" used for named parameters
1063
- def Resolve_atsign(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
1064
- self.hasNamedParameters = True
1065
- self.t_nOrdinal += 1
1066
- if (not t_bBoundParameters) or len(p_Parameters._params_list) == 0:
1067
- p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.UNKNOWN))
1068
- else:
1069
- if not matchUpParam(p_Parameters, t_Token.Lexeme, len(p_Parameters._params_list)):
1070
- p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.UNKNOWN))
1071
- return False
1072
-
1073
- # replaces a hex literal with a parameter
1074
- def Resolve_hex(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
1075
- self.t_nOrdinal += 1
1076
- cp = intersystems_iris.dbapi._Parameter._Parameter(bytes.fromhex(t_Token.Lexeme[2:]), ParameterMode.REPLACED_LITERAL, '?', type = intersystems_iris.dbapi._DBAPI.SQLType.BINARY)
1077
- p_Parameters._params_list.append(cp)
1078
- t_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
1079
- return False
1080
-
1081
- def Resolve_id(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters, stmtType):
1082
- if self.orderbyToken is not None and t_Enum.Current().UpperEquals("UNION"):
1083
- self.orderbyToken = None
1084
- if self.lastToken is not None and self.lastToken == self.orderbyToken:
1085
- self.orderbyToken = t_Token
1086
- self.lastToken = t_Token
1087
- return False
1088
- # ORDER follows parameters, quit early
1089
- if t_Token.UpperEquals("ORDER"):
1090
- t_NewEnum = t_Enum.Clone()
1091
- if t_NewEnum.MoveNext():
1092
- t_NewToken = t_NewEnum.Current()
1093
- if t_NewToken.UpperEquals("BY"):
1094
- self.orderbyToken = t_NewToken
1095
- if self.t_nOpenParen == 0:
1096
- return False
1097
- else:
1098
- while t_Enum.MoveNext():
1099
- if t_Enum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
1100
- self.t_nOpenParen -= 1
1101
- break
1102
- elif (TOKEN.ID == t_Enum.Current().TokenTypeGet()) and (t_Enum.Current().UpperEquals("UNION")):
1103
- break
1104
- # JSON_TABLE should have no literal substitution
1105
- if t_Token.UpperContains("JSON_") or t_Token.UpperContains("_JSON"):
1106
- startParen = self.t_nOpenParen
1107
- while t_Enum.MoveNext():
1108
- if t_Enum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
1109
- self.t_nOpenParen += 1
1110
- if t_Enum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
1111
- self.t_nOpenParen -= 1
1112
- if self.t_nOpenParen == startParen:
1113
- break
1114
- # ROUND special handling for second parameter
1115
- if t_Token.UpperEquals("ROUND"):
1116
- if stmtType == StatementType.QUERY and self.t_nRound == 0:
1117
- self.t_nRound = 1
1118
- # DATEPART with first parameter sent as is, not a literal
1119
- if t_Token.UpperEquals("DATEPART") or t_Token.UpperEquals("TIMESTAMPADD") or t_Token.UpperEquals("TIMESTAMPDIFF"):
1120
- if t_Enum.MoveNext():
1121
- if t_Enum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
1122
- while t_Enum.MoveNext():
1123
- if t_Enum.Current().TokenTypeGet() == TOKEN.CONSTANT:
1124
- t_Enum.Current().TokenTypeSet(TOKEN.ID)
1125
- break
1126
- if t_Enum.Current().TokenTypeGet() in [TOKEN.COMMA, TOKEN.CLOSE_PAREN]:
1127
- break
1128
- else:
1129
- t_Enum.MovePrevious()
1130
- return False
1131
-
1132
- # I honestly have no idea why this method does what it does
1133
- def Resolve_strfunction(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
1134
- parenLevel = 0
1135
- t_TokenLast = None
1136
- inOrderBy = False
1137
- while t_Enum.MoveNext():
1138
- if t_TokenLast is not None and t_TokenLast.UpperLexeme == "ORDER":
1139
- if t_Enum.Current().UpperLexeme == "BY":
1140
- inOrderBy = True
1141
- t_TokenLast = t_Enum.Current()
1142
- if parenLevel == 1 and t_Enum.Current().TokenTypeGet() == TOKEN.COMMA:
1143
- while t_Enum.MoveNext():
1144
- if t_Enum.Current().TokenTypeGet() == TOKEN.CONSTANT:
1145
- t_Enum.Current().TokenTypeSet(TOKEN.ID)
1146
- if parenLevel == 1:
1147
- break
1148
- elif t_Enum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
1149
- parenLevel += 1
1150
- elif t_Enum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
1151
- parenLevel -= 1
1152
- if parenLevel == 1:
1153
- break
1154
- elif t_Enum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
1155
- parenLevel += 1
1156
- elif t_Enum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
1157
- if parenLevel == 1:
1158
- break
1159
- parenLevel -= 1
1160
- elif t_Enum.Current().TokenTypeGet() == TOKEN.CONSTANT:
1161
- bSubstitute = not inOrderBy
1162
- if parenLevel > 1:
1163
- t_Enum.MovePrevious()
1164
- if TOKEN.OPEN_PAREN == t_Enum.Current().TokenTypeGet():
1165
- t_Enum.MoveNext()
1166
- t_Enum.MoveNext()
1167
- if TOKEN.CLOSE_PAREN == t_Enum.Current().TokenTypeGet():
1168
- bSubstitute = False
1169
- t_Enum.MovePrevious()
1170
- else:
1171
- t_Enum.MoveNext()
1172
- if bSubstitute:
1173
- t_Token = t_Enum.Current()
1174
- self.t_nOrdinal = self.DynamicVariable(t_bBoundParameters, t_Token, self.t_nOrdinal, p_Parameters)
1175
- if parenLevel == 0:
1176
- break
1177
- return False
1178
-
1179
- # Skips over the data type's arguments (if any)
1180
- def Resolve_datatype(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
1181
- t_NewEnum = t_Enum.Clone()
1182
- if t_NewEnum.MoveNext():
1183
- t_NewToken = t_NewEnum.Current()
1184
- if TOKEN.OPEN_PAREN == t_NewToken.TokenTypeGet():
1185
- while t_NewEnum.MoveNext():
1186
- t_NewToken = t_NewEnum.Current()
1187
- if t_NewToken.TokenTypeGet() == TOKEN.CLOSE_PAREN:
1188
- break
1189
- t_Enum = t_NewEnum
1190
- return False
1191
-
1192
- # generally just increments t_nOpenParen (and t_nRoundNested, when relevant),
1193
- # but also checks for "((CONSTANT))" syntax (this is a way you can get the preparser to not replace a constant with a parameter)
1194
- def Resolve_open_paren(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
1195
- self.t_nOpenParen += 1
1196
- t_NewEnum = t_Enum.Clone()
1197
- if self.t_nRound > 0:
1198
- self.t_nRoundNested += 1
1199
- if t_NewEnum.MoveNext():
1200
- t_NewToken = t_NewEnum.Current()
1201
- if TOKEN.OPEN_PAREN == t_NewToken.TokenTypeGet():
1202
- self.t_nOpenParen += 1
1203
- if t_NewEnum.MoveNext():
1204
- t_NewToken = t_NewEnum.Current()
1205
- bCurlyBrace = (t_NewToken.Lexeme == "{")
1206
- if TOKEN.CONSTANT == t_NewToken.TokenTypeGet() or bCurlyBrace:
1207
- if t_NewEnum.MoveNext():
1208
- t_NewToken = t_NewEnum.Current()
1209
- if bCurlyBrace:
1210
- while t_NewToken.Lexeme != "}":
1211
- if not t_NewEnum.MoveNext():
1212
- bCurlyBrace = False
1213
- break
1214
- t_NewToken = t_NewEnum.Current()
1215
- bCurlyBrace = False
1216
- if not t_NewEnum.MoveNext():
1217
- return False
1218
- t_NewToken = t_NewEnum.Current()
1219
- if TOKEN.CLOSE_PAREN == t_NewToken.TokenTypeGet():
1220
- self.t_nOpenParen -= 1
1221
- if t_NewEnum.MoveNext():
1222
- t_NewToken = t_NewEnum.Current()
1223
- if TOKEN.CLOSE_PAREN == t_NewToken.TokenTypeGet():
1224
- self.t_nOpenParen -= 1
1225
- t_Enum = t_NewEnum
1226
- if self.t_nRound > 0:
1227
- self.t_nRoundNested -= 1
1228
- return False
1229
-
1230
- # decrements t_nOpenParen (and t_nRoundNested, when relevant)
1231
- def Resolve_close_paren(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
1232
- if self.t_nRound > 0:
1233
- self.t_nRoundNested -= 1
1234
- self.t_nOpenParen -= 1
1235
- return False
1236
-
1237
- # skips over "(CONSTANT)" after an operator (another way to get the preparser to not replace a constant with a parameter)
1238
- def Resolve_op(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
1239
- t_NewEnum = t_Enum.Clone()
1240
- if t_NewEnum.MoveNext():
1241
- t_NewToken = t_NewEnum.Current()
1242
- if TOKEN.OPEN_PAREN == t_NewToken.TokenTypeGet():
1243
- if t_NewEnum.MoveNext() and t_NewEnum.Current().TokenTypeGet() == TOKEN.CONSTANT:
1244
- if t_NewEnum.MoveNext() and t_NewEnum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
1245
- t_Enum = t_NewEnum
1246
- return False
1247
-
-    def Resolve_constant(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
-        # the second argument (and beyond?) of ROUND should not be replaced with a parameter
-        if (self.t_nRound == 2) and (self.t_nRoundNested == 1):
-            t_Enum.MoveNext()
-            if TOKEN.COMMA != t_Enum.Current().TokenTypeGet():
-                self.t_nRound = 0
-                self.t_nRoundNested = 0
-            t_Enum.MovePrevious()
-            return False
-        # Detect and Skip IN clause
-        if self.lastToken is not None:
-            if self.lastToken == self.orderbyToken:
-                self.orderbyToken = t_Token
-                self.lastToken = t_Token
-                return False
-        t_NewEnum = t_Enum.Clone()
-
-        # not 100% sure what this block does
-        if t_NewEnum.MoveNext() and t_NewEnum.Current().TokenTypeGet() == TOKEN.CLOSE_PAREN:
-            t_NewEnum.MovePrevious()
-        if t_NewEnum.MovePrevious() and (t_NewEnum.Current().Lexeme[0] == '-'):
-            t_NewEnum.MovePrevious()
-        if t_Enum.Current() is not None and t_NewEnum.Current().TokenTypeGet() == TOKEN.OPEN_PAREN:
-            t_NewEnum.MovePrevious()
-        if t_Enum.Current() is not None:
-            if TOKEN.ID != t_NewEnum.Current().TokenTypeGet() or (t_NewEnum.Current().UpperLexeme in _PreParser.s_replaceparm):
-                t_Enum.MoveNext()
-                return False
-
-        # determine format the constant will be sent to the server in (stored in paramInfo at the end of Resolve())
-        if t_Enum.Current() is not None:
-            c = t_Enum.Current().Lexeme
-            if c[0] == '\'' or c[0] == '"':
-                if c[-1] != c[0]:
-                    raise Exception("unmatched quote in " + t_Enum.Current().Lexeme)
-                t_Enum.Current().m_format = intersystems_iris.dbapi.preparser._Token._Token.CAST_CHAR
-            else:
-                isInt = True
-                for ii in range(len(c)):
-                    if c[ii] in ['.', 'e', 'E']:
-                        isInt = False
-                        break
-                if isInt:
-                    if (21 < len(c)) or ((c[0] == '-') and (20 < len(c))):
-                        t_Enum.Current().m_format = intersystems_iris.dbapi.preparser._Token._Token.CAST_CHAR
-                    else:
-                        t_Enum.Current().m_format = intersystems_iris.dbapi.preparser._Token._Token.CAST_INT
-                else:
-                    t_Enum.Current().m_format = intersystems_iris.dbapi.preparser._Token._Token.CAST_NUM
-        self.t_nOrdinal = self.DynamicVariable(t_bBoundParameters, t_Token, self.t_nOrdinal, p_Parameters)
-        return False
-
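For reference, the literal-format decision at the end of Resolve_constant can be restated as a tiny standalone classifier. This is a sketch only, under the assumption that the three CAST_* constants on _Token mean "send as string", "send as integer" and "send as numeric"; classify_literal is a hypothetical helper, not part of the package.

# Standalone sketch of the format rules applied by Resolve_constant.
def classify_literal(c):
    if c[0] in ("'", '"'):
        if c[-1] != c[0]:
            raise ValueError("unmatched quote in " + c)
        return "CAST_CHAR"                          # quoted string literal
    if any(ch in ".eE" for ch in c):
        return "CAST_NUM"                           # decimal or scientific notation
    if len(c) > 21 or (c[0] == '-' and len(c) > 20):
        return "CAST_CHAR"                          # too long for an integer, send as text
    return "CAST_INT"

assert classify_literal("'abc'") == "CAST_CHAR"
assert classify_literal("3.14") == "CAST_NUM"
assert classify_literal("42") == "CAST_INT"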
-    # not sure why this does what it does
-    def Resolve_null(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
-        t_NewEnum = t_Enum.Clone()
-        if t_NewEnum.MovePrevious():
-            t_PreviousToken = t_NewEnum.Current()
-            if t_PreviousToken.TokenTypeGet() not in [TOKEN.NOT, TOKEN.IS, TOKEN.THEN, TOKEN.COMMA, TOKEN.OPEN_PAREN, TOKEN.ELSE]:
-                self.t_nOrdinal = self.Null(t_bBoundParameters, t_Token, self.t_nOrdinal, p_Parameters)
-        return False
-
-    def Resolve_comma(self, p_Parameters, t_Enum, t_Token, t_bBoundParameters):
-        if (self.t_nRoundNested == 1) and (self.t_nRound == 1):
-            self.t_nRound += 1
-        return False
-
-    # no idea what this does, I don't think it's used anywhere, but I kept it in just in case
-    @classmethod
-    def GetHexVal(cls, hex):
-        """ generated source for method GetHexVal """
-        val = ord(hex)
-        return val - (48 if val < 58 else (55 if val < 97 else 87))
-
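GetHexVal appears to be a leftover of the Java source it was generated from; the 48/55/87 offsets only line up if the argument is a single character code, which is why ord() is used in the cleaned-up version above (that change is an assumption on my part, since the method looks unused). The arithmetic itself is easy to verify:

# Quick check of the ASCII offsets GetHexVal relies on, assuming it is meant to decode
# one hex digit at a time: '0'-'9' start at 48, 'A'-'F' at 65 (65 - 55 = 10),
# and 'a'-'f' at 97 (97 - 87 = 10).
def hex_digit_value(ch):
    val = ord(ch)
    return val - (48 if val < 58 else (55 if val < 97 else 87))

assert [hex_digit_value(c) for c in "09afAF"] == [0, 9, 10, 15, 10, 15]
assert hex_digit_value("f") == int("f", 16)     # matches Python's own hex parsing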
-    # not sure why this does what it does
-    def Null(self, p_bBoundParameters, p_Token, p_nOrdinal, p_Parameters):
-        p_nOrdinal += 1
-        t_Parameter = intersystems_iris.dbapi._Parameter._Parameter(mode = ParameterMode.DEFAULT_PARAMETER, paramType = 'c')
-        if p_bBoundParameters:
-            p_Parameters._params_list.insert(p_nOrdinal - 1, t_Parameter)
-        else:
-            p_Parameters._params_list.append(t_Parameter)
-        p_Token.Lexeme = "?"
-        p_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
-        p_Token.m_replaced = True
-        return p_nOrdinal
-
-    # matches a named parameter in the SQL statement with a param in the list of parameters
-    # param - list of parameters (p_Parameters from Resolve())
-    # paramName - name of the parameter to be matched
-    # numParam - len(param)
-    def matchUpParam(self, param, paramName, numParam):
-        match = False
-        if not self.hasNamedParameters or (paramName == None or paramName == "" or paramName[0] != '@'):
-            return False
-        for i in range(len(param._params_list)):
-            if (param._params_list[i].name.upper() == paramName.upper()) or (("@" + param._params_list[i].name.upper()) == paramName.upper()):
-                match = True
-                if i != numParam:
-                    cp = param._params_list[i]
-                    cporig = cp
-                    if not cporig.parsermatched:
-                        del param._params_list[i:(i+1)]
-                    else:
-                        cp = cporig.Clone()
-                        cp.name = cporig.name + str(numParam)
-                        cp.mode = ParameterMode.UNKNOWN
-                        if cporig.matchedParameterList == None:
-                            cporig.matchedParameterList = []
-                        cporig.matchedParameterList.append(cp)
-                    cp.parsermatched = True
-                    param._params_list.insert(numParam, cp)
-                else:
-                    param._params_list[i].parsermatched = True
-                break
-        return match
-
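The name comparison inside matchUpParam reduces to a small, order-preserving lookup. The sketch below shows only that comparison; find_named_param is a hypothetical helper, and the real method additionally removes, clones and re-inserts parameter objects so that the bound list ends up in statement order.

# Simplified, standalone sketch of the matching rule used by matchUpParam.
def find_named_param(names, param_name):
    """Return the index of the bound parameter whose name matches '@name',
    comparing case-insensitively and tolerating a missing '@' prefix."""
    if not param_name or param_name[0] != '@':
        return -1
    target = param_name.upper()
    for i, name in enumerate(names):
        if name.upper() == target or ("@" + name.upper()) == target:
            return i
    return -1

assert find_named_param(["@id", "name"], "@NAME") == 1
assert find_named_param(["@id", "name"], "missing") == -1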
-    # I don't 100% follow this function, but I'm pretty sure it spends most of its time trying to isolate a return parameter, if any, then preparses as normal (?)
-    def Call(self, pOut, p_Parameters):
-        t_bRet = False
-        pOut.p_eStmtType = StatementType.UPDATE
-        pOut.sResult = ""
-        for i in range(1):
-            t_Enum = self.m_Tokens.GetEnumerator()
-            t_Enum.MoveNext()
-            t_str = t_Enum.Current().UpperLexeme
-            while (TOKEN.UNKNOWN == t_Enum.Current().TokenTypeGet()) and t_str.startswith("/*"):
-                t_Enum.MoveNext() # skip comments
-                t_str = t_Enum.Current().UpperLexeme
-            t_Token = t_Enum.Current()
-            if t_Token.Lexeme[0] == '{':
-                t_Enum.MoveNext()
-                t_Token = t_Enum.Current()
-            returnParam = None
-            # expects either "? = ..." or one of "CALL", "EXEC", "EXECUTE"
-            if t_Token.TokenTypeGet() == TOKEN.QUESTION_MARK:
-                returnParam = intersystems_iris.dbapi._Parameter._Parameter("?", ParameterMode.RETURN_VALUE, '?')
-                if not t_Enum.MoveNext() or t_Enum.Current().Lexeme[0] != '=':
-                    break
-                if not t_Enum.MoveNext():
-                    break
-            elif not (t_Enum.Current().UpperEquals("CALL") or t_Enum.Current().UpperEquals("EXEC") or t_Enum.Current().UpperEquals("EXECUTE")):
-                return False
-
-            # not really sure what to make of the next couple blocks of code
-            # feels like they should maybe be in another elif block, not their own if block
-            if t_Token.TokenTypeGet() == TOKEN.ATSIGN:
-                self.hasNamedParameters = True
-                returnParam = intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.RETURN_VALUE)
-                if not t_Enum.MoveNext() or t_Enum.Current().Lexeme[0] != '=':
-                    break
-                if not t_Enum.MoveNext():
-                    break
-            if t_Enum.Current().UpperEquals("CALL") or t_Enum.Current().UpperEquals("EXEC") or t_Enum.Current().UpperEquals("EXECUTE"):
-                if not t_Enum.MoveNext():
-                    break
-            else:
-                if TOKEN.STRFUNCTION == t_Enum.Current().TokenTypeGet():
-                    break
-
-            pOut.sResult += t_Enum.Current().Lexeme
-            t_Token = t_Enum.Current()
-            if t_Token.UpperEquals("SELECT") or t_Token.UpperEquals("UPDATE") or t_Token.UpperEquals("INSERT"):
-                pOut.sResult = ""
-                break
-            if not t_Enum.MoveNext():
-                break
-            t_Token = t_Enum.Current()
-            if t_Token.UpperEquals("SELECT") or t_Token.UpperEquals("UPDATE") or t_Token.UpperEquals("INSERT"):
-                pOut.sResult = ""
-                break
-            t_bQuitLoop = False
-            while t_Token.Lexeme[0] == '.':
-                pOut.sResult += '.'
-                if not t_Enum.MoveNext():
-                    t_bQuitLoop = True
-                    break
-                t_Token = t_Enum.Current()
-                if t_Token.TokenTypeGet() == TOKEN.ID:
-                    pOut.sResult += t_Token.Lexeme
-                    if not t_Enum.MoveNext():
-                        t_bQuitLoop = True
-                        break
-                    t_Token = t_Enum.Current()
-            t_bBoundParameters = (len(p_Parameters._params_list) > 0)
-            t_nOrdinal = 0
-            if returnParam is not None:
-                t_nOrdinal += 1
-                if not t_bBoundParameters:
-                    p_Parameters._params_list.insert(0, returnParam)
-                else:
-                    if not self.matchUpParam(p_Parameters, returnParam.GetName(), t_nOrdinal - 1):
-                        if p_Parameters._params_list[0].mode != ParameterMode.RETURN_VALUE:
-                            p_Parameters._params_list.insert(0, returnParam)
-            if not t_bQuitLoop:
-                t_eLastToken = TOKEN.UNKNOWN
-                call_switcher = {
-                    TOKEN.QUESTION_MARK: self.Call_question_mark,
-                    TOKEN.ATSIGN: self.Call_atsign,
-                    TOKEN.HEX: self.Call_hex,
-                    TOKEN.CONSTANT: functools.partial(self.Call_constant_id, t_Enum = t_Enum),
-                    TOKEN.ID: functools.partial(self.Call_constant_id, t_Enum = t_Enum),
-                    TOKEN.NULL: self.Call_null,
-                    TOKEN.COMMA: functools.partial(self.Call_comma_paren, t_eLastToken = t_eLastToken),
-                    TOKEN.CLOSE_PAREN: functools.partial(self.Call_comma_paren, t_eLastToken = t_eLastToken)
-                }
-                while True:
-                    t_Token = t_Enum.Current()
-                    call_func = call_switcher.get(t_Token.TokenTypeGet(), self.Call_default)
-                    (t_nOrdinal, t_eLastToken) = call_func(p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters)
-
-                    if not t_Enum.MoveNext():
-                        break
-            pOut.p_eStmtType = StatementType.CALL if (returnParam == None) else StatementType.CALLWITHRESULT
-            t_bRet = True
-        return t_bRet
-
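The handler table built near the end of Call is a plain dictionary dispatch: functools.partial pre-binds the extra arguments some handlers need, and dict.get() supplies a default handler for token types that have no entry. A minimal sketch of the same pattern, with illustrative names only:

import functools

def handle_constant(value, enum):
    return ("constant", value, enum)

def handle_default(value):
    return ("other", value)

# Token types map to callables; partial() captures the extra 'enum' argument up front.
switcher = {"CONSTANT": functools.partial(handle_constant, enum="tokens")}
assert switcher.get("CONSTANT", handle_default)("42") == ("constant", "42", "tokens")
assert switcher.get("ID", handle_default)("name") == ("other", "name")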
-    def Call_default(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
-        return (t_nOrdinal, t_Token.TokenTypeGet())
-
-    def Call_question_mark(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
-        if not t_bBoundParameters:
-            p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter("?", ParameterMode.INPUT, '?'))
-        return (t_nOrdinal + 1, TOKEN.QUESTION_MARK)
-
-    def Call_atsign(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
-        self.hasNamedParameters = True
-        if (not t_bBoundParameters) or len(p_Parameters._params_list) == 0:
-            p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.UNKNOWN))
-        else:
-            if not self.matchUpParam(p_Parameters, t_Token.Lexeme, t_nOrdinal):
-                p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_Token.Lexeme, ParameterMode.UNKNOWN))
-        return (t_nOrdinal + 1, TOKEN.ATSIGN)
-
-    def Call_hex(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
-        cp = intersystems_iris.dbapi._Parameter._Parameter(bytes.fromhex(t_Token.Lexeme[2:]), ParameterMode.REPLACED_LITERAL, '?')
-        p_Parameters._params_list.append(cp)
-        t_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
-        return (t_nOrdinal + 1, TOKEN.QUESTION_MARK)
-
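Call_hex turns a 0x... literal token into a bytes-valued parameter using only the standard library; the variable names below are illustrative:

lexeme = "0x48656c6c6f"                  # token text as it might appear in the SQL
value = bytes.fromhex(lexeme[2:])        # strip the "0x" prefix, decode the rest
assert value == b"Hello"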
-    def Call_constant_id(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters, t_Enum):
-        t_NewEnum = t_Enum.Clone()
-        if t_NewEnum.MovePrevious():
-            t_PreviousToken = t_NewEnum.Current()
-            if t_PreviousToken.TokenTypeGet() == TOKEN.OP:
-                t_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
-                return (self.DynamicVariable(t_bBoundParameters, intersystems_iris.dbapi.preparser._Token._Token(TOKEN.CONSTANT, t_PreviousToken.Lexeme, t_PreviousToken.UpperLexeme), t_nOrdinal, p_Parameters),
-                        TOKEN.QUESTION_MARK)
-        return (self.DynamicVariable(t_bBoundParameters, t_Token, t_nOrdinal, p_Parameters), TOKEN.QUESTION_MARK)
-
-    def Call_null(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters):
-        return (self.Null(t_bBoundParameters, t_Token, t_nOrdinal, p_Parameters), t_Token.TokenTypeGet())
-
-    def Call_comma_paren(self, p_Parameters, t_Token, t_nOrdinal, t_bBoundParameters, t_eLastToken):
-        if TOKEN.COMMA == t_eLastToken or TOKEN.OPEN_PAREN == t_eLastToken:
-            t_Parameter = intersystems_iris.dbapi._Parameter._Parameter(mode = ParameterMode.DEFAULT_PARAMETER, paramType = 'd')
-            t_nOrdinal += 1
-            self.m_nUndefinedCount += 1
-            if t_bBoundParameters:
-                p_Parameters._params_list.insert(t_nOrdinal - 1, t_Parameter)
-            else:
-                p_Parameters._params_list.append(t_Parameter)
-        return (t_nOrdinal, t_Token.TokenTypeGet())
-
-    # No idea why this function does what it does
-    def Exec(self, pOut, p_Parameters):
-        t_bRet = False
-        pOut.p_eStmtType = StatementType.UPDATE
-        t_Enum = self.m_Tokens.GetEnumerator()
-        for i in range(1):
-            t_Enum.MoveNext()
-            t_Token = t_Enum.Current()
-            if not t_Token.UpperEquals("EXEC") and not t_Token.UpperEquals("EXECUTE"):
-                break
-            pOut.p_eStmtType = StatementType.CALL
-            t_Enum.MoveNext()
-            t_str = t_Enum.Current().UpperLexeme
-            while (TOKEN.UNKNOWN == t_Enum.Current().TokenTypeGet()) and t_str.startswith("/*"):
-                t_Enum.MoveNext() # skip comments
-                t_str = t_Enum.Current().UpperLexeme
-            t_Token = t_Enum.Current()
-            if (t_Token.UpperEquals("SELECT")) or (t_Token.UpperEquals("UPDATE")) or (t_Token.UpperEquals("INSERT")):
-                break
-            t_bRet = True
-            t_bHasReturnType = False
-            if '@' == t_Token.Lexeme[0]:
-                t_bHasReturnType = True
-                if not t_Enum.MoveNext():
-                    break
-                if not t_Enum.MoveNext():
-                    break
-                t_Token = t_Enum.Current()
-                if t_Token.Lexeme != "=":
-                    break
-                if not t_Enum.MoveNext():
-                    break
-                t_Token = t_Enum.Current()
-            pOut.sResult += t_Token.Lexeme
-            if not t_Enum.MoveNext():
-                return True
-            t_Token = t_Enum.Current()
-            t_bQuitLoop = False
-            while t_Token.Lexeme[0] == '.':
-                pOut.sResult += '.'
-                if not t_Enum.MoveNext():
-                    t_bQuitLoop = True
-                    break
-                t_Token = t_Enum.Current()
-                if t_Token.TokenTypeGet() == TOKEN.ID:
-                    pOut.sResult += t_Token.Lexeme
-                    if not t_Enum.MoveNext():
-                        t_bQuitLoop = True
-                        break
-                    t_Token = t_Enum.Current()
-            if t_bQuitLoop:
-                break
-            t_nOrdinal = 0
-            while True:
-                t_Token = t_Enum.Current()
-                if TOKEN.COMMA == t_Token.TokenTypeGet():
-                    if not t_Enum.MoveNext():
-                        break
-                    t_Token = t_Enum.Current()
-                if t_Token.UpperEquals("WITH RECOMPILE"): # Shouldn't it be impossible for this to be a single token?
-                    break
-                t_strParameterName = ""
-                if t_Token.Lexeme[0] == '@':
-                    if t_Enum.MoveNext():
-                        t_strParameterName = t_Enum.Current().Lexeme
-                    bMoveNext = t_Enum.MoveNext()
-                    if (not bMoveNext) or (t_Enum.Current().Lexeme != "="):
-                        if not bMoveNext:
-                            t_bQuitLoop = True
-                        t_Param = intersystems_iris.dbapi._Parameter._Parameter(mode = ParameterMode.INPUT_OUTPUT, name = t_Token.Lexeme[1:], execParam = True)
-                        self.m_ExecParamCount += 1
-                        p_Parameters._params_list.append(t_Param)
-                        continue
-                    t_Enum.MoveNext()
-                    t_Token = t_Enum.Current()
-                    t_Enum.MoveNext()
-                if t_Token.TokenTypeGet() not in [TOKEN.OPEN_PAREN, TOKEN.CLOSE_PAREN, TOKEN.QUESTION_MARK, TOKEN.UNKNOWN]:
-                    if t_Token is not None:
-                        if t_Token.Lexeme[0] == '-':
-                            if not t_Enum.MoveNext():
-                                t_bQuitLoop = True
-                            else:
-                                t_Token = t_Enum.Current()
-                                t_NewToken = intersystems_iris.dbapi.preparser._Token._Token(TOKEN.CONSTANT, "-" + t_Token.Lexeme, "-" + t_Token.UpperLexeme)
-                                t_nOrdinal = self.DynamicVariable(False, t_NewToken, t_nOrdinal, p_Parameters)
-                        else:
-                            t_nOrdinal = self.DynamicVariable(False, t_Token, t_nOrdinal, p_Parameters)
-                        if not t_bQuitLoop:
-                            t_Parameter = p_Parameters._params_list[-1]
-                            t_Parameter.name = t_strParameterName
-                            t_Parameter.execParam = True
-                            self.m_ExecParamCount += 1
-                if not t_Enum.MoveNext():
-                    break
-                if t_bQuitLoop:
-                    break
-            if t_bHasReturnType:
-                pOut.p_eStmtType = StatementType.CALLWITHRESULT
-                t_ReturnParam = intersystems_iris.dbapi._Parameter._Parameter(mode = ParameterMode.UNKNOWN, execParam = True)
-                self.m_ExecParamCount += 1
-                p_Parameters._params_list.insert(0, t_ReturnParam)
-            else:
-                pOut.p_eStmtType = StatementType.CALL
-            t_bRet = True
-        if 0 == self.m_ExecParamCount:
-            return False
-        return t_bRet
-
-    # creates Parameter object for replaced literals
-    def DynamicVariable(self, p_bBoundParameters, p_Token, p_nOrdinal, p_Parameters):
-        p_nOrdinal += 1
-        t_str = p_Token.Lexeme
-        t_c = t_str[0]
-        if t_c in ["'", "\""]:
-            # Remove leading and trailing quotes
-            t_str = t_str[1:-1]
-            # Condense doubled quotes to a single quote
-            t_i = 0
-            while t_i < len(t_str) - 1:
-                if (t_str[t_i] == t_c) and (t_str[t_i + 1] == t_c):
-                    t_str = t_str[:t_i] + t_str[(t_i + 1):]
-                t_i += 1
-        else:
-            if 'e' in t_str or 'E' in t_str:
-                # Normalize number
-                try:
-                    t_double = float(t_str)
-                    t_str = str(t_double)
-                except ValueError:
-                    # wasn't able to parse, leave as is
-                    pass
-            else:
-                p = 0
-                if t_str[p] == '+':
-                    t_str = t_str[1:]
-                if t_str[p] == '-':
-                    p += 1
-                while (p < len(t_str)) and (t_str[p] == '0'):
-                    t_str = t_str[:p] + t_str[(p + 1):]
-                if '.' in t_str:
-                    while t_str[-1] == '0':
-                        t_str = t_str[:-1]
-                    if t_str[-1] == '.':
-                        t_str = t_str[:-1]
-                if p >= len(t_str):
-                    t_str = "0"
-        if p_bBoundParameters:
-            p_Parameters._params_list.insert(p_nOrdinal - 1, intersystems_iris.dbapi._Parameter._Parameter(t_str, ParameterMode.REPLACED_LITERAL))
-        else:
-            p_Parameters._params_list.append(intersystems_iris.dbapi._Parameter._Parameter(t_str, ParameterMode.REPLACED_LITERAL))
-        p_Token.Lexeme = "?"
-        p_Token.TokenTypeSet(TOKEN.QUESTION_MARK)
-        p_Token.m_replaced = True
-        return p_nOrdinal
-
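The two normalizations DynamicVariable applies to a replaced literal, stripping and condensing quotes for strings and trimming signs and zeros for plain numbers, can be sketched in isolation as below. These are hypothetical helpers that simplify the original character-by-character loops, and the scientific-notation branch (which simply round-trips through float()) is omitted.

# Simplified sketch of the literal normalization performed by DynamicVariable.
def unquote_sql_string(s):
    """Strip the outer quotes and condense doubled quotes: 'It''s' -> It's."""
    q = s[0]
    return s[1:-1].replace(q + q, q)

def normalize_number(s):
    """Drop a leading '+', leading zeros and trailing fractional zeros: '+007.50' -> '7.5'."""
    s = s.lstrip("+")
    sign = "-" if s.startswith("-") else ""
    s = s.lstrip("-").lstrip("0") or "0"
    if "." in s:
        s = s.rstrip("0").rstrip(".")
    return sign + (s or "0")

assert unquote_sql_string("'It''s'") == "It's"
assert normalize_number("+007.50") == "7.5"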
-    def appendRowId(self, sb):
-        if self.m_addRowID != 0:
-            return sb + "%ID ,"
-        return sb
-
-    def appendIdAdded(self, sb):
-        if self.m_addRowID == 2:
-            return sb + "%IDADDED "
-        return sb