tracdap-runtime 0.6.5__py3-none-any.whl → 0.7.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. tracdap/rt/__init__.py +6 -5
  2. tracdap/rt/_exec/actors.py +6 -5
  3. tracdap/rt/_exec/context.py +278 -110
  4. tracdap/rt/_exec/dev_mode.py +237 -143
  5. tracdap/rt/_exec/engine.py +223 -64
  6. tracdap/rt/_exec/functions.py +31 -6
  7. tracdap/rt/_exec/graph.py +15 -5
  8. tracdap/rt/_exec/graph_builder.py +301 -203
  9. tracdap/rt/_exec/runtime.py +13 -10
  10. tracdap/rt/_exec/server.py +6 -5
  11. tracdap/rt/_impl/__init__.py +6 -5
  12. tracdap/rt/_impl/config_parser.py +17 -9
  13. tracdap/rt/_impl/data.py +284 -172
  14. tracdap/rt/_impl/ext/__init__.py +14 -0
  15. tracdap/rt/_impl/ext/sql.py +117 -0
  16. tracdap/rt/_impl/ext/storage.py +58 -0
  17. tracdap/rt/_impl/grpc/__init__.py +6 -5
  18. tracdap/rt/_impl/grpc/codec.py +6 -5
  19. tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.py +62 -54
  20. tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.pyi +37 -2
  21. tracdap/rt/_impl/guard_rails.py +6 -5
  22. tracdap/rt/_impl/models.py +6 -5
  23. tracdap/rt/_impl/repos.py +6 -5
  24. tracdap/rt/_impl/schemas.py +6 -5
  25. tracdap/rt/_impl/shim.py +6 -5
  26. tracdap/rt/_impl/static_api.py +30 -16
  27. tracdap/rt/_impl/storage.py +8 -7
  28. tracdap/rt/_impl/type_system.py +6 -5
  29. tracdap/rt/_impl/util.py +16 -5
  30. tracdap/rt/_impl/validation.py +72 -18
  31. tracdap/rt/_plugins/__init__.py +6 -5
  32. tracdap/rt/_plugins/_helpers.py +6 -5
  33. tracdap/rt/_plugins/config_local.py +6 -5
  34. tracdap/rt/_plugins/format_arrow.py +6 -5
  35. tracdap/rt/_plugins/format_csv.py +6 -5
  36. tracdap/rt/_plugins/format_parquet.py +6 -5
  37. tracdap/rt/_plugins/repo_git.py +6 -5
  38. tracdap/rt/_plugins/repo_local.py +6 -5
  39. tracdap/rt/_plugins/repo_pypi.py +6 -5
  40. tracdap/rt/_plugins/storage_aws.py +6 -5
  41. tracdap/rt/_plugins/storage_azure.py +6 -5
  42. tracdap/rt/_plugins/storage_gcp.py +6 -5
  43. tracdap/rt/_plugins/storage_local.py +6 -5
  44. tracdap/rt/_plugins/storage_sql.py +418 -0
  45. tracdap/rt/_plugins/storage_sql_dialects.py +118 -0
  46. tracdap/rt/_version.py +7 -6
  47. tracdap/rt/api/__init__.py +23 -5
  48. tracdap/rt/api/experimental.py +85 -37
  49. tracdap/rt/api/hook.py +16 -5
  50. tracdap/rt/api/model_api.py +110 -90
  51. tracdap/rt/api/static_api.py +142 -100
  52. tracdap/rt/config/common.py +26 -27
  53. tracdap/rt/config/job.py +5 -6
  54. tracdap/rt/config/platform.py +41 -42
  55. tracdap/rt/config/result.py +5 -6
  56. tracdap/rt/config/runtime.py +6 -7
  57. tracdap/rt/exceptions.py +13 -7
  58. tracdap/rt/ext/__init__.py +6 -5
  59. tracdap/rt/ext/config.py +6 -5
  60. tracdap/rt/ext/embed.py +6 -5
  61. tracdap/rt/ext/plugins.py +6 -5
  62. tracdap/rt/ext/repos.py +6 -5
  63. tracdap/rt/ext/storage.py +6 -5
  64. tracdap/rt/launch/__init__.py +10 -5
  65. tracdap/rt/launch/__main__.py +6 -5
  66. tracdap/rt/launch/cli.py +6 -5
  67. tracdap/rt/launch/launch.py +38 -15
  68. tracdap/rt/metadata/__init__.py +4 -0
  69. tracdap/rt/metadata/common.py +2 -3
  70. tracdap/rt/metadata/custom.py +3 -4
  71. tracdap/rt/metadata/data.py +30 -31
  72. tracdap/rt/metadata/file.py +6 -7
  73. tracdap/rt/metadata/flow.py +22 -23
  74. tracdap/rt/metadata/job.py +89 -45
  75. tracdap/rt/metadata/model.py +26 -27
  76. tracdap/rt/metadata/object.py +11 -12
  77. tracdap/rt/metadata/object_id.py +23 -24
  78. tracdap/rt/metadata/resource.py +0 -1
  79. tracdap/rt/metadata/search.py +15 -16
  80. tracdap/rt/metadata/stoarge.py +22 -23
  81. tracdap/rt/metadata/tag.py +8 -9
  82. tracdap/rt/metadata/tag_update.py +11 -12
  83. tracdap/rt/metadata/type.py +38 -38
  84. {tracdap_runtime-0.6.5.dist-info → tracdap_runtime-0.7.0rc1.dist-info}/LICENSE +1 -1
  85. {tracdap_runtime-0.6.5.dist-info → tracdap_runtime-0.7.0rc1.dist-info}/METADATA +4 -2
  86. tracdap_runtime-0.7.0rc1.dist-info/RECORD +121 -0
  87. {tracdap_runtime-0.6.5.dist-info → tracdap_runtime-0.7.0rc1.dist-info}/WHEEL +1 -1
  88. tracdap_runtime-0.6.5.dist-info/RECORD +0 -116
  89. {tracdap_runtime-0.6.5.dist-info → tracdap_runtime-0.7.0rc1.dist-info}/top_level.txt +0 -0
tracdap/rt/_plugins/storage_sql.py ADDED
@@ -0,0 +1,418 @@
+ # Licensed to the Fintech Open Source Foundation (FINOS) under one or
+ # more contributor license agreements. See the NOTICE file distributed
+ # with this work for additional information regarding copyright ownership.
+ # FINOS licenses this file to you under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with the
+ # License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ import contextlib
+ import typing as tp
+ import urllib.parse as urlp
+
+ import pyarrow as pa
+
+ import tracdap.rt.config as cfg
+ import tracdap.rt.exceptions as ex
+ import tracdap.rt.ext.plugins as plugins
+
+ # Import storage interfaces (private extension API)
+ from tracdap.rt._impl.ext.storage import *  # noqa
+ from tracdap.rt._impl.ext.sql import *  # noqa
+
+ import tracdap.rt._plugins._helpers as _helpers
+
+ # TODO: Remove internal references
+ import tracdap.rt._impl.data as _data
+
+
+ class SqlDataStorage(IDataStorageBase[pa.Table, pa.Schema]):
+
+     DIALECT_PROPERTY = "dialect"
+     DRIVER_PROPERTY = "driver.python"
+
+     __DQL_KEYWORDS = ["select"]
+     __DML_KEYWORDS = ["insert", "update", "delete", "merge"]
+     __DDL_KEYWORDS = ["create", "alter", "drop", "grant"]
+
+     def __init__(self, properties: tp.Dict[str, str]):
+
+         self._log = _helpers.logger_for_object(self)
+         self._properties = properties
+
+         dialect_name = _helpers.get_plugin_property(self._properties, self.DIALECT_PROPERTY)
+
+         if dialect_name is None:
+             raise ex.EConfigLoad(f"Missing required property [{self.DIALECT_PROPERTY}]")
+
+         if not plugins.PluginManager.is_plugin_available(ISqlDialect, dialect_name.lower()):
+             raise ex.EPluginNotAvailable(f"SQL dialect [{dialect_name}] is not supported")
+
+         driver_name = _helpers.get_plugin_property(self._properties, self.DRIVER_PROPERTY)
+         if driver_name is None:
+             driver_name = dialect_name.lower()
+
+         if not plugins.PluginManager.is_plugin_available(ISqlDriver, driver_name):
+             raise ex.EPluginNotAvailable(f"SQL driver [{driver_name}] is not available")
+
+         driver_props = self._driver_props(driver_name)
+         driver_cfg = cfg.PluginConfig(protocol=driver_name.lower(), properties=driver_props)
+         dialect_cfg = cfg.PluginConfig(protocol=dialect_name.lower(), properties={})
+
+         self._log.info(f"Loading SQL driver [{driver_name}] for dialect [{dialect_name}]")
+
+         self._driver = plugins.PluginManager.load_plugin(ISqlDriver, driver_cfg)
+         self._dialect = plugins.PluginManager.load_plugin(ISqlDialect, dialect_cfg)
+
+         # Test connectivity
+         with self._connection():
+             pass
+
+     def _driver_props(self, driver_name: str) -> tp.Dict[str, str]:
+
+         driver_props = dict()
+         driver_filter = f"{driver_name}."
+
+         for key, value in self._properties.items():
+             if key.startswith(driver_filter):
+                 dialect_key = key[len(driver_filter):]
+                 driver_props[dialect_key] = value
+
+         return driver_props
+
+     def _connection(self) -> DbApiWrapper.Connection:
+
+         return contextlib.closing(self._driver.connect())  # noqa
+
+     def _cursor(self, conn: DbApiWrapper.Connection) -> DbApiWrapper.Cursor:
+
+         return contextlib.closing(conn.cursor())  # noqa
+
+     def data_type(self) -> tp.Type[pa.Table]:
+         return pa.Table
+
+     def schema_type(self) -> tp.Type[pa.Schema]:
+         return pa.Schema
+
+     def has_table(self, table_name: str):
+
+         with self._driver.error_handling():
+             return self._driver.has_table(table_name)
+
+     def list_tables(self):
+
+         with self._driver.error_handling():
+             return self._driver.list_tables()
+
+     def create_table(self, table_name: str, schema: pa.Schema):
+
+         with self._driver.error_handling():
+
+             def type_decl(field: pa.Field):
+                 sql_type = self._dialect.arrow_to_sql_type(field.type)
+                 null_qualifier = " NULL" if field.nullable else " NOT NULL"
+                 return f"{field.name} {sql_type}{null_qualifier}"
+
+             create_fields = map(lambda i: type_decl(schema.field(i)), range(len(schema.names)))
+             create_stmt = f"create table {table_name} (" + ", ".join(create_fields) + ")"
+
+             with self._connection() as conn, self._cursor(conn) as cur:
+                 cur.execute(create_stmt, [])
+                 conn.commit()  # Some drivers / dialects (Postgres) require commit for create table
+
+     def read_table(self, table_name: str) -> pa.Table:
+
+         select_stmt = f"select * from {table_name}"  # noqa
+
+         return self.native_read_query(select_stmt)
+
+     def native_read_query(self, query: str, **parameters) -> pa.Table:
+
+         # Real restrictions are enforced in deployment, by permissions granted to service accounts
+         # This is a sanity check to catch common errors before sending a query to the backend
+         self._check_read_query(query)
+
+         with self._driver.error_handling():
+
+             with self._connection() as conn, self._cursor(conn) as cur:
+
+                 cur.execute(query, parameters)
+                 sql_batch = cur.fetchmany()
+
+                 # Read queries should always return a result set, even if it is empty
+                 if not cur.description:
+                     raise ex.EStorage(f"Query did not return a result set: {query}")
+
+                 arrow_schema = self._decode_sql_schema(cur.description)
+                 arrow_batches: tp.List[pa.RecordBatch] = []
+
+                 while len(sql_batch) > 0:
+
+                     arrow_batch = self._decode_sql_batch(arrow_schema, sql_batch)
+                     arrow_batches.append(arrow_batch)
+
+                     # Sometimes the schema is not fully defined up front (because cur.description is not sufficient)
+                     # If type information has been inferred from the batch, update the schema accordingly
+                     arrow_schema = arrow_batch.schema
+
+                     sql_batch = cur.fetchmany()
+
+                 return pa.Table.from_batches(arrow_batches, arrow_schema)  # noqa
+
+     def write_table(self, table_name: str, table: pa.Table):
+
+         with self._driver.error_handling():
+
+             insert_fields = ", ".join(table.schema.names)
+             insert_markers = ", ".join(f":{name}" for name in table.schema.names)
+             insert_stmt = f"insert into {table_name}({insert_fields}) values ({insert_markers})"  # noqa
+
+             with self._connection() as conn:
+
+                 # Use execute many to perform a batch write
+                 with self._cursor(conn) as cur:
+                     if table.num_rows > 0:
+                         # Provider converts rows on demand, to optimize for memory
+                         row_provider = self._encode_sql_rows_dict(table)
+                         cur.executemany(insert_stmt, row_provider)
+                     else:
+                         # Do not try to insert if there are no rows to bind
+                         pass
+
+                 conn.commit()
+
+     def _check_read_query(self, query):
+
+         if not any(map(lambda keyword: keyword in query.lower(), self.__DQL_KEYWORDS)):
+             raise ex.EStorageRequest(f"Query is not a read query: {query}")
+
+         if any(map(lambda keyword: keyword in query.lower(), self.__DML_KEYWORDS)):
+             raise ex.EStorageRequest(f"Query is not a read query: {query}")
+
+         if any(map(lambda keyword: keyword in query.lower(), self.__DDL_KEYWORDS)):
+             raise ex.EStorageRequest(f"Query is not a read query: {query}")
+
+     @staticmethod
+     def _decode_sql_schema(description: tp.List[tp.Tuple]):
+
+         # TODO: Infer Python / Arrow type using DB API type code
+         # These codes are db-specific so decoding would probably be on a best effort basis
+         # However the information is public for many popular db engines
+         # The current logic can be kept as a fallback (set type info on reading first non-null value)
+
+         def _decode_sql_field(field_desc: tp.Tuple):
+             field_name, type_code, _, _, precision, scale, null_ok = field_desc
+             return pa.field(field_name, pa.null(), null_ok)
+
+         fields = map(_decode_sql_field, description)
+
+         return pa.schema(fields)
+
+     def _decode_sql_batch(self, schema: pa.Schema, sql_batch: tp.List[tp.Tuple]) -> pa.RecordBatch:
+
+         py_dict: tp.Dict[str, pa.Array] = {}
+
+         for i, col in enumerate(schema.names):
+
+             arrow_type = schema.types[i]
+
+             if pa.types.is_null(arrow_type):
+                 values = list(map(lambda row: row[i], sql_batch))
+                 concrete_value = next(v for v in values if v is not None)
+                 if concrete_value is not None:
+                     arrow_type = _data.DataMapping.python_to_arrow_type(type(concrete_value))
+                     arrow_field = pa.field(schema.names[i], arrow_type, nullable=True)
+                     schema = schema.remove(i).insert(i, arrow_field)
+             else:
+                 python_type = _data.DataMapping.arrow_to_python_type(arrow_type)
+                 values = map(lambda row: self._driver.decode_sql_value(row[i], python_type), sql_batch)
+
+             py_dict[col] = pa.array(values, type=arrow_type)
+
+         return pa.RecordBatch.from_pydict(py_dict, schema)
+
+     def _encode_sql_rows_tuple(self, table: pa.Table) -> tp.Iterator[tp.Tuple]:
+
+         for row in range(0, table.num_rows):
+             row_values = map(lambda col: self._driver.encode_sql_value(col[row].as_py()), table.columns)
+             yield tuple(row_values)
+
+     def _encode_sql_rows_dict(self, table: pa.Table) -> tp.Iterator[tp.Tuple]:
+
+         for row in range(0, table.num_rows):
+             row_values = map(lambda col: self._driver.encode_sql_value(col[row].as_py()), table.columns)
+             yield dict(zip(table.column_names, row_values))
+
+
+ class SqlStorageProvider(IStorageProvider):
+
+     def __init__(self, properties: tp.Dict[str, str]):
+         self._properties = properties
+
+     def has_data_storage(self) -> bool:
+         return True
+
+     def get_data_storage(self) -> IDataStorageBase:
+         return SqlDataStorage(self._properties)
+
+
+ # Register with the plugin manager
+ plugins.PluginManager.register_plugin(IStorageProvider, SqlStorageProvider, ["SQL"])
+
+
+ try:
+
+     import sqlalchemy as sqla  # noqa
+     import sqlalchemy.exc as sqla_exc  # noqa
+
+     class SqlAlchemyDriver(ISqlDriver):
+
+         def __init__(self, properties: tp.Dict[str, str]):
+
+             self._log = _helpers.logger_for_object(self)
+
+             raw_url = properties.get('url')
+
+             if raw_url is None or raw_url.strip() == '':
+                 raise ex.EConfigLoad("Missing required property [url] for SQL driver [alchemy]")
+
+             url = urlp.urlparse(raw_url)
+             credentials = _helpers.get_http_credentials(url, properties)
+             url = _helpers.apply_http_credentials(url, credentials)
+
+             filtered_keys = ["url", "username", "password", "token"]
+             filtered_props = dict(kv for kv in properties.items() if kv[0] not in filtered_keys)
+
+             self._log.info("Connecting: %s", _helpers.log_safe_url(url))
+
+             try:
+                 self.__engine = sqla.create_engine(url.geturl(), **filtered_props)
+             except ModuleNotFoundError as e:
+                 raise ex.EPluginNotAvailable("SQL driver is not available: " + str(e)) from e
+
+         def param_style(self) -> "DbApiWrapper.ParamStyle":
+             return DbApiWrapper.ParamStyle.NAMED
+
+         def connect(self, **kwargs) -> "DbApiWrapper.Connection":
+
+             return SqlAlchemyDriver.ConnectionWrapper(self.__engine.connect())
+
+         def has_table(self, table_name: str):
+
+             with self.__engine.connect() as conn:
+                 inspection = sqla.inspect(conn)
+                 return inspection.has_table(table_name)
+
+         def list_tables(self):
+
+             with self.__engine.connect() as conn:
+                 inspection = sqla.inspect(conn)
+                 return inspection.get_table_names()
+
+         def encode_sql_value(self, py_value: tp.Any) -> tp.Any:
+
+             return py_value
+
+         def decode_sql_value(self, sql_value: tp.Any, python_type: tp.Type) -> tp.Any:
+
+             return sql_value
+
+         @contextlib.contextmanager
+         def error_handling(self) -> contextlib.contextmanager:
+
+             try:
+                 yield
+             except (sqla_exc.OperationalError, sqla_exc.ProgrammingError, sqla_exc.StatementError) as e:
+                 raise ex.EStorageRequest(*e.args) from e
+             except sqla_exc.SQLAlchemyError as e:
+                 raise ex.EStorage() from e
+
+         class ConnectionWrapper(DbApiWrapper.Connection):
+
+             def __init__(self, conn: sqla.Connection):
+                 self.__conn = conn
+
+             def close(self):
+                 self.__conn.close()
+
+             def commit(self):
+                 self.__conn.commit()
+
+             def rollback(self):
+                 self.__conn.rollback()
+
+             def cursor(self) -> "DbApiWrapper.Cursor":
+                 return SqlAlchemyDriver.CursorWrapper(self.__conn)
+
+         class CursorWrapper(DbApiWrapper.Cursor):
+
+             arraysize: int = 1000
+
+             def __init__(self, conn: sqla.Connection):
+                 self.__conn = conn
+                 self.__result: tp.Optional[sqla.CursorResult] = None
+
+             @property
+             def description(self):
+
+                 # Prefer description from the underlying cursor if available
+                 if self.__result.cursor is not None and self.__result.cursor.description:
+                     return self.__result.cursor.description
+
+                 if not self.__result.returns_rows:
+                     return None
+
+                 # SQL Alchemy sometimes closes the cursor and the description is lost
+                 # Fall back on using the Result API to generate a description with field names only
+
+                 def name_only_field_desc(field_name):
+                     return field_name, None, None, None, None, None, None
+
+                 return list(map(name_only_field_desc, self.__result.keys()))
+
+             @property
+             def rowcount(self) -> int:
+
+                 # Prefer the value from the underlying cursor if it is available
+                 if self.__result.cursor is not None:
+                     return self.__result.cursor.rowcount
+
+                 return self.__result.rowcount  # noqa
+
+             def execute(self, statement: str, parameters: tp.Union[tp.Dict, tp.Sequence]):
+
+                 self.__result = self.__conn.execute(sqla.text(statement), parameters)
+
+             def executemany(self, statement: str, parameters: tp.Iterable[tp.Union[tp.Dict, tp.Sequence]]):
+
+                 if not isinstance(parameters, tp.List):
+                     parameters = list(parameters)
+
+                 self.__result = self.__conn.execute(sqla.text(statement), parameters)
+
+             def fetchone(self) -> tp.Tuple:
+
+                 row = self.__result.fetchone()
+                 return row.tuple() if row is not None else None
+
+             def fetchmany(self, size: int = arraysize) -> tp.Sequence[tp.Tuple]:
+
+                 sqla_rows = self.__result.fetchmany(self.arraysize)
+                 return list(map(sqla.Row.tuple, sqla_rows))  # noqa
+
+             def close(self):
+
+                 if self.__result is not None:
+                     self.__result.close()
+
+     plugins.PluginManager.register_plugin(ISqlDriver, SqlAlchemyDriver, ["alchemy"])
+
+ except ModuleNotFoundError:
+     pass
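
In practice this storage plugin is reached through the platform storage configuration (the provider above is registered under the SQL protocol), but the property keys it expects follow directly from the code. A minimal sketch, not part of the package: the module paths are the ones added in this release, the dialect and driver names come from the registrations above and in storage_sql_dialects.py below, and the connection URL, credentials and table name are placeholders.

    # Hypothetical configuration: "dialect" picks an ISqlDialect plugin, "driver.python" picks an
    # ISqlDriver, and properties prefixed with "<driver>." reach the driver with the prefix stripped

    import tracdap.rt._plugins.storage_sql as storage_sql    # registers the "SQL" provider and "alchemy" driver
    import tracdap.rt._plugins.storage_sql_dialects  # noqa  # registers the bundled SQL dialects

    props = {
        "dialect": "postgresql",        # selects PostgresqlDialect
        "driver.python": "alchemy",     # selects the SQLAlchemy driver wrapper
        "alchemy.url": "postgresql+psycopg2://trac:secret@localhost:5432/trac_data",  # placeholder URL
    }

    storage = storage_sql.SqlDataStorage(props)        # the constructor opens a test connection

    if storage.has_table("customer_accounts"):                  # hypothetical table name
        arrow_table = storage.read_table("customer_accounts")   # returns a pyarrow.Table
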
tracdap/rt/_plugins/storage_sql_dialects.py ADDED
@@ -0,0 +1,118 @@
+ # Licensed to the Fintech Open Source Foundation (FINOS) under one or
+ # more contributor license agreements. See the NOTICE file distributed
+ # with this work for additional information regarding copyright ownership.
+ # FINOS licenses this file to you under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with the
+ # License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ import typing as tp
+
+ import pyarrow as pa
+
+ import tracdap.rt.exceptions as ex
+ import tracdap.rt.ext.plugins as plugins
+
+ from tracdap.rt._impl.ext.sql import *  # noqa
+
+
+
+ class AnsiStandardDialect(ISqlDialect):
+
+     def arrow_to_sql_type(self, arrow_type: pa.DataType) -> str:
+
+         if pa.types.is_boolean(arrow_type):
+             return "boolean"
+
+         if pa.types.is_integer(arrow_type):
+             return "bigint"
+
+         if pa.types.is_floating(arrow_type):
+             return "double precision"
+
+         if pa.types.is_decimal(arrow_type):
+             return "decimal (31, 10)"
+
+         if pa.types.is_string(arrow_type):
+             return "varchar(4096)"
+
+         if pa.types.is_date(arrow_type):
+             return "date"
+
+         if pa.types.is_timestamp(arrow_type):
+             return "timestamp (6)"
+
+         raise ex.ETracInternal(f"Unsupported data type [{str(arrow_type)}] in SQL dialect [{self.__class__.__name__}]")
+
+
+ class MySqlDialect(AnsiStandardDialect):
+
+     def __init__(self, properties: tp.Dict[str, str]):
+         self._properties = properties
+
+     def arrow_to_sql_type(self, arrow_type: pa.DataType) -> str:
+
+         if pa.types.is_floating(arrow_type):
+             return "double"
+
+         if pa.types.is_string(arrow_type):
+             return "varchar(8192)"
+
+         return super().arrow_to_sql_type(arrow_type)
+
+
+ class MariaDbDialect(MySqlDialect):
+
+     def __init__(self, properties: tp.Dict[str, str]):
+         super().__init__(properties)
+
+     # Inherit MySQL implementation
+     pass
+
+
+ class PostgresqlDialect(AnsiStandardDialect):
+
+     def __init__(self, properties: tp.Dict[str, str]):
+         self._properties = properties
+
+     def arrow_to_sql_type(self, arrow_type: pa.DataType) -> str:
+
+         if pa.types.is_string(arrow_type):
+             return "varchar"
+
+         return super().arrow_to_sql_type(arrow_type)
+
+
+ class SqlServerDialect(AnsiStandardDialect):
+
+     def __init__(self, properties: tp.Dict[str, str]):
+         self._properties = properties
+
+     def arrow_to_sql_type(self, arrow_type: pa.DataType) -> str:
+
+         if pa.types.is_boolean(arrow_type):
+             return "bit"
+
+         if pa.types.is_floating(arrow_type):
+             return "float(53)"
+
+         if pa.types.is_string(arrow_type):
+             return "varchar(8000)"
+
+         if pa.types.is_timestamp(arrow_type):
+             return "datetime2"
+
+         return super().arrow_to_sql_type(arrow_type)
+
+
+ plugins.PluginManager.register_plugin(ISqlDialect, MySqlDialect, ["mysql"])
+ plugins.PluginManager.register_plugin(ISqlDialect, MariaDbDialect, ["mariadb"])
+ plugins.PluginManager.register_plugin(ISqlDialect, PostgresqlDialect, ["postgresql"])
+ plugins.PluginManager.register_plugin(ISqlDialect, SqlServerDialect, ["sqlserver"])
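
The dialect classes follow one pattern: subclass AnsiStandardDialect, override arrow_to_sql_type for the types the engine names differently, and register the class with the plugin manager. A hedged sketch of an additional dialect, using SQLite purely as a hypothetical example; it is not shipped in this release and the type affinities below are assumptions.

    import typing as tp

    import pyarrow as pa

    import tracdap.rt.ext.plugins as plugins

    from tracdap.rt._impl.ext.sql import *  # noqa
    from tracdap.rt._plugins.storage_sql_dialects import AnsiStandardDialect


    class SqliteDialect(AnsiStandardDialect):

        def __init__(self, properties: tp.Dict[str, str]):
            self._properties = properties

        def arrow_to_sql_type(self, arrow_type: pa.DataType) -> str:

            # SQLite column affinities are broad, so only a few overrides are needed
            if pa.types.is_integer(arrow_type):
                return "integer"

            if pa.types.is_floating(arrow_type):
                return "real"

            if pa.types.is_string(arrow_type):
                return "text"

            # Fall back on the ANSI mappings for everything else
            return super().arrow_to_sql_type(arrow_type)


    plugins.PluginManager.register_plugin(ISqlDialect, SqliteDialect, ["sqlite"])
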
tracdap/rt/_version.py CHANGED
@@ -1,8 +1,9 @@
- # Copyright 2021 Accenture Global Solutions Limited
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
+ # Licensed to the Fintech Open Source Foundation (FINOS) under one or
+ # more contributor license agreements. See the NOTICE file distributed
+ # with this work for additional information regarding copyright ownership.
+ # FINOS licenses this file to you under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with the
+ # License. You may obtain a copy of the License at
  #
  # http://www.apache.org/licenses/LICENSE-2.0
  #
@@ -12,4 +13,4 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
 
- __version__ = "0.6.5"
+ __version__ = "0.7.0rc1"
tracdap/rt/api/__init__.py CHANGED
@@ -1,8 +1,9 @@
- # Copyright 2020 Accenture Global Solutions Limited
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
+ # Licensed to the Fintech Open Source Foundation (FINOS) under one or
+ # more contributor license agreements. See the NOTICE file distributed
+ # with this work for additional information regarding copyright ownership.
+ # FINOS licenses this file to you under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with the
+ # License. You may obtain a copy of the License at
  #
  # http://www.apache.org/licenses/LICENSE-2.0
  #
@@ -12,6 +13,9 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
 
+ """
+ TRAC model API for Python
+ """
 
  from .model_api import *
  from .static_api import *
@@ -21,10 +25,24 @@ from .static_api import *
  from tracdap.rt.metadata import *  # noqa DOCGEN_REMOVE
 
  # Map basic types into the root of the API package
+
  BOOLEAN = BasicType.BOOLEAN
+ """Synonym for :py:attr:`BasicType.BOOLEAN <tracdap.rt.metadata.BasicType.BOOLEAN>`"""
+
  INTEGER = BasicType.INTEGER
+ """Synonym for :py:attr:`BasicType.INTEGER <tracdap.rt.metadata.BasicType.INTEGER>`"""
+
  FLOAT = BasicType.FLOAT
+ """Synonym for :py:attr:`BasicType.FLOAT <tracdap.rt.metadata.BasicType.FLOAT>`"""
+
  DECIMAL = BasicType.DECIMAL
+ """Synonym for :py:attr:`BasicType.DECIMAL <tracdap.rt.metadata.BasicType.DECIMAL>`"""
+
  STRING = BasicType.STRING
+ """Synonym for :py:attr:`BasicType.STRING <tracdap.rt.metadata.BasicType.STRING>`"""
+
  DATE = BasicType.DATE
+ """Synonym for :py:attr:`BasicType.DATE <tracdap.rt.metadata.BasicType.DATE>`"""
+
  DATETIME = BasicType.DATETIME
+ """Synonym for :py:attr:`BasicType.DATETIME <tracdap.rt.metadata.BasicType.DATETIME>`"""