tracdap-runtime 0.6.5__tar.gz → 0.7.0rc1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (141)
  1. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/LICENSE +1 -1
  2. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/PKG-INFO +4 -2
  3. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/setup.cfg +5 -2
  4. tracdap_runtime-0.7.0rc1/src/tracdap/rt/__init__.py +16 -0
  5. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/actors.py +6 -5
  6. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/context.py +278 -110
  7. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/dev_mode.py +237 -143
  8. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/engine.py +223 -64
  9. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/functions.py +31 -6
  10. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/graph.py +15 -5
  11. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/graph_builder.py +301 -203
  12. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/runtime.py +13 -10
  13. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/server.py +6 -5
  14. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_impl/__init__.py +15 -0
  15. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/config_parser.py +17 -9
  16. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/data.py +284 -172
  17. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_impl/ext/__init__.py +14 -0
  18. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_impl/ext/sql.py +117 -0
  19. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_impl/ext/storage.py +58 -0
  20. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_impl/grpc/__init__.py +14 -0
  21. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/codec.py +6 -5
  22. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.py +128 -0
  23. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.pyi +37 -2
  24. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/guard_rails.py +6 -5
  25. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/models.py +6 -5
  26. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/repos.py +6 -5
  27. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/schemas.py +6 -5
  28. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/shim.py +6 -5
  29. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/static_api.py +30 -16
  30. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/storage.py +8 -7
  31. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/type_system.py +6 -5
  32. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/util.py +16 -5
  33. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/validation.py +72 -18
  34. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_plugins/__init__.py +15 -0
  35. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/_helpers.py +6 -5
  36. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/config_local.py +6 -5
  37. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/format_arrow.py +6 -5
  38. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/format_csv.py +6 -5
  39. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/format_parquet.py +6 -5
  40. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/repo_git.py +6 -5
  41. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/repo_local.py +6 -5
  42. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/repo_pypi.py +6 -5
  43. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/storage_aws.py +6 -5
  44. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/storage_azure.py +6 -5
  45. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/storage_gcp.py +6 -5
  46. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_plugins/storage_local.py +6 -5
  47. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_plugins/storage_sql.py +418 -0
  48. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_plugins/storage_sql_dialects.py +118 -0
  49. tracdap_runtime-0.7.0rc1/src/tracdap/rt/_version.py +16 -0
  50. tracdap_runtime-0.7.0rc1/src/tracdap/rt/api/__init__.py +48 -0
  51. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/api/experimental.py +85 -37
  52. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/api/hook.py +16 -5
  53. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/api/model_api.py +110 -90
  54. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/api/static_api.py +142 -100
  55. tracdap_runtime-0.7.0rc1/src/tracdap/rt/config/common.py +83 -0
  56. tracdap_runtime-0.7.0rc1/src/tracdap/rt/config/job.py +22 -0
  57. tracdap_runtime-0.7.0rc1/src/tracdap/rt/config/platform.py +158 -0
  58. tracdap_runtime-0.7.0rc1/src/tracdap/rt/config/result.py +26 -0
  59. tracdap_runtime-0.7.0rc1/src/tracdap/rt/config/runtime.py +28 -0
  60. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/exceptions.py +13 -7
  61. tracdap_runtime-0.7.0rc1/src/tracdap/rt/ext/__init__.py +14 -0
  62. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/ext/config.py +6 -5
  63. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/ext/embed.py +6 -5
  64. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/ext/plugins.py +6 -5
  65. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/ext/repos.py +6 -5
  66. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/ext/storage.py +6 -5
  67. tracdap_runtime-0.7.0rc1/src/tracdap/rt/launch/__init__.py +21 -0
  68. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/launch/__main__.py +6 -5
  69. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/launch/cli.py +6 -5
  70. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/launch/launch.py +38 -15
  71. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/__init__.py +4 -0
  72. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/common.py +2 -3
  73. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/custom.py +3 -4
  74. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/data.py +30 -31
  75. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/file.py +6 -7
  76. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/flow.py +22 -23
  77. tracdap_runtime-0.7.0rc1/src/tracdap/rt/metadata/job.py +252 -0
  78. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/model.py +26 -27
  79. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/object.py +11 -12
  80. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/object_id.py +23 -24
  81. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/resource.py +0 -1
  82. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/search.py +15 -16
  83. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/stoarge.py +22 -23
  84. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/tag.py +8 -9
  85. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/tag_update.py +11 -12
  86. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/metadata/type.py +38 -38
  87. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/tracdap_runtime.egg-info/PKG-INFO +4 -2
  88. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/tracdap_runtime.egg-info/SOURCES.txt +5 -0
  89. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/tracdap_runtime.egg-info/requires.txt +3 -0
  90. tracdap_runtime-0.6.5/src/tracdap/rt/__init__.py +0 -15
  91. tracdap_runtime-0.6.5/src/tracdap/rt/_impl/__init__.py +0 -14
  92. tracdap_runtime-0.6.5/src/tracdap/rt/_impl/grpc/__init__.py +0 -13
  93. tracdap_runtime-0.6.5/src/tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.py +0 -120
  94. tracdap_runtime-0.6.5/src/tracdap/rt/_plugins/__init__.py +0 -14
  95. tracdap_runtime-0.6.5/src/tracdap/rt/_version.py +0 -15
  96. tracdap_runtime-0.6.5/src/tracdap/rt/api/__init__.py +0 -30
  97. tracdap_runtime-0.6.5/src/tracdap/rt/config/common.py +0 -84
  98. tracdap_runtime-0.6.5/src/tracdap/rt/config/job.py +0 -23
  99. tracdap_runtime-0.6.5/src/tracdap/rt/config/platform.py +0 -159
  100. tracdap_runtime-0.6.5/src/tracdap/rt/config/result.py +0 -27
  101. tracdap_runtime-0.6.5/src/tracdap/rt/config/runtime.py +0 -29
  102. tracdap_runtime-0.6.5/src/tracdap/rt/ext/__init__.py +0 -13
  103. tracdap_runtime-0.6.5/src/tracdap/rt/launch/__init__.py +0 -16
  104. tracdap_runtime-0.6.5/src/tracdap/rt/metadata/job.py +0 -208
  105. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/README.md +0 -0
  106. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/pyproject.toml +0 -0
  107. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_exec/__init__.py +0 -0
  108. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2.py +0 -0
  109. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2.pyi +0 -0
  110. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2_grpc.py +0 -0
  111. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/common_pb2.py +0 -0
  112. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/common_pb2.pyi +0 -0
  113. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/custom_pb2.py +0 -0
  114. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/custom_pb2.pyi +0 -0
  115. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.py +0 -0
  116. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.pyi +0 -0
  117. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/file_pb2.py +0 -0
  118. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/file_pb2.pyi +0 -0
  119. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/flow_pb2.py +0 -0
  120. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/flow_pb2.pyi +0 -0
  121. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/model_pb2.py +0 -0
  122. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/model_pb2.pyi +0 -0
  123. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/object_id_pb2.py +0 -0
  124. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/object_id_pb2.pyi +0 -0
  125. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/object_pb2.py +0 -0
  126. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/object_pb2.pyi +0 -0
  127. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/resource_pb2.py +0 -0
  128. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/resource_pb2.pyi +0 -0
  129. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/search_pb2.py +0 -0
  130. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/search_pb2.pyi +0 -0
  131. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/stoarge_pb2.py +0 -0
  132. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/stoarge_pb2.pyi +0 -0
  133. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/tag_pb2.py +0 -0
  134. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/tag_pb2.pyi +0 -0
  135. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/tag_update_pb2.py +0 -0
  136. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/tag_update_pb2.pyi +0 -0
  137. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/type_pb2.py +0 -0
  138. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/_impl/grpc/tracdap/metadata/type_pb2.pyi +0 -0
  139. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/src/tracdap/rt/config/__init__.py +0 -0
  140. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/tracdap_runtime.egg-info/dependency_links.txt +0 -0
  141. {tracdap_runtime-0.6.5 → tracdap_runtime-0.7.0rc1}/tracdap_runtime.egg-info/top_level.txt +0 -0
@@ -187,7 +187,7 @@
  same "printed page" as the copyright notice for easier
  identification within third-party archives.

- Copyright 2022 Accenture Global Solutions Limited
+ Copyright [yyyy] [name of copyright owner]

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
@@ -1,10 +1,10 @@
  Metadata-Version: 2.1
  Name: tracdap-runtime
- Version: 0.6.5
+ Version: 0.7.0rc1
  Summary: Runtime package for building models on the TRAC Data & Analytics Platform
  Home-page: https://tracdap.finos.org/
  Author: Martin Traverse
- Author-email: martin.traverse@accenture.com
+ Author-email: martin@fintrac.co.uk
  License: Apache-2.0
  Project-URL: Documentation, https://tracdap.readthedocs.io/
  Project-URL: Source Code, https://github.com/finos/tracdap
@@ -30,6 +30,8 @@ Provides-Extra: polars
  Requires-Dist: polars<2.0.0,>=1.0.0; extra == "polars"
  Provides-Extra: spark
  Requires-Dist: pyspark<3.6.0,>=3.0.0; extra == "spark"
+ Provides-Extra: sql
+ Requires-Dist: sqlalchemy<2.1.0,>=2.0.0; extra == "sql"
  Provides-Extra: aws
  Requires-Dist: botocore==1.34.93; extra == "aws"
  Requires-Dist: boto3==1.34.93; extra == "aws"
@@ -1,6 +1,6 @@
  [metadata]
  name = tracdap-runtime
- version = 0.6.5
+ version = 0.7.0rc1
  description = Runtime package for building models on the TRAC Data & Analytics Platform
  long_description = file: README.md
  long_description_content_type = text/markdown
@@ -16,7 +16,7 @@ classifiers =
  License :: OSI Approved :: Apache Software License
  Operating System :: OS Independent
  author = Martin Traverse
- author_email = martin.traverse@accenture.com
+ author_email = martin@fintrac.co.uk

  [options]
  packages =
@@ -28,6 +28,7 @@ packages =
  tracdap.rt.ext
  tracdap.rt._exec
  tracdap.rt._impl
+ tracdap.rt._impl.ext
  tracdap.rt._impl.grpc
  tracdap.rt._impl.grpc.tracdap
  tracdap.rt._impl.grpc.tracdap.metadata
@@ -55,6 +56,8 @@ polars =
  polars >= 1.0.0, < 2.0.0
  spark =
  pyspark >= 3.0.0, < 3.6.0
+ sql =
+ sqlalchemy >= 2.0.0, < 2.1.0
  aws =
  botocore == 1.34.93
  boto3 == 1.34.93
@@ -0,0 +1,16 @@
+ # Licensed to the Fintech Open Source Foundation (FINOS) under one or
+ # more contributor license agreements. See the NOTICE file distributed
+ # with this work for additional information regarding copyright ownership.
+ # FINOS licenses this file to you under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with the
+ # License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ from ._version import __version__
@@ -1,8 +1,9 @@
- # Copyright 2021 Accenture Global Solutions Limited
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
+ # Licensed to the Fintech Open Source Foundation (FINOS) under one or
+ # more contributor license agreements. See the NOTICE file distributed
+ # with this work for additional information regarding copyright ownership.
+ # FINOS licenses this file to you under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with the
+ # License. You may obtain a copy of the License at
  #
  # http://www.apache.org/licenses/LICENSE-2.0
  #
@@ -1,8 +1,9 @@
- # Copyright 2022 Accenture Global Solutions Limited
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
+ # Licensed to the Fintech Open Source Foundation (FINOS) under one or
+ # more contributor license agreements. See the NOTICE file distributed
+ # with this work for additional information regarding copyright ownership.
+ # FINOS licenses this file to you under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with the
+ # License. You may obtain a copy of the License at
  #
  # http://www.apache.org/licenses/LICENSE-2.0
  #
@@ -55,8 +56,6 @@ class TracContextImpl(_api.TracContext):
  Output views will contain schemas but no data.
  """

- __DEFAULT_TEMPORAL_OBJECTS = False
-
  def __init__(self,
  model_def: _meta.ModelDefinition,
  model_class: _api.TracModel.__class__,
@@ -134,53 +133,22 @@ class TracContextImpl(_api.TracContext):
  else:
  return copy.deepcopy(data_view.trac_schema)

- def get_table(self, dataset_name: str, framework, **kwargs) -> _eapi._DATA_FRAMEWORK: # noqa
-
- # Support the experimental API data framework syntax
-
- if framework == _eapi.PANDAS:
- return self.get_pandas_table(dataset_name, **kwargs)
- elif framework == _eapi.POLARS:
- return self.get_polars_table(dataset_name)
- else:
- raise _ex.ERuntimeValidation(f"Unsupported data framework [{framework}]")
-
- def get_pandas_table(self, dataset_name: str, use_temporal_objects: tp.Optional[bool] = None) \
- -> "_data.pandas.DataFrame":
+ def get_table(self, dataset_name: str, framework: _eapi.DataFramework[_eapi.DATA_API], **framework_args) -> _eapi.DATA_API:

- _val.require_package("pandas", _data.pandas)
- _val.validate_signature(self.get_pandas_table, dataset_name, use_temporal_objects)
-
- data_view, schema = self.__get_data_view(dataset_name)
- part_key = _data.DataPartKey.for_root()
-
- if use_temporal_objects is None:
- use_temporal_objects = self.__DEFAULT_TEMPORAL_OBJECTS
-
- return _data.DataMapping.view_to_pandas(data_view, part_key, schema, use_temporal_objects)
-
- def get_polars_table(self, dataset_name: str) -> "_data.polars.DataFrame":
-
- _val.require_package("polars", _data.polars)
- _val.validate_signature(self.get_polars_table, dataset_name)
-
- data_view, schema = self.__get_data_view(dataset_name)
- part_key = _data.DataPartKey.for_root()
-
- return _data.DataMapping.view_to_polars(data_view, part_key, schema)
-
- def __get_data_view(self, dataset_name: str):
-
- _val.validate_signature(self.__get_data_view, dataset_name)
+ _val.validate_signature(self.get_table, dataset_name, framework)
+ _val.require_package(framework.protocol_name, framework.api_type)

  self.__val.check_dataset_valid_identifier(dataset_name)
  self.__val.check_dataset_defined_in_model(dataset_name)
  self.__val.check_dataset_available_in_context(dataset_name)
+ self.__val.check_data_framework_args(framework, framework_args)

  static_schema = self.__get_static_schema(self.__model_def, dataset_name)
  data_view = self.__local_ctx.get(dataset_name)
  part_key = _data.DataPartKey.for_root()

+ converter = _data.DataConverter.for_framework(framework, **framework_args)
+
  self.__val.check_context_object_type(dataset_name, data_view, _data.DataView)
  self.__val.check_dataset_schema_defined(dataset_name, data_view)
  self.__val.check_dataset_part_present(dataset_name, data_view, part_key)
@@ -193,7 +161,18 @@ class TracContextImpl(_api.TracContext):
  else:
  schema = data_view.arrow_schema

- return data_view, schema
+ table = _data.DataMapping.view_to_arrow(data_view, part_key)
+
+ # Data conformance is applied automatically inside the converter, if schema != None
+ return converter.from_internal(table, schema)
+
+ def get_pandas_table(self, dataset_name: str, use_temporal_objects: tp.Optional[bool] = None) -> "_data.pandas.DataFrame":
+
+ return self.get_table(dataset_name, _eapi.PANDAS, use_temporal_objects=use_temporal_objects)
+
+ def get_polars_table(self, dataset_name: str) -> "_data.polars.DataFrame":
+
+ return self.get_table(dataset_name, _eapi.POLARS)

  def put_schema(self, dataset_name: str, schema: _meta.SchemaDefinition):

@@ -225,57 +204,28 @@ class TracContextImpl(_api.TracContext):

  self.__local_ctx[dataset_name] = updated_view

- def put_table(self, dataset_name: str, dataset: _eapi._DATA_FRAMEWORK, **kwargs): # noqa
-
- # Support the experimental API data framework syntax
-
- if _data.pandas and isinstance(dataset, _data.pandas.DataFrame):
- self.put_pandas_table(dataset_name, dataset)
- elif _data.polars and isinstance(dataset, _data.polars.DataFrame):
- self.put_polars_table(dataset_name, dataset)
- else:
- raise _ex.ERuntimeValidation(f"Unsupported data framework[{type(dataset)}]")
-
- def put_pandas_table(self, dataset_name: str, dataset: "_data.pandas.DataFrame"):
-
- _val.require_package("pandas", _data.pandas)
- _val.validate_signature(self.put_pandas_table, dataset_name, dataset)
-
- part_key = _data.DataPartKey.for_root()
- data_view, schema = self.__put_data_view(dataset_name, part_key, dataset, _data.pandas.DataFrame)
-
- # Data conformance is applied inside these conversion functions
-
- updated_item = _data.DataMapping.pandas_to_item(dataset, schema)
- updated_view = _data.DataMapping.add_item_to_view(data_view, part_key, updated_item)
-
- self.__local_ctx[dataset_name] = updated_view
-
- def put_polars_table(self, dataset_name: str, dataset: "_data.polars.DataFrame"):
-
- _val.require_package("polars", _data.polars)
- _val.validate_signature(self.put_polars_table, dataset_name, dataset)
-
- part_key = _data.DataPartKey.for_root()
- data_view, schema = self.__put_data_view(dataset_name, part_key, dataset, _data.polars.DataFrame)
-
- # Data conformance is applied inside these conversion functions
-
- updated_item = _data.DataMapping.polars_to_item(dataset, schema)
- updated_view = _data.DataMapping.add_item_to_view(data_view, part_key, updated_item)
+ def put_table(
+ self, dataset_name: str, dataset: _eapi.DATA_API,
+ framework: tp.Optional[_eapi.DataFramework[_eapi.DATA_API]] = None,
+ **framework_args):

- self.__local_ctx[dataset_name] = updated_view
+ _val.validate_signature(self.put_table, dataset_name, dataset, framework)

- def __put_data_view(self, dataset_name: str, part_key: _data.DataPartKey, dataset: tp.Any, framework: type):
+ if framework is None:
+ framework = _data.DataConverter.get_framework(dataset)

- _val.validate_signature(self.__put_data_view, dataset_name, part_key, dataset, framework)
+ _val.require_package(framework.protocol_name, framework.api_type)

  self.__val.check_dataset_valid_identifier(dataset_name)
  self.__val.check_dataset_is_model_output(dataset_name)
- self.__val.check_provided_dataset_type(dataset, framework)
+ self.__val.check_provided_dataset_type(dataset, framework.api_type)
+ self.__val.check_data_framework_args(framework, framework_args)

  static_schema = self.__get_static_schema(self.__model_def, dataset_name)
  data_view = self.__local_ctx.get(dataset_name)
+ part_key = _data.DataPartKey.for_root()
+
+ converter = _data.DataConverter.for_framework(framework)

  if data_view is None:
  if static_schema is not None:
@@ -294,7 +244,21 @@ class TracContextImpl(_api.TracContext):
  else:
  schema = data_view.arrow_schema

- return data_view, schema
+ # Data conformance is applied automatically inside the converter, if schema != None
+ table = converter.to_internal(dataset, schema)
+ item = _data.DataItem(schema, table)
+
+ updated_view = _data.DataMapping.add_item_to_view(data_view, part_key, item)
+
+ self.__local_ctx[dataset_name] = updated_view
+
+ def put_pandas_table(self, dataset_name: str, dataset: "_data.pandas.DataFrame"):
+
+ self.put_table(dataset_name, dataset, _eapi.PANDAS)
+
+ def put_polars_table(self, dataset_name: str, dataset: "_data.polars.DataFrame"):
+
+ self.put_table(dataset_name, dataset, _eapi.POLARS)

  def log(self) -> logging.Logger:

@@ -335,7 +299,7 @@ class TracDataContextImpl(TracContextImpl, _eapi.TracDataContext):
  def __init__(
  self, model_def: _meta.ModelDefinition, model_class: _api.TracModel.__class__,
  local_ctx: tp.Dict[str, tp.Any], dynamic_outputs: tp.List[str],
- storage_map: tp.Dict[str, tp.Union[_eapi.TracFileStorage]],
+ storage_map: tp.Dict[str, tp.Union[_eapi.TracFileStorage, _eapi.TracDataStorage]],
  checkout_directory: pathlib.Path = None):

  super().__init__(model_def, model_class, local_ctx, dynamic_outputs, checkout_directory)
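Note on the context.py hunks above: the separate get_pandas_table / get_polars_table and put_pandas_table / put_polars_table code paths are consolidated into a single framework-aware get_table / put_table pair, with the old methods kept as thin wrappers. A minimal usage sketch follows; the import path for the PANDAS constant and the define_* helpers are assumed from the pre-existing TRAC model API rather than taken from this diff, and the dataset and field names are illustrative.

    import pandas as pd

    import tracdap.rt.api as trac
    import tracdap.rt.api.experimental as trac_x   # assumed home of the PANDAS / POLARS framework constants


    class LoanSummaryModel(trac.TracModel):

        def define_parameters(self):
            return trac.define_parameters()

        def define_inputs(self):
            return {"customer_loans": trac.define_input_table(
                trac.F("id", trac.BasicType.STRING, label="Customer ID"),
                trac.F("loan_amount", trac.BasicType.FLOAT, label="Loan amount"))}

        def define_outputs(self):
            return {"loan_summary": trac.define_output_table(
                trac.F("total_loans", trac.BasicType.FLOAT, label="Total loan amount"))}

        def run_model(self, ctx: trac.TracContext):

            # One accessor for every supported framework, selected by a framework constant
            loans = ctx.get_table("customer_loans", trac_x.PANDAS)

            summary = pd.DataFrame({"total_loans": [loans["loan_amount"].sum()]})

            # The framework argument to put_table is optional; it is inferred from the dataset type
            ctx.put_table("loan_summary", summary)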
@@ -358,8 +322,27 @@ class TracDataContextImpl(TracContextImpl, _eapi.TracDataContext):

  return self.__storage_map[storage_key]

- def get_data_storage(self, storage_key: str) -> None:
- raise _ex.ERuntimeValidation("Data storage API not available yet")
+ def get_data_storage(
+ self, storage_key: str,
+ framework: _eapi.DataFramework[_eapi.DATA_API],
+ **framework_args) -> _eapi.TracDataStorage[_eapi.DATA_API]:
+
+ _val.validate_signature(self.get_file_storage, storage_key)
+
+ self.__val.check_storage_valid_identifier(storage_key)
+ self.__val.check_storage_available(self.__storage_map, storage_key)
+ self.__val.check_storage_type(self.__storage_map, storage_key, _eapi.TracDataStorage)
+ self.__val.check_data_framework_args(framework, framework_args)
+
+ storage = self.__storage_map[storage_key]
+ converter = _data.DataConverter.for_framework(framework, **framework_args)
+
+ # Create a shallow copy of the storage impl with a converter for the requested data framework
+ # At some point we will need a storage factory class, bc the internal data API can also be different
+ storage = copy.copy(storage)
+ storage._TracDataStorageImpl__converter = converter
+
+ return storage

  def add_data_import(self, dataset_name: str):

@@ -372,15 +355,30 @@ class TracDataContextImpl(TracContextImpl, _eapi.TracDataContext):
  self.__local_ctx[dataset_name] = _data.DataView.create_empty()
  self.__dynamic_outputs.append(dataset_name)

- def set_source_metadata(self, dataset_name: str, storage_key: str, source_info: _eapi.FileStat):
+ def set_source_metadata(self, dataset_name: str, storage_key: str, source_info: tp.Union[_eapi.FileStat, str]):
+
+ _val.validate_signature(self.set_source_metadata, dataset_name, storage_key, source_info)

- _val.validate_signature(self.add_data_import, dataset_name, storage_key, source_info)
+ self.__val.check_dataset_valid_identifier(dataset_name)
+ self.__val.check_dataset_available_in_context(dataset_name)
+ self.__val.check_storage_valid_identifier(storage_key)
+ self.__val.check_storage_available(self.__storage_map, storage_key)
+
+ storage = self.__storage_map[storage_key]
+
+ if isinstance(storage, _eapi.TracFileStorage):
+ if not isinstance(source_info, _eapi.FileStat):
+ self.__val.report_public_error(f"Expected storage_info to be a FileStat, [{storage_key}] refers to file storage")
+
+ if isinstance(storage, _eapi.TracDataStorage):
+ if not isinstance(source_info, str):
+ self.__val.report_public_error(f"Expected storage_info to be a table name, [{storage_key}] refers to dadta storage")

  pass # Not implemented yet, only required when imports are sent back to the platform

  def set_attribute(self, dataset_name: str, attribute_name: str, value: tp.Any):

- _val.validate_signature(self.add_data_import, dataset_name, attribute_name, value)
+ _val.validate_signature(self.set_attribute, dataset_name, attribute_name, value)

  pass # Not implemented yet, only required when imports are sent back to the platform

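Note on the hunk above: set_source_metadata is widened so that source_info can be either a FileStat (when the source is file storage) or a table name string (when the source is data storage). A rough sketch of a data import flow using these context methods, reusing the imports from the earlier sketch; the storage key, table name and attribute are illustrative, and the import-model base class is omitted here.

    # Inside a data import model, given ctx: trac_x.TracDataContext
    def run_model(self, ctx):

        # Read from an external data store via the new data storage API (see get_data_storage above)
        storage = ctx.get_data_storage("external_warehouse", trac_x.PANDAS)
        customers = storage.read_table("customers")

        # Register a dynamic output and attach the imported data to the context
        ctx.add_data_import("imported_customers")
        ctx.put_table("imported_customers", customers)

        # Record provenance: a table name for data storage, or a FileStat for file storage
        ctx.set_source_metadata("imported_customers", "external_warehouse", "customers")
        ctx.set_attribute("imported_customers", "source_system", "warehouse")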
@@ -531,13 +529,132 @@ class TracFileStorageImpl(_eapi.TracFileStorage):
  super().write_bytes(storage_path, data)


+ class TracDataStorageImpl(_eapi.TracDataStorage[_eapi.DATA_API]):
+
+ def __init__(
+ self, storage_key: str, storage_impl: _storage.IDataStorageBase[_data.T_INTERNAL_DATA, _data.T_INTERNAL_SCHEMA],
+ data_converter: _data.DataConverter[_eapi.DATA_API, _data.T_INTERNAL_DATA, _data.T_INTERNAL_SCHEMA],
+ write_access: bool, checkout_directory):
+
+ self.__storage_key = storage_key
+ self.__converter = data_converter
+
+ self.__has_table = lambda tn: storage_impl.has_table(tn)
+ self.__list_tables = lambda: storage_impl.list_tables()
+ self.__read_table = lambda tn: storage_impl.read_table(tn)
+ self.__native_read_query = lambda q, ps: storage_impl.native_read_query(q, **ps)
+
+ if write_access:
+ self.__create_table = lambda tn, s: storage_impl.create_table(tn, s)
+ self.__write_table = lambda tn, ds: storage_impl.write_table(tn, ds)
+ else:
+ self.__create_table = None
+ self.__write_table = None
+
+ self.__log = _util.logger_for_object(self)
+ self.__val = TracStorageValidator(self.__log, checkout_directory, self.__storage_key)
+
+ def has_table(self, table_name: str) -> bool:
+
+ _val.validate_signature(self.has_table, table_name)
+
+ self.__val.check_operation_available(self.has_table, self.__has_table)
+ self.__val.check_table_name_is_valid(table_name)
+ self.__val.check_storage_path_is_valid(table_name)
+
+ try:
+ return self.__has_table(table_name)
+ except _ex.EStorageRequest as e:
+ self.__val.report_public_error(e)
+
+ def list_tables(self) -> tp.List[str]:
+
+ _val.validate_signature(self.list_tables)
+
+ self.__val.check_operation_available(self.list_tables, self.__list_tables)
+
+ try:
+ return self.__list_tables()
+ except _ex.EStorageRequest as e:
+ self.__val.report_public_error(e)
+
+ def create_table(self, table_name: str, schema: _api.SchemaDefinition):
+
+ _val.validate_signature(self.create_table, table_name, schema)
+
+ self.__val.check_operation_available(self.create_table, self.__create_table)
+ self.__val.check_table_name_is_valid(table_name)
+ self.__val.check_storage_path_is_valid(table_name)
+
+ arrow_schema = _data.DataMapping.trac_to_arrow_schema(schema)
+
+ try:
+ self.__create_table(table_name, arrow_schema)
+ except _ex.EStorageRequest as e:
+ self.__val.report_public_error(e)
+
+ def read_table(self, table_name: str) -> _eapi.DATA_API:
+
+ _val.validate_signature(self.read_table, table_name)
+
+ self.__val.check_operation_available(self.read_table, self.__read_table)
+ self.__val.check_table_name_is_valid(table_name)
+ self.__val.check_table_name_not_reserved(table_name)
+
+ try:
+ raw_data = self.__read_table(table_name)
+ return self.__converter.from_internal(raw_data)
+
+ except _ex.EStorageRequest as e:
+ self.__val.report_public_error(e)
+
+ def native_read_query(self, query: str, **parameters) -> _eapi.DATA_API:
+
+ _val.validate_signature(self.native_read_query, query, **parameters)
+
+ self.__val.check_operation_available(self.native_read_query, self.__native_read_query)
+
+ # TODO: validate query and parameters
+ # Some validation is performed by the impl
+
+ try:
+ raw_data = self.__native_read_query(query, **parameters)
+ return self.__converter.from_internal(raw_data)
+
+ except _ex.EStorageRequest as e:
+ self.__val.report_public_error(e)
+
+ def write_table(self, table_name: str, dataset: _eapi.DATA_API):
+
+ _val.validate_signature(self.write_table, table_name, dataset)
+
+ self.__val.check_operation_available(self.read_table, self.__read_table)
+ self.__val.check_table_name_is_valid(table_name)
+ self.__val.check_table_name_not_reserved(table_name)
+ self.__val.check_provided_dataset_type(dataset, self.__converter.framework.api_type)
+
+ try:
+ raw_data = self.__converter.to_internal(dataset)
+ self.__write_table(table_name, raw_data)
+
+ except _ex.EStorageRequest as e:
+ self.__val.report_public_error(e)
+
+
  class TracContextErrorReporter:

+ _VALID_IDENTIFIER = re.compile("^[a-zA-Z_]\\w*$",)
+ _RESERVED_IDENTIFIER = re.compile("^(trac_|_)\\w*")
+
  def __init__(self, log: logging.Logger, checkout_directory: pathlib.Path):

  self.__log = log
  self.__checkout_directory = checkout_directory

+ def report_public_error(self, exception: Exception):
+
+ self._report_error(str(exception), exception)
+
  def _report_error(self, message, cause: Exception = None):

  full_stack = traceback.extract_stack()
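Note on the hunk above: TracDataStorageImpl is the runtime-side implementation behind the TracDataStorage handle returned by get_data_storage. Continuing the earlier sketch, a model holding such a handle might use it as follows; the storage key, table names and query are illustrative, trac.define_schema is assumed from the static API, and whether write_table appends to or replaces an existing table is not specified by this diff.

    storage = ctx.get_data_storage("sales_db", trac_x.PANDAS)

    if not storage.has_table("orders_by_region"):
        schema = trac.define_schema(
            trac.F("region", trac.BasicType.STRING, label="Region"),
            trac.F("total_amount", trac.BasicType.FLOAT, label="Total order amount"))
        storage.create_table("orders_by_region", schema)

    # native_read_query passes the query straight through to the underlying storage dialect
    orders = storage.native_read_query(
        "select region, sum(amount) as total_amount from orders group by region")

    storage.write_table("orders_by_region", orders)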
@@ -554,11 +671,18 @@ class TracContextErrorReporter:
  else:
  raise _ex.ERuntimeValidation(message)

+ @staticmethod
+ def _type_name(type_: type):

- class TracContextValidator(TracContextErrorReporter):
+ module = type_.__module__
+
+ if module is None or module == str.__class__.__module__ or module == tp.__name__:
+ return _val.type_name(type_, False)
+ else:
+ return _val.type_name(type_, True)

- __VALID_IDENTIFIER = re.compile("^[a-zA-Z_]\\w*$",)
- __RESERVED_IDENTIFIER = re.compile("^(trac_|_)\\w*")
+
+ class TracContextValidator(TracContextErrorReporter):

  def __init__(
  self, log: logging.Logger,
@@ -578,7 +702,7 @@ class TracContextValidator(TracContextErrorReporter):
  if param_name is None:
  self._report_error(f"Parameter name is null")

- if not self.__VALID_IDENTIFIER.match(param_name):
+ if not self._VALID_IDENTIFIER.match(param_name):
  self._report_error(f"Parameter name {param_name} is not a valid identifier")

  def check_param_defined_in_model(self, param_name: str):
@@ -596,7 +720,7 @@ class TracContextValidator(TracContextErrorReporter):
  if dataset_name is None:
  self._report_error(f"Dataset name is null")

- if not self.__VALID_IDENTIFIER.match(dataset_name):
+ if not self._VALID_IDENTIFIER.match(dataset_name):
  self._report_error(f"Dataset name {dataset_name} is not a valid identifier")

  def check_dataset_not_defined_in_model(self, dataset_name: str):
@@ -710,12 +834,39 @@ class TracContextValidator(TracContextErrorReporter):
  f"The object referenced by [{item_name}] in the current context has the wrong type" +
  f" (expected {expected_type_name}, got {actual_type_name})")

+ def check_data_framework_args(self, framework: _eapi.DataFramework, framework_args: tp.Dict[str, tp.Any]):
+
+ expected_args = _data.DataConverter.get_framework_args(framework)
+ unexpected_args = list(filter(lambda arg: arg not in expected_args, framework_args.keys()))
+
+ if any(unexpected_args):
+ unknown_args = ", ".join(unexpected_args)
+ self._report_error(f"Using [{framework}], some arguments were not recognized: [{unknown_args}]")
+
+ for arg_name, arg_type in expected_args.items():
+
+ arg_value = framework_args.get(arg_name)
+
+ if _val.check_type(arg_type, arg_value):
+ continue
+
+ if arg_value is None:
+ self._report_error(f"Using [{framework}], required argument [{arg_name}] is missing")
+
+ else:
+ expected_type_name = self._type_name(arg_type)
+ actual_type_name = self._type_name(type(arg_value))
+
+ self._report_error(
+ f"Using [{framework}], argument [{arg_name}] has the wrong type" +
+ f" (expected {expected_type_name}, got {actual_type_name})")
+
  def check_storage_valid_identifier(self, storage_key):

  if storage_key is None:
  self._report_error(f"Storage key is null")

- if not self.__VALID_IDENTIFIER.match(storage_key):
+ if not self._VALID_IDENTIFIER.match(storage_key):
  self._report_error(f"Storage key {storage_key} is not a valid identifier")

  def check_storage_available(self, storage_map: tp.Dict, storage_key: str):
@@ -737,16 +888,6 @@ class TracContextValidator(TracContextErrorReporter):
  else:
  self._report_error(f"Storage key [{storage_key}] refers to file storage, not data storage")

- @staticmethod
- def _type_name(type_: type):
-
- module = type_.__module__
-
- if module is None or module == str.__class__.__module__:
- return type_.__qualname__
-
- return module + '.' + type_.__name__
-

  class TracStorageValidator(TracContextErrorReporter):

@@ -777,3 +918,30 @@ class TracStorageValidator(TracContextErrorReporter):

  if _val.StorageValidator.storage_path_is_empty(storage_path):
  self._report_error(f"Storage path [{storage_path}] is not allowed")
+
+ def check_table_name_is_valid(self, table_name: str):
+
+ if table_name is None:
+ self._report_error(f"Table name is null")
+
+ if not self._VALID_IDENTIFIER.match(table_name):
+ self._report_error(f"Table name {table_name} is not a valid identifier")
+
+ def check_table_name_not_reserved(self, table_name: str):
+
+ if self._RESERVED_IDENTIFIER.match(table_name):
+ self._report_error(f"Table name {table_name} is a reserved identifier")
+
+ def check_provided_dataset_type(self, dataset: tp.Any, expected_type: type):
+
+ if dataset is None:
+ self._report_error(f"Provided dataset is null")
+
+ if not isinstance(dataset, expected_type):
+
+ expected_type_name = self._type_name(expected_type)
+ actual_type_name = self._type_name(type(dataset))
+
+ self._report_error(
+ f"Provided dataset is the wrong type" +
+ f" (expected {expected_type_name}, got {actual_type_name})")