tracdap-runtime 0.8.0rc2__py3-none-any.whl → 0.9.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. tracdap/rt/_impl/core/config_parser.py +29 -3
  2. tracdap/rt/_impl/core/data.py +627 -40
  3. tracdap/rt/_impl/core/repos.py +17 -8
  4. tracdap/rt/_impl/core/storage.py +25 -13
  5. tracdap/rt/_impl/core/struct.py +254 -60
  6. tracdap/rt/_impl/core/util.py +125 -11
  7. tracdap/rt/_impl/exec/context.py +35 -8
  8. tracdap/rt/_impl/exec/dev_mode.py +169 -127
  9. tracdap/rt/_impl/exec/engine.py +203 -140
  10. tracdap/rt/_impl/exec/functions.py +228 -263
  11. tracdap/rt/_impl/exec/graph.py +141 -126
  12. tracdap/rt/_impl/exec/graph_builder.py +428 -449
  13. tracdap/rt/_impl/grpc/codec.py +8 -13
  14. tracdap/rt/_impl/grpc/server.py +7 -7
  15. tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2.py +25 -18
  16. tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2.pyi +27 -9
  17. tracdap/rt/_impl/grpc/tracdap/metadata/common_pb2.py +1 -1
  18. tracdap/rt/_impl/grpc/tracdap/metadata/config_pb2.py +1 -1
  19. tracdap/rt/_impl/grpc/tracdap/metadata/custom_pb2.py +1 -1
  20. tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.py +37 -35
  21. tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.pyi +37 -43
  22. tracdap/rt/_impl/grpc/tracdap/metadata/file_pb2.py +1 -1
  23. tracdap/rt/_impl/grpc/tracdap/metadata/flow_pb2.py +1 -1
  24. tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.py +67 -63
  25. tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.pyi +11 -2
  26. tracdap/rt/_impl/grpc/tracdap/metadata/model_pb2.py +1 -1
  27. tracdap/rt/_impl/grpc/tracdap/metadata/object_id_pb2.py +1 -1
  28. tracdap/rt/_impl/grpc/tracdap/metadata/object_pb2.py +1 -1
  29. tracdap/rt/_impl/grpc/tracdap/metadata/resource_pb2.py +1 -1
  30. tracdap/rt/_impl/grpc/tracdap/metadata/search_pb2.py +1 -1
  31. tracdap/rt/_impl/grpc/tracdap/metadata/storage_pb2.py +11 -9
  32. tracdap/rt/_impl/grpc/tracdap/metadata/storage_pb2.pyi +11 -2
  33. tracdap/rt/_impl/grpc/tracdap/metadata/tag_pb2.py +1 -1
  34. tracdap/rt/_impl/grpc/tracdap/metadata/tag_update_pb2.py +1 -1
  35. tracdap/rt/_impl/grpc/tracdap/metadata/type_pb2.py +23 -19
  36. tracdap/rt/_impl/grpc/tracdap/metadata/type_pb2.pyi +15 -2
  37. tracdap/rt/_impl/runtime.py +3 -9
  38. tracdap/rt/_impl/static_api.py +5 -6
  39. tracdap/rt/_plugins/format_csv.py +2 -2
  40. tracdap/rt/_plugins/repo_git.py +56 -11
  41. tracdap/rt/_plugins/storage_aws.py +165 -150
  42. tracdap/rt/_plugins/storage_azure.py +17 -11
  43. tracdap/rt/_plugins/storage_gcp.py +35 -18
  44. tracdap/rt/_version.py +1 -1
  45. tracdap/rt/api/model_api.py +45 -0
  46. tracdap/rt/config/__init__.py +7 -9
  47. tracdap/rt/config/common.py +3 -14
  48. tracdap/rt/config/job.py +17 -3
  49. tracdap/rt/config/platform.py +9 -32
  50. tracdap/rt/config/result.py +8 -4
  51. tracdap/rt/config/runtime.py +5 -10
  52. tracdap/rt/config/tenant.py +28 -0
  53. tracdap/rt/launch/cli.py +0 -8
  54. tracdap/rt/launch/launch.py +1 -3
  55. tracdap/rt/metadata/__init__.py +35 -35
  56. tracdap/rt/metadata/data.py +19 -31
  57. tracdap/rt/metadata/job.py +3 -1
  58. tracdap/rt/metadata/storage.py +9 -0
  59. tracdap/rt/metadata/type.py +9 -5
  60. {tracdap_runtime-0.8.0rc2.dist-info → tracdap_runtime-0.9.0b2.dist-info}/METADATA +5 -3
  61. {tracdap_runtime-0.8.0rc2.dist-info → tracdap_runtime-0.9.0b2.dist-info}/RECORD +64 -63
  62. {tracdap_runtime-0.8.0rc2.dist-info → tracdap_runtime-0.9.0b2.dist-info}/WHEEL +1 -1
  63. {tracdap_runtime-0.8.0rc2.dist-info → tracdap_runtime-0.9.0b2.dist-info}/licenses/LICENSE +0 -0
  64. {tracdap_runtime-0.8.0rc2.dist-info → tracdap_runtime-0.9.0b2.dist-info}/top_level.txt +0 -0
@@ -28,20 +28,27 @@ from pyarrow import fs as pa_fs
28
28
  from . import _helpers
29
29
 
30
30
 
31
- try:
32
- # These dependencies are provided by the optional [gcp] feature
33
- # For local development, pip install -r requirements_plugins.txt
34
- import google.cloud.storage as gcs # noqa
35
- import gcsfs # noqa
36
- __gcp_available = True
37
- except ImportError:
38
- gcs = None
39
- gcsfs = None
40
- __gcp_available = False
31
+ def _gcp_arrow_available():
32
+ try:
33
+ # Shipped as part of PyArrow, but may not be available on all platforms
34
+ return pa_fs.GcsFileSystem is not None
35
+ except ImportError:
36
+ return False
37
+
38
+ def _gcp_fsspec_available():
39
+ try:
40
+ # These dependencies are provided by the optional [gcp] feature
41
+ # For local development, pip install -r requirements_plugins.txt
42
+ import google.cloud.storage as gcs # noqa
43
+ import gcsfs # noqa
44
+ return True
45
+ except ImportError:
46
+ return False
41
47
 
42
48
 
43
49
  class GcpStorageProvider(IStorageProvider):
44
50
 
51
+ PROJECT_PROPERTY = "project"
45
52
  BUCKET_PROPERTY = "bucket"
46
53
  PREFIX_PROPERTY = "prefix"
47
54
  REGION_PROPERTY = "region"
@@ -62,20 +69,17 @@ class GcpStorageProvider(IStorageProvider):
62
69
  RUNTIME_FS_DEFAULT = RUNTIME_FS_AUTO
63
70
 
64
71
  ARROW_CLIENT_ARGS = {
72
+ PROJECT_PROPERTY: "project_id",
65
73
  REGION_PROPERTY: "default_bucket_location",
66
- ENDPOINT_PROPERTY: "endpoint_override"
74
+ ENDPOINT_PROPERTY: "endpoint_override",
67
75
  }
68
76
 
69
77
  FSSPEC_CLIENT_ARGS = {
78
+ PROJECT_PROPERTY: "project",
70
79
  REGION_PROPERTY: "default_location",
71
80
  ENDPOINT_PROPERTY: "endpoint_url"
72
81
  }
73
82
 
74
- try:
75
- __arrow_available = pa_fs.GcsFileSystem is not None
76
- except ImportError:
77
- __arrow_available = False
78
-
79
83
  def __init__(self, properties: tp.Dict[str, str]):
80
84
 
81
85
  self._log = _helpers.logger_for_object(self)
@@ -91,7 +95,7 @@ class GcpStorageProvider(IStorageProvider):
91
95
  def get_arrow_native(self) -> pa_fs.SubTreeFileSystem:
92
96
 
93
97
  if self._runtime_fs == self.RUNTIME_FS_AUTO:
94
- gcs_fs = self.create_arrow() if self.__arrow_available else self.create_fsspec()
98
+ gcs_fs = self.create_arrow() if _gcp_arrow_available() else self.create_fsspec()
95
99
  elif self._runtime_fs == self.RUNTIME_FS_ARROW:
96
100
  gcs_fs = self.create_arrow()
97
101
  elif self._runtime_fs == self.RUNTIME_FS_FSSPEC:
@@ -115,12 +119,20 @@ class GcpStorageProvider(IStorageProvider):
115
119
 
116
120
  def create_arrow(self) -> pa_fs.FileSystem:
117
121
 
122
+ if not _gcp_arrow_available():
123
+ raise ex.EStorage(f"GCS storage setup failed: Plugin for [{self.RUNTIME_FS_ARROW}] is not available")
124
+
118
125
  gcs_arrow_args = self.setup_client_args(self.ARROW_CLIENT_ARGS)
119
126
 
120
127
  return pa_fs.GcsFileSystem(**gcs_arrow_args)
121
128
 
122
129
  def create_fsspec(self) -> pa_fs.FileSystem:
123
130
 
131
+ if not _gcp_fsspec_available():
132
+ raise ex.EStorage(f"GCS storage setup failed: Plugin for [{self.RUNTIME_FS_FSSPEC}] is not available")
133
+
134
+ import gcsfs # noqa
135
+
124
136
  gcs_fsspec_args = self.setup_client_args(self.FSSPEC_CLIENT_ARGS)
125
137
  gcs_fsspec = gcsfs.GCSFileSystem(**gcs_fsspec_args)
126
138
 
@@ -130,9 +142,14 @@ class GcpStorageProvider(IStorageProvider):
130
142
 
131
143
  client_args = dict()
132
144
 
145
+ project = _helpers.get_plugin_property(self._properties, self.PROJECT_PROPERTY)
133
146
  region = _helpers.get_plugin_property(self._properties, self.REGION_PROPERTY)
134
147
  endpoint = _helpers.get_plugin_property(self._properties, self.ENDPOINT_PROPERTY)
135
148
 
149
+ if project is not None:
150
+ project_key = arg_mapping[self.PROJECT_PROPERTY]
151
+ client_args[project_key] = project
152
+
136
153
  if region is not None:
137
154
  region_key = arg_mapping[self.REGION_PROPERTY]
138
155
  client_args[region_key] = region
@@ -180,5 +197,5 @@ class GcpStorageProvider(IStorageProvider):
180
197
  raise ex.EStartup(message)
181
198
 
182
199
 
183
- if __gcp_available:
200
+ if _gcp_arrow_available() or _gcp_fsspec_available():
184
201
  plugins.PluginManager.register_plugin(IStorageProvider, GcpStorageProvider, ["GCS"])
tracdap/rt/_version.py CHANGED
@@ -13,4 +13,4 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
- __version__ = "0.8.0rc2"
16
+ __version__ = "0.9.0b2"
@@ -14,6 +14,7 @@
14
14
  # limitations under the License.
15
15
 
16
16
  import abc as _abc
17
+ import dataclasses as _dc
17
18
  import typing as _tp
18
19
  import logging as _logging
19
20
 
@@ -35,6 +36,25 @@ if _tp.TYPE_CHECKING:
35
36
  pass
36
37
 
37
38
 
39
+ @_dc.dataclass(frozen=True)
40
+ class RuntimeMetadata:
41
+
42
+ """
43
+ The metadata associated with a TRAC object, made available for models at runtime
44
+
45
+ The metadata available for a particular object depends on the current job configuration, as
46
+ well as the type of object. For example, a model input supplied from a TRAC dataset will
47
+ have the metadata of that dataset available, but data passed into a model as an intermediate
48
+ dataset in a flow might not have an ID or any attributes.
49
+ """
50
+
51
+ objectId: _tp.Optional[TagHeader] = None
52
+ """TRAC object ID of the current object (if available)"""
53
+
54
+ attributes: _tp.Dict[str, _tp.Any] = _dc.field(default_factory=dict)
55
+ """TRAC metadata attributes of the current object (if available)"""
56
+
57
+
38
58
  class TracContext(metaclass=_abc.ABCMeta):
39
59
 
40
60
  """
@@ -202,6 +222,31 @@ class TracContext(metaclass=_abc.ABCMeta):
202
222
 
203
223
  pass
204
224
 
225
+ def get_metadata(self, item_name: str) -> _tp.Optional[RuntimeMetadata]:
226
+
227
+ """
228
+ Get the TRAC metadata associated with a model input.
229
+
230
+ Metadata is available for inputs supplied from real TRAC objects, including
231
+ both :py:attr:`DATA <tracdap.rt.metadata.ObjectType.DATA>` and
232
+ :py:attr:`FILE <tracdap.rt.metadata.ObjectType.DATA>` objects.
233
+
234
+ Calling :py:meth:`get_metadata()` for objects that are not inputs will return null,
235
+ since parameters have no metadata and output metadata does not exist until after a job completes.
236
+ :py:meth:`get_metadata()` will also return null for inputs supplied from other models
237
+ as intermediates in a flow, since these also have no persistent metadata.
238
+
239
+ Attempting to access metadata for objects that do not exist, including outputs that
240
+ have not been put yet, is an error.
241
+
242
+ :param item_name: The name of the file or dataset to get metadata for
243
+ :return: Runtime metadata for the named item, or None if no metadata is available
244
+ :type item_name: str
245
+ :rtype: :py:class:`RuntimeMetadata <tracdap.rt.api.RuntimeMetadata>`
246
+ """
247
+
248
+ pass
249
+
205
250
  def put_schema(self, dataset_name: str, schema: SchemaDefinition):
206
251
 
207
252
  """
@@ -1,30 +1,28 @@
1
1
  # Code generated by TRAC
2
2
 
3
- from .job import JobConfig
3
+ from .tenant import TenantConfigMap
4
+ from .tenant import TenantConfig
4
5
 
5
6
  from .common import _ConfigFile
6
7
  from .common import PluginConfig
7
8
  from .common import PlatformInfo
8
- from .common import StorageConfig
9
9
  from .common import ServiceConfig
10
10
 
11
- from .dynamic import DynamicConfig
11
+ from .runtime import RuntimeConfig
12
12
 
13
- from .result import TagUpdateList
13
+ from .result import JobResultAttrs
14
14
  from .result import JobResult
15
15
 
16
16
  from .platform import RoutingProtocol
17
17
  from .platform import DeploymentLayout
18
18
  from .platform import PlatformConfig
19
- from .platform import MetadataConfig
20
- from .platform import TenantConfig
21
19
  from .platform import GatewayConfig
22
20
  from .platform import GatewayRedirect
23
21
  from .platform import RouteConfig
24
22
  from .platform import RoutingMatch
25
23
  from .platform import RoutingTarget
26
24
  from .platform import DeploymentConfig
27
- from .platform import ClientConfig
28
25
 
29
- from .runtime import RuntimeConfig
30
- from .runtime import SparkSettings
26
+ from .dynamic import DynamicConfig
27
+
28
+ from .job import JobConfig
@@ -4,6 +4,9 @@ import typing as _tp # noqa
4
4
  import dataclasses as _dc # noqa
5
5
  import enum as _enum # noqa
6
6
 
7
+ import tracdap.rt.metadata as metadata
8
+
9
+
7
10
 
8
11
  @_dc.dataclass
9
12
  class _ConfigFile:
@@ -33,20 +36,6 @@ class PlatformInfo:
33
36
  deploymentInfo: "_tp.Dict[str, str]" = _dc.field(default_factory=dict)
34
37
 
35
38
 
36
- @_dc.dataclass
37
- class StorageConfig:
38
-
39
- buckets: "_tp.Dict[str, PluginConfig]" = _dc.field(default_factory=dict)
40
-
41
- """TODO: Rename "buckets" as "internal" for 0.7"""
42
-
43
- external: "_tp.Dict[str, PluginConfig]" = _dc.field(default_factory=dict)
44
-
45
- defaultBucket: "str" = ""
46
-
47
- defaultFormat: "str" = ""
48
-
49
-
50
39
  @_dc.dataclass
51
40
  class ServiceConfig:
52
41
 
tracdap/rt/config/job.py CHANGED
@@ -13,10 +13,24 @@ class JobConfig:
13
13
 
14
14
  jobId: "metadata.TagHeader" = _dc.field(default_factory=lambda: metadata.TagHeader())
15
15
 
16
+ """ID and definition of the job being submitted"""
17
+
16
18
  job: "metadata.JobDefinition" = _dc.field(default_factory=lambda: metadata.JobDefinition())
17
19
 
18
- resources: "_tp.Dict[str, metadata.ObjectDefinition]" = _dc.field(default_factory=dict)
20
+ objectMapping: "_tp.Dict[str, metadata.TagHeader]" = _dc.field(default_factory=dict)
21
+
22
+ """Metadata needed to execute the job (objects referred to in the job definition)"""
23
+
24
+ objects: "_tp.Dict[str, metadata.ObjectDefinition]" = _dc.field(default_factory=dict)
25
+
26
+ tags: "_tp.Dict[str, metadata.Tag]" = _dc.field(default_factory=dict)
27
+
28
+ resultId: "metadata.TagHeader" = _dc.field(default_factory=lambda: metadata.TagHeader())
29
+
30
+ """Preallocated IDs for job outputs"""
31
+
32
+ preallocatedIds: "_tp.List[metadata.TagHeader]" = _dc.field(default_factory=list)
19
33
 
20
- resourceMapping: "_tp.Dict[str, metadata.TagHeader]" = _dc.field(default_factory=dict)
34
+ properties: "_tp.Dict[str, str]" = _dc.field(default_factory=dict)
21
35
 
22
- resultMapping: "_tp.Dict[str, metadata.TagHeader]" = _dc.field(default_factory=dict)
36
+ """Allow setting per-job configuration"""
@@ -6,6 +6,7 @@ import enum as _enum # noqa
6
6
 
7
7
  import tracdap.rt.metadata as metadata
8
8
  from .common import * # noqa
9
+ from .tenant import * # noqa
9
10
 
10
11
 
11
12
  class RoutingProtocol(_enum.Enum):
@@ -20,6 +21,8 @@ class RoutingProtocol(_enum.Enum):
20
21
 
21
22
  REST = 4
22
23
 
24
+ INTERNAL = 5
25
+
23
26
 
24
27
  class DeploymentLayout(_enum.Enum):
25
28
 
@@ -39,43 +42,21 @@ class PlatformConfig:
39
42
 
40
43
  platformInfo: "PlatformInfo" = _dc.field(default_factory=lambda: PlatformInfo())
41
44
 
42
- metadata: "MetadataConfig" = _dc.field(default_factory=lambda: MetadataConfig())
43
-
44
- storage: "StorageConfig" = _dc.field(default_factory=lambda: StorageConfig())
45
-
46
- repositories: "_tp.Dict[str, PluginConfig]" = _dc.field(default_factory=dict)
47
-
48
- executor: "PluginConfig" = _dc.field(default_factory=lambda: PluginConfig())
45
+ metadataStore: "PluginConfig" = _dc.field(default_factory=lambda: PluginConfig())
49
46
 
50
47
  jobCache: "PluginConfig" = _dc.field(default_factory=lambda: PluginConfig())
51
48
 
52
- tenants: "_tp.Dict[str, TenantConfig]" = _dc.field(default_factory=dict)
53
-
54
- gateway: "GatewayConfig" = _dc.field(default_factory=lambda: GatewayConfig())
49
+ executor: "PluginConfig" = _dc.field(default_factory=lambda: PluginConfig())
55
50
 
56
51
  services: "_tp.Dict[str, ServiceConfig]" = _dc.field(default_factory=dict)
57
52
 
58
53
  deployment: "DeploymentConfig" = _dc.field(default_factory=lambda: DeploymentConfig())
59
54
 
60
- clientConfig: "_tp.Dict[str, ClientConfig]" = _dc.field(default_factory=dict)
55
+ gateway: "GatewayConfig" = _dc.field(default_factory=lambda: GatewayConfig())
61
56
 
62
57
  extensions: "_tp.Dict[str, protobuf.Any]" = _dc.field(default_factory=dict)
63
58
 
64
-
65
- @_dc.dataclass
66
- class MetadataConfig:
67
-
68
- database: "PluginConfig" = _dc.field(default_factory=lambda: PluginConfig())
69
-
70
- format: "metadata.MetadataFormat" = metadata.MetadataFormat.METADATA_FORMAT_NOT_SET
71
-
72
-
73
- @_dc.dataclass
74
- class TenantConfig:
75
-
76
- defaultBucket: "_tp.Optional[str]" = None
77
-
78
- defaultFormat: "_tp.Optional[str]" = None
59
+ setupTasks: "_tp.Dict[str, PluginConfig]" = _dc.field(default_factory=dict)
79
60
 
80
61
 
81
62
  @_dc.dataclass
@@ -109,6 +90,8 @@ class RouteConfig:
109
90
 
110
91
  target: "RoutingTarget" = _dc.field(default_factory=lambda: RoutingTarget())
111
92
 
93
+ routeKey: "str" = ""
94
+
112
95
 
113
96
  @_dc.dataclass
114
97
  class RoutingMatch:
@@ -136,9 +119,3 @@ class RoutingTarget:
136
119
  class DeploymentConfig:
137
120
 
138
121
  layout: "DeploymentLayout" = DeploymentLayout.LAYOUT_NOT_SET
139
-
140
-
141
- @_dc.dataclass
142
- class ClientConfig:
143
-
144
- properties: "_tp.Dict[str, str]" = _dc.field(default_factory=dict)
@@ -9,7 +9,7 @@ import tracdap.rt.metadata as metadata
9
9
 
10
10
 
11
11
  @_dc.dataclass
12
- class TagUpdateList:
12
+ class JobResultAttrs:
13
13
 
14
14
  attrs: "_tp.List[metadata.TagUpdate]" = _dc.field(default_factory=list)
15
15
 
@@ -19,8 +19,12 @@ class JobResult:
19
19
 
20
20
  jobId: "metadata.TagHeader" = _dc.field(default_factory=lambda: metadata.TagHeader())
21
21
 
22
- statusCode: "metadata.JobStatusCode" = metadata.JobStatusCode.JOB_STATUS_CODE_NOT_SET
22
+ resultId: "metadata.TagHeader" = _dc.field(default_factory=lambda: metadata.TagHeader())
23
23
 
24
- statusMessage: "str" = ""
24
+ result: "metadata.ResultDefinition" = _dc.field(default_factory=lambda: metadata.ResultDefinition())
25
25
 
26
- results: "_tp.Dict[str, metadata.ObjectDefinition]" = _dc.field(default_factory=dict)
26
+ objectIds: "_tp.List[metadata.TagHeader]" = _dc.field(default_factory=list)
27
+
28
+ objects: "_tp.Dict[str, metadata.ObjectDefinition]" = _dc.field(default_factory=dict)
29
+
30
+ attrs: "_tp.Dict[str, JobResultAttrs]" = _dc.field(default_factory=dict)
@@ -4,6 +4,7 @@ import typing as _tp # noqa
4
4
  import dataclasses as _dc # noqa
5
5
  import enum as _enum # noqa
6
6
 
7
+ import tracdap.rt.metadata as metadata
7
8
  from .common import * # noqa
8
9
 
9
10
 
@@ -13,16 +14,10 @@ class RuntimeConfig:
13
14
 
14
15
  config: "_tp.Dict[str, str]" = _dc.field(default_factory=dict)
15
16
 
16
- storage: "StorageConfig" = _dc.field(default_factory=lambda: StorageConfig())
17
+ properties: "_tp.Dict[str, str]" = _dc.field(default_factory=dict)
17
18
 
18
- repositories: "_tp.Dict[str, PluginConfig]" = _dc.field(default_factory=dict)
19
+ secrets: "_tp.Dict[str, str]" = _dc.field(default_factory=dict)
19
20
 
20
- sparkSettings: "SparkSettings" = _dc.field(default_factory=lambda: SparkSettings())
21
+ resources: "_tp.Dict[str, metadata.ResourceDefinition]" = _dc.field(default_factory=dict)
21
22
 
22
- runtimeApi: "ServiceConfig" = _dc.field(default_factory=lambda: ServiceConfig())
23
-
24
-
25
- @_dc.dataclass
26
- class SparkSettings:
27
-
28
- sparkProps: "_tp.Dict[str, str]" = _dc.field(default_factory=dict)
23
+ runtimeApi: "_tp.Optional[ServiceConfig]" = None
@@ -0,0 +1,28 @@
1
+ # Code generated by TRAC
2
+
3
+ import typing as _tp # noqa
4
+ import dataclasses as _dc # noqa
5
+ import enum as _enum # noqa
6
+
7
+ import tracdap.rt.metadata as metadata
8
+
9
+
10
+
11
+ @_dc.dataclass
12
+ class TenantConfigMap:
13
+
14
+ tenants: "_tp.Dict[str, TenantConfig]" = _dc.field(default_factory=dict)
15
+
16
+ autoActivate: "bool" = False
17
+
18
+
19
+ @_dc.dataclass
20
+ class TenantConfig:
21
+
22
+ properties: "_tp.Dict[str, str]" = _dc.field(default_factory=dict)
23
+
24
+ secrets: "_tp.Dict[str, str]" = _dc.field(default_factory=dict)
25
+
26
+ config: "_tp.Dict[str, metadata.ConfigDefinition]" = _dc.field(default_factory=dict)
27
+
28
+ resources: "_tp.Dict[str, metadata.ResourceDefinition]" = _dc.field(default_factory=dict)
tracdap/rt/launch/cli.py CHANGED
@@ -35,14 +35,6 @@ def _cli_args(programmatic_args = None):
35
35
  "--dev-mode", dest="dev_mode", default=False, action="store_true",
36
36
  help="Enable development mode config translation")
37
37
 
38
- parser.add_argument(
39
- "--job-result-dir", dest="job_result_dir", type=pathlib.Path, required=False,
40
- help="Output the result metadata for a batch job to the given directory")
41
-
42
- parser.add_argument(
43
- "--job-result-format", dest="job_result_format", choices=["json", "yaml", "proto"], default="json",
44
- help="Output format for the result metadata (only meaningful if --job-result-dir is set)")
45
-
46
38
  parser.add_argument(
47
39
  "--scratch-dir", dest="scratch_dir", type=pathlib.Path, required=False,
48
40
  help="Scratch directory for working files" +
@@ -36,7 +36,7 @@ def _resolve_config_file(
36
36
  if isinstance(config_path, str):
37
37
  scheme_sep = config_path.find(":")
38
38
  # Single letter scheme is a Windows file path (C:\...)
39
- scheme = scheme = config_path[:scheme_sep] if scheme_sep > 1 else "file"
39
+ scheme = config_path[:scheme_sep] if scheme_sep > 1 else "file"
40
40
  if scheme != "file":
41
41
  return config_path
42
42
 
@@ -191,8 +191,6 @@ def launch_cli(programmatic_args: _tp.Optional[_tp.List[str]] = None):
191
191
  runtime_instance = _runtime.TracRuntime(
192
192
  _sys_config,
193
193
  dev_mode=launch_args.dev_mode,
194
- job_result_dir=launch_args.job_result_dir,
195
- job_result_format=launch_args.job_result_format,
196
194
  scratch_dir=launch_args.scratch_dir,
197
195
  scratch_dir_persist=launch_args.scratch_dir_persist,
198
196
  plugin_packages=launch_args.plugin_packages)
@@ -9,47 +9,31 @@ from .type import Value
9
9
  from .type import ArrayValue
10
10
  from .type import MapValue
11
11
 
12
- from .tag_update import TagOperation
13
- from .tag_update import TagUpdate
14
-
15
12
  from .object_id import ObjectType
16
13
  from .object_id import TagHeader
17
14
  from .object_id import TagSelector
18
15
 
19
- from .job import JobType
20
- from .job import JobStatusCode
21
- from .job import JobGroupType
22
- from .job import JobDefinition
23
- from .job import ResultDefinition
24
- from .job import RunModelJob
25
- from .job import RunFlowJob
26
- from .job import ImportModelJob
27
- from .job import ImportDataJob
28
- from .job import ExportDataJob
29
- from .job import JobGroup
30
- from .job import SequentialJobGroup
31
- from .job import ParallelJobGroup
16
+ from .search import SearchOperator
17
+ from .search import LogicalOperator
18
+ from .search import SearchTerm
19
+ from .search import LogicalExpression
20
+ from .search import SearchExpression
21
+ from .search import SearchParameters
22
+
23
+ from .tag_update import TagOperation
24
+ from .tag_update import TagUpdate
25
+
26
+ from .custom import CustomDefinition
32
27
 
33
28
  from .common import MetadataFormat
34
29
  from .common import MetadataVersion
35
30
  from .common import TenantInfo
36
31
 
37
- from .resource import ResourceType
38
- from .resource import ResourceDefinition
39
-
40
- from .config import ConfigType
41
- from .config import ConfigEntry
42
- from .config import ConfigDetails
43
- from .config import ConfigDefinition
44
-
45
- from .custom import CustomDefinition
46
-
47
32
  from .data import SchemaType
48
33
  from .data import PartType
49
34
  from .data import FieldSchema
35
+ from .data import EnumValues
50
36
  from .data import TableSchema
51
- from .data import StructField
52
- from .data import StructSchema
53
37
  from .data import SchemaDefinition
54
38
  from .data import PartKey
55
39
  from .data import DataDefinition
@@ -63,26 +47,42 @@ from .model import ModelInputSchema
63
47
  from .model import ModelOutputSchema
64
48
  from .model import ModelDefinition
65
49
 
66
- from .search import SearchOperator
67
- from .search import LogicalOperator
68
- from .search import SearchTerm
69
- from .search import LogicalExpression
70
- from .search import SearchExpression
71
- from .search import SearchParameters
72
-
73
50
  from .flow import FlowNodeType
74
51
  from .flow import FlowNode
75
52
  from .flow import FlowSocket
76
53
  from .flow import FlowEdge
77
54
  from .flow import FlowDefinition
78
55
 
56
+ from .job import JobType
57
+ from .job import JobStatusCode
58
+ from .job import JobGroupType
59
+ from .job import JobDefinition
60
+ from .job import ResultDefinition
61
+ from .job import RunModelJob
62
+ from .job import RunFlowJob
63
+ from .job import ImportModelJob
64
+ from .job import ImportDataJob
65
+ from .job import ExportDataJob
66
+ from .job import JobGroup
67
+ from .job import SequentialJobGroup
68
+ from .job import ParallelJobGroup
69
+
79
70
  from .storage import CopyStatus
80
71
  from .storage import IncarnationStatus
72
+ from .storage import StorageLayout
81
73
  from .storage import StorageCopy
82
74
  from .storage import StorageIncarnation
83
75
  from .storage import StorageItem
84
76
  from .storage import StorageDefinition
85
77
 
78
+ from .resource import ResourceType
79
+ from .resource import ResourceDefinition
80
+
81
+ from .config import ConfigType
82
+ from .config import ConfigEntry
83
+ from .config import ConfigDetails
84
+ from .config import ConfigDefinition
85
+
86
86
  from .object import ObjectDefinition
87
87
 
88
88
  from .tag import Tag