tracdap-runtime 0.8.0b1__py3-none-any.whl → 0.8.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tracdap/rt/_exec/actors.py +5 -4
- tracdap/rt/_exec/context.py +26 -10
- tracdap/rt/_exec/dev_mode.py +3 -2
- tracdap/rt/_exec/engine.py +221 -98
- tracdap/rt/_exec/functions.py +27 -47
- tracdap/rt/_exec/graph.py +1 -20
- tracdap/rt/_exec/graph_builder.py +31 -17
- tracdap/rt/_exec/runtime.py +5 -4
- tracdap/rt/_exec/server.py +4 -3
- tracdap/rt/_impl/config_parser.py +3 -2
- tracdap/rt/_impl/data.py +3 -3
- tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.py +64 -62
- tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.pyi +16 -2
- tracdap/rt/_impl/grpc/tracdap/metadata/object_id_pb2.py +3 -3
- tracdap/rt/_impl/grpc/tracdap/metadata/object_id_pb2.pyi +2 -0
- tracdap/rt/_impl/grpc/tracdap/metadata/object_pb2.py +4 -4
- tracdap/rt/_impl/grpc/tracdap/metadata/object_pb2.pyi +4 -2
- tracdap/rt/_impl/logging.py +195 -0
- tracdap/rt/_impl/models.py +3 -2
- tracdap/rt/_impl/repos.py +5 -3
- tracdap/rt/_impl/schemas.py +2 -2
- tracdap/rt/_impl/shim.py +3 -2
- tracdap/rt/_impl/storage.py +4 -3
- tracdap/rt/_impl/util.py +0 -110
- tracdap/rt/_impl/validation.py +3 -2
- tracdap/rt/_version.py +1 -1
- tracdap/rt/ext/plugins.py +2 -2
- tracdap/rt/metadata/__init__.py +1 -0
- tracdap/rt/metadata/job.py +16 -0
- tracdap/rt/metadata/object.py +2 -0
- tracdap/rt/metadata/object_id.py +2 -0
- {tracdap_runtime-0.8.0b1.dist-info → tracdap_runtime-0.8.0b2.dist-info}/METADATA +1 -1
- {tracdap_runtime-0.8.0b1.dist-info → tracdap_runtime-0.8.0b2.dist-info}/RECORD +36 -35
- {tracdap_runtime-0.8.0b1.dist-info → tracdap_runtime-0.8.0b2.dist-info}/WHEEL +1 -1
- {tracdap_runtime-0.8.0b1.dist-info → tracdap_runtime-0.8.0b2.dist-info}/LICENSE +0 -0
- {tracdap_runtime-0.8.0b1.dist-info → tracdap_runtime-0.8.0b2.dist-info}/top_level.txt +0 -0
tracdap/rt/_exec/functions.py
CHANGED
@@ -13,11 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-from __future__ import annotations
-
 import copy
 import datetime
 import abc
+import pathlib
 import random
 import dataclasses as dc # noqa

@@ -29,6 +28,7 @@ import tracdap.rt._exec.graph_builder as _graph
 import tracdap.rt._impl.config_parser as _cfg_p # noqa
 import tracdap.rt._impl.type_system as _types # noqa
 import tracdap.rt._impl.data as _data # noqa
+import tracdap.rt._impl.logging as _logging # noqa
 import tracdap.rt._impl.storage as _storage # noqa
 import tracdap.rt._impl.models as _models # noqa
 import tracdap.rt._impl.util as _util # noqa
@@ -228,11 +228,22 @@ class BuildJobResultFunc(NodeFunction[_config.JobResult]):
         job_result.jobId = self.node.job_id
         job_result.statusCode = meta.JobStatusCode.SUCCEEDED

+        if self.node.result_id is not None:
+
+            result_def = meta.ResultDefinition()
+            result_def.jobId = _util.selector_for(self.node.job_id)
+            result_def.statusCode = meta.JobStatusCode.SUCCEEDED
+
+            result_key = _util.object_key(self.node.result_id)
+            result_obj = meta.ObjectDefinition(objectType=meta.ObjectType.RESULT, result=result_def)
+
+            job_result.results[result_key] = result_obj
+
         # TODO: Handle individual failed results

-        for
+        for obj_key, node_id in self.node.outputs.objects.items():
             obj_def = _ctx_lookup(node_id, ctx)
-            job_result.results[
+            job_result.results[obj_key] = obj_def

         for bundle_id in self.node.outputs.bundles:
             bundle = _ctx_lookup(bundle_id, ctx)
@@ -242,9 +253,9 @@ class BuildJobResultFunc(NodeFunction[_config.JobResult]):

         runtime_outputs = _ctx_lookup(self.node.runtime_outputs, ctx)

-        for
+        for obj_key, node_id in runtime_outputs.objects.items():
             obj_def = _ctx_lookup(node_id, ctx)
-            job_result.results[
+            job_result.results[obj_key] = obj_def

         for bundle_id in runtime_outputs.bundles:
             bundle = _ctx_lookup(bundle_id, ctx)
@@ -253,37 +264,6 @@ class BuildJobResultFunc(NodeFunction[_config.JobResult]):
         return job_result


-class SaveJobResultFunc(NodeFunction[None]):
-
-    def __init__(self, node: SaveJobResultNode):
-        super().__init__()
-        self.node = node
-
-    def _execute(self, ctx: NodeContext) -> None:
-
-        job_result = _ctx_lookup(self.node.job_result_id, ctx)
-
-        if not self.node.result_spec.save_result:
-            return None
-
-        job_result_format = self.node.result_spec.result_format
-        job_result_str = _cfg_p.ConfigQuoter.quote(job_result, job_result_format)
-        job_result_bytes = bytes(job_result_str, "utf-8")
-
-        job_key = _util.object_key(job_result.jobId)
-        job_result_file = f"job_result_{job_key}.{self.node.result_spec.result_format}"
-        job_result_path = pathlib \
-            .Path(self.node.result_spec.result_dir) \
-            .joinpath(job_result_file)
-
-        _util.logger_for_object(self).info(f"Saving job result to [{job_result_path}]")
-
-        with open(job_result_path, "xb") as result_stream:
-            result_stream.write(job_result_bytes)
-
-        return None
-
-
 class DataViewFunc(NodeFunction[_data.DataView]):

     def __init__(self, node: DataViewNode):
@@ -633,8 +613,6 @@ class ImportModelFunc(NodeFunction[meta.ObjectDefinition]):
         self.node = node
         self._models = models

-        self._log = _util.logger_for_object(self)
-
     def _execute(self, ctx: NodeContext) -> meta.ObjectDefinition:

         model_stub = _model_def_for_import(self.node.import_details)
@@ -651,13 +629,15 @@ class RunModelFunc(NodeFunction[Bundle[_data.DataView]]):
             self, node: RunModelNode,
             model_class: _api.TracModel.__class__,
             checkout_directory: pathlib.Path,
-            storage_manager: _storage.StorageManager
+            storage_manager: _storage.StorageManager,
+            log_provider: _logging.LogProvider):

         super().__init__()
         self.node = node
         self.model_class = model_class
         self.checkout_directory = checkout_directory
         self.storage_manager = storage_manager
+        self.log_provider = log_provider

     def _execute(self, ctx: NodeContext) -> Bundle[_data.DataView]:

@@ -684,7 +664,7 @@ class RunModelFunc(NodeFunction[Bundle[_data.DataView]]):
         for storage_key in self.node.storage_access:
             if self.storage_manager.has_file_storage(storage_key, external=True):
                 storage_impl = self.storage_manager.get_file_storage(storage_key, external=True)
-                storage = _ctx.TracFileStorageImpl(storage_key, storage_impl, write_access, self.checkout_directory)
+                storage = _ctx.TracFileStorageImpl(storage_key, storage_impl, write_access, self.checkout_directory, self.log_provider)
                 storage_map[storage_key] = storage
             elif self.storage_manager.has_data_storage(storage_key, external=True):
                 storage_impl = self.storage_manager.get_data_storage(storage_key, external=True)
@@ -692,7 +672,7 @@ class RunModelFunc(NodeFunction[Bundle[_data.DataView]]):
                 if not isinstance(storage_impl, _storage.IDataStorageBase):
                     raise _ex.EStorageConfig(f"External storage for [{storage_key}] is using the legacy storage framework]")
                 converter = _data.DataConverter.noop()
-                storage = _ctx.TracDataStorageImpl(storage_key, storage_impl, converter, write_access, self.checkout_directory)
+                storage = _ctx.TracDataStorageImpl(storage_key, storage_impl, converter, write_access, self.checkout_directory, self.log_provider)
                 storage_map[storage_key] = storage
             else:
                 raise _ex.EStorageConfig(f"External storage is not available: [{storage_key}]")
@@ -704,12 +684,12 @@ class RunModelFunc(NodeFunction[Bundle[_data.DataView]]):
             trac_ctx = _ctx.TracDataContextImpl(
                 self.node.model_def, self.model_class,
                 local_ctx, dynamic_outputs, storage_map,
-                self.checkout_directory)
+                self.checkout_directory, self.log_provider)
         else:
             trac_ctx = _ctx.TracContextImpl(
                 self.node.model_def, self.model_class,
                 local_ctx, dynamic_outputs,
-                self.checkout_directory)
+                self.checkout_directory, self.log_provider)

         try:
             model = self.model_class()
@@ -812,9 +792,10 @@ class FunctionResolver:

     __ResolveFunc = tp.Callable[['FunctionResolver', Node[_T]], NodeFunction[_T]]

-    def __init__(self, models: _models.ModelLoader, storage: _storage.StorageManager):
+    def __init__(self, models: _models.ModelLoader, storage: _storage.StorageManager, log_provider: _logging.LogProvider):
         self._models = models
         self._storage = storage
+        self._log_provider = log_provider

     def resolve_node(self, node: Node[_T]) -> NodeFunction[_T]:

@@ -850,7 +831,7 @@ class FunctionResolver:
         checkout_directory = self._models.model_load_checkout_directory(node.model_scope, node.model_def)
         storage_manager = self._storage if node.storage_access else None

-        return RunModelFunc(node, model_class, checkout_directory, storage_manager)
+        return RunModelFunc(node, model_class, checkout_directory, storage_manager, self._log_provider)

     __basic_node_mapping: tp.Dict[Node.__class__, NodeFunction.__class__] = {

@@ -861,7 +842,6 @@ class FunctionResolver:
         DataViewNode: DataViewFunc,
         DataItemNode: DataItemFunc,
         BuildJobResultNode: BuildJobResultFunc,
-        SaveJobResultNode: SaveJobResultFunc,
         DataResultNode: DataResultFunc,
         StaticValueNode: StaticValueFunc,
         RuntimeOutputsNode: RuntimeOutputsFunc,
tracdap/rt/_exec/graph.py
CHANGED
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

-import pathlib
 import typing as tp
 import dataclasses as dc

@@ -182,15 +181,6 @@ class JobOutputs:
     bundles: tp.List[NodeId[ObjectBundle]] = dc.field(default_factory=list)


-# TODO: Where does this go?
-@dc.dataclass(frozen=True)
-class JobResultSpec:
-
-    save_result: bool = False
-    result_dir: tp.Union[str, pathlib.Path] = None
-    result_format: str = None
-
-
 # ----------------------------------------------------------------------------------------------------------------------
 # NODE DEFINITIONS
 # ----------------------------------------------------------------------------------------------------------------------
@@ -402,6 +392,7 @@ class RuntimeOutputsNode(Node[JobOutputs]):
 @_node_type
 class BuildJobResultNode(Node[cfg.JobResult]):

+    result_id: meta.TagHeader
     job_id: meta.TagHeader

     outputs: JobOutputs
@@ -414,16 +405,6 @@ class BuildJobResultNode(Node[cfg.JobResult]):
         return {node_id: DependencyType.HARD for node_id in dep_ids}


-@_node_type
-class SaveJobResultNode(Node[None]):
-
-    job_result_id: NodeId[cfg.JobResult]
-    result_spec: JobResultSpec
-
-    def _node_dependencies(self) -> tp.Dict[NodeId, DependencyType]:
-        return {self.job_result_id: DependencyType.HARD}
-
-
 @_node_type
 class ChildJobNode(Node[cfg.JobResult]):

tracdap/rt/_exec/graph_builder.py
CHANGED
@@ -35,11 +35,10 @@ class GraphBuilder:

     __JOB_BUILD_FUNC = tp.Callable[[meta.JobDefinition, NodeId], GraphSection]

-    def __init__(self, sys_config: config.RuntimeConfig, job_config: config.JobConfig
+    def __init__(self, sys_config: config.RuntimeConfig, job_config: config.JobConfig):

         self._sys_config = sys_config
         self._job_config = job_config
-        self._result_spec = result_spec

         self._job_key = _util.object_key(job_config.jobId)
         self._job_namespace = NodeNamespace(self._job_key)
@@ -48,7 +47,7 @@ class GraphBuilder:

     def _child_builder(self, job_id: meta.TagHeader) -> "GraphBuilder":

-        builder = GraphBuilder(self._sys_config, self._job_config
+        builder = GraphBuilder(self._sys_config, self._job_config)
         builder._job_key = _util.object_key(job_id)
         builder._job_namespace = NodeNamespace(builder._job_key)

@@ -585,6 +584,27 @@ class GraphBuilder:
             file_key=resolved_output_key,
             storage_key=resolved_storage_key)

+    @classmethod
+    def build_output_file_and_storage(cls, output_key, file_type: meta.FileType, sys_config: cfg.RuntimeConfig, job_config: cfg.JobConfig):
+
+        # TODO: Review and de-dupe building of output metadata
+        # Responsibility for assigning outputs could perhaps move from orchestrator to runtime
+
+        output_storage_key = f"{output_key}:STORAGE"
+
+        output_id = job_config.resultMapping[output_key]
+        output_storage_id = job_config.resultMapping[output_storage_key]
+
+        timestamp = _dt.datetime.fromisoformat(output_id.objectTimestamp.isoDatetime)
+        data_item = f"file/{output_id.objectId}/version-{output_id.objectVersion}"
+        storage_key = sys_config.storage.defaultBucket
+        storage_path = f"file/FILE-{output_id.objectId}/version-{output_id.objectVersion}/{output_key}.{file_type.extension}"
+
+        file_def = cls.build_file_def(output_key, file_type, output_storage_id, data_item)
+        storage_def = cls.build_storage_def(data_item, storage_key, storage_path, file_type.mimeType, timestamp)
+
+        return file_def, storage_def
+
     @classmethod
     def build_runtime_outputs(cls, output_names: tp.List[str], job_namespace: NodeNamespace):

@@ -693,15 +713,16 @@
             explicit_deps: tp.Optional[tp.List[NodeId]] = None) \
             -> GraphSection:

-
+        result_id = self._job_config.resultMapping.get("trac_job_result")
+        result_node_id = NodeId.of("trac_job_result", self._job_namespace, cfg.JobResult)

         if objects is not None:

             results_inputs = set(objects.values())

             build_result_node = BuildJobResultNode(
-
-                outputs
+                result_node_id, result_id, self._job_config.jobId,
+                outputs=JobOutputs(objects=objects),
                 explicit_deps=explicit_deps)

         elif bundles is not None:
@@ -709,23 +730,16 @@
             results_inputs = set(bundles)

             build_result_node = BuildJobResultNode(
-
-                outputs
+                result_node_id, result_id, self._job_config.jobId,
+                outputs=JobOutputs(bundles=bundles),
                 explicit_deps=explicit_deps)

         else:
             raise _ex.EUnexpected()

-
-            save_result_id = NodeId("trac_save_result", self._job_namespace)
-            save_result_node = SaveJobResultNode(save_result_id, build_result_id, self._result_spec)
-            result_nodes = {build_result_id: build_result_node, save_result_id: save_result_node}
-            job_result_id = save_result_id
-        else:
-            result_nodes = {build_result_id: build_result_node}
-            job_result_id = build_result_id
+        result_nodes = {result_node_id: build_result_node}

-        return GraphSection(result_nodes, inputs=results_inputs, must_run=[
+        return GraphSection(result_nodes, inputs=results_inputs, must_run=[result_node_id])

     def build_model_or_flow_with_context(
         self, namespace: NodeNamespace, model_or_flow_name: str,
tracdap/rt/_exec/runtime.py
CHANGED
@@ -33,11 +33,12 @@ import tracdap.rt._exec.actors as _actors
 import tracdap.rt._exec.engine as _engine
 import tracdap.rt._exec.dev_mode as _dev_mode
 import tracdap.rt._impl.config_parser as _cparse # noqa
-import tracdap.rt._impl.
+import tracdap.rt._impl.guard_rails as _guard # noqa
+import tracdap.rt._impl.logging as _log # noqa
 import tracdap.rt._impl.models as _models # noqa
 import tracdap.rt._impl.storage as _storage # noqa
 import tracdap.rt._impl.static_api as _static_api # noqa
-import tracdap.rt._impl.
+import tracdap.rt._impl.util as _util # noqa
 import tracdap.rt._version as _version


@@ -83,8 +84,8 @@ class TracRuntime:
         if isinstance(scratch_dir, str):
             scratch_dir = pathlib.Path(scratch_dir)

-
-        self._log =
+        _log.configure_logging()
+        self._log = _log.logger_for_object(self)
         self._log.info(f"TRAC D.A.P. Python Runtime {trac_version}")

         self._sys_config = sys_config if isinstance(sys_config, _cfg.RuntimeConfig) else None
tracdap/rt/_exec/server.py
CHANGED
@@ -21,6 +21,7 @@ import tracdap.rt.config as config
 import tracdap.rt.exceptions as ex
 import tracdap.rt._exec.actors as actors
 import tracdap.rt._impl.grpc.codec as codec # noqa
+import tracdap.rt._impl.logging as logging # noqa
 import tracdap.rt._impl.util as util # noqa

 # Check whether gRPC is installed before trying to load any of the generated modules
@@ -44,7 +45,7 @@ class RuntimeApiServer(runtime_grpc.TracRuntimeApiServicer):

     def __init__(self, system: actors.ActorSystem, port: int):

-        self.__log =
+        self.__log = logging.logger_for_object(self)

         self.__system = system
         self.__engine_id = system.main_id()
@@ -158,7 +159,7 @@ class ApiAgent(actors.ThreadsafeActor):

     def __init__(self):
         super().__init__()
-        self._log =
+        self._log = logging.logger_for_object(self)
         self._event_loop = asyncio.get_event_loop()
         self.__start_signal = asyncio.Event()

@@ -258,7 +259,7 @@ class ApiRequest(actors.ThreadsafeActor, tp.Generic[_T_REQUEST, _T_RESPONSE]):
         self.threadsafe().stop()


-ApiRequest._log =
+ApiRequest._log = logging.logger_for_class(ApiRequest)


 class ListJobsRequest(ApiRequest[runtime_pb2.RuntimeListJobsRequest, runtime_pb2.RuntimeListJobsResponse]):
tracdap/rt/_impl/config_parser.py
CHANGED
@@ -32,6 +32,7 @@ import tracdap.rt.config as _config
 import tracdap.rt.exceptions as _ex
 import tracdap.rt.ext.plugins as _plugins
 import tracdap.rt.ext.config as _config_ext
+import tracdap.rt._impl.logging as _logging
 import tracdap.rt._impl.util as _util

 import yaml
@@ -103,7 +104,7 @@ class ConfigManager:
         return raw_url

     def __init__(self, root_dir_url: _urlp.ParseResult, root_file_url: tp.Optional[_urlp.ParseResult]):
-        self._log =
+        self._log = _logging.logger_for_object(self)
         self._root_dir_url = root_dir_url
         self._root_file_url = root_file_url

@@ -294,7 +295,7 @@ class ConfigParser(tp.Generic[_T]):
     }

     def __init__(self, config_class: _T.__class__, dev_mode_locations: tp.List[str] = None):
-        self._log =
+        self._log = _logging.logger_for_object(self)
         self._config_class = config_class
         self._dev_mode_locations = dev_mode_locations or []
         self._errors = []
tracdap/rt/_impl/data.py
CHANGED
@@ -37,7 +37,7 @@ except ModuleNotFoundError:
 import tracdap.rt.api.experimental as _api
 import tracdap.rt.metadata as _meta
 import tracdap.rt.exceptions as _ex
-import tracdap.rt._impl.
+import tracdap.rt._impl.logging as _log


 @dc.dataclass(frozen=True)
@@ -188,7 +188,7 @@ class DataMapping:
     :py:class:`TypeMapping <tracdap.rt.impl.type_system.MetadataCodec>`.
     """

-    __log =
+    __log = _log.logger_for_namespace(_DataInternal.__module__ + ".DataMapping")

     # Matches TRAC_ARROW_TYPE_MAPPING in ArrowSchema, tracdap-lib-data

@@ -715,7 +715,7 @@ class DataConformance:
     Check and/or apply conformance between datasets and schemas.
     """

-    __log =
+    __log = _log.logger_for_namespace(_DataInternal.__module__ + ".DataConformance")

     __E_FIELD_MISSING = \
         "Field [{field_name}] is missing from the data"
tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.py
CHANGED
@@ -17,7 +17,7 @@ from tracdap.rt._impl.grpc.tracdap.metadata import object_id_pb2 as tracdap_dot_rt_dot___impl_dot_grpc_dot_tracdap_dot_metadata_dot_object__id__pb2
 from tracdap.rt._impl.grpc.tracdap.metadata import tag_update_pb2 as tracdap_dot_rt_dot___impl_dot_grpc_dot_tracdap_dot_metadata_dot_tag__update__pb2


-DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n0tracdap/rt/_impl/grpc/tracdap/metadata/job.proto\x12\x10tracdap.metadata\x1a\x31tracdap/rt/_impl/grpc/tracdap/metadata/type.proto\x1a\x36tracdap/rt/_impl/grpc/tracdap/metadata/object_id.proto\x1a\x37tracdap/rt/_impl/grpc/tracdap/metadata/tag_update.proto\"\
+
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n0tracdap/rt/_impl/grpc/tracdap/metadata/job.proto\x12\x10tracdap.metadata\x1a\x31tracdap/rt/_impl/grpc/tracdap/metadata/type.proto\x1a\x36tracdap/rt/_impl/grpc/tracdap/metadata/object_id.proto\x1a\x37tracdap/rt/_impl/grpc/tracdap/metadata/tag_update.proto\"\xb5\x03\n\rJobDefinition\x12*\n\x07jobType\x18\x01 \x01(\x0e\x32\x19.tracdap.metadata.JobType\x12\x31\n\x08runModel\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.RunModelJobH\x00\x12/\n\x07runFlow\x18\x03 \x01(\x0b\x32\x1c.tracdap.metadata.RunFlowJobH\x00\x12\x37\n\x0bimportModel\x18\x04 \x01(\x0b\x32 .tracdap.metadata.ImportModelJobH\x00\x12\x35\n\nimportData\x18\x05 \x01(\x0b\x32\x1f.tracdap.metadata.ImportDataJobH\x00\x12\x35\n\nexportData\x18\x06 \x01(\x0b\x32\x1f.tracdap.metadata.ExportDataJobH\x00\x12.\n\x08jobGroup\x18\x07 \x01(\x0b\x32\x1a.tracdap.metadata.JobGroupH\x00\x12/\n\x08resultId\x18\x08 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelectorB\x0c\n\njobDetails\"\xbe\x01\n\x10ResultDefinition\x12,\n\x05jobId\x18\x01 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector\x12\x33\n\nstatusCode\x18\x02 \x01(\x0e\x32\x1f.tracdap.metadata.JobStatusCode\x12\x15\n\rstatusMessage\x18\x03 \x01(\t\x12\x30\n\tlogFileId\x18\x04 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector\"\xac\x05\n\x0bRunModelJob\x12,\n\x05model\x18\x01 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector\x12\x41\n\nparameters\x18\x02 \x03(\x0b\x32-.tracdap.metadata.RunModelJob.ParametersEntry\x12\x39\n\x06inputs\x18\x03 \x03(\x0b\x32).tracdap.metadata.RunModelJob.InputsEntry\x12;\n\x07outputs\x18\x04 \x03(\x0b\x32*.tracdap.metadata.RunModelJob.OutputsEntry\x12\x45\n\x0cpriorOutputs\x18\x05 \x03(\x0b\x32/.tracdap.metadata.RunModelJob.PriorOutputsEntry\x12\x30\n\x0boutputAttrs\x18\x06 \x03(\x0b\x32\x1b.tracdap.metadata.TagUpdate\x1aJ\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.tracdap.metadata.Value:\x02\x38\x01\x1aL\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aM\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aR\n\x11PriorOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\"\xae\x06\n\nRunFlowJob\x12+\n\x04\x66low\x18\x01 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector\x12@\n\nparameters\x18\x02 \x03(\x0b\x32,.tracdap.metadata.RunFlowJob.ParametersEntry\x12\x38\n\x06inputs\x18\x03 \x03(\x0b\x32(.tracdap.metadata.RunFlowJob.InputsEntry\x12:\n\x07outputs\x18\x04 \x03(\x0b\x32).tracdap.metadata.RunFlowJob.OutputsEntry\x12\x44\n\x0cpriorOutputs\x18\x05 \x03(\x0b\x32..tracdap.metadata.RunFlowJob.PriorOutputsEntry\x12\x38\n\x06models\x18\x06 \x03(\x0b\x32(.tracdap.metadata.RunFlowJob.ModelsEntry\x12\x30\n\x0boutputAttrs\x18\x07 \x03(\x0b\x32\x1b.tracdap.metadata.TagUpdate\x1aJ\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.tracdap.metadata.Value:\x02\x38\x01\x1aL\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aM\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aR\n\x11PriorOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 
\x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aL\n\x0bModelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\"\xd7\x01\n\x0eImportModelJob\x12\x10\n\x08language\x18\x01 \x01(\t\x12\x12\n\nrepository\x18\x02 \x01(\t\x12\x19\n\x0cpackageGroup\x18\x07 \x01(\tH\x00\x88\x01\x01\x12\x0f\n\x07package\x18\x08 \x01(\t\x12\x0f\n\x07version\x18\x05 \x01(\t\x12\x12\n\nentryPoint\x18\x04 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\x12/\n\nmodelAttrs\x18\x06 \x03(\x0b\x32\x1b.tracdap.metadata.TagUpdateB\x0f\n\r_packageGroup\"\x8d\x07\n\rImportDataJob\x12,\n\x05model\x18\x01 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector\x12\x43\n\nparameters\x18\x02 \x03(\x0b\x32/.tracdap.metadata.ImportDataJob.ParametersEntry\x12;\n\x06inputs\x18\x03 \x03(\x0b\x32+.tracdap.metadata.ImportDataJob.InputsEntry\x12=\n\x07outputs\x18\x04 \x03(\x0b\x32,.tracdap.metadata.ImportDataJob.OutputsEntry\x12G\n\x0cpriorOutputs\x18\x05 \x03(\x0b\x32\x31.tracdap.metadata.ImportDataJob.PriorOutputsEntry\x12\x15\n\rstorageAccess\x18\x06 \x03(\t\x12=\n\x07imports\x18\x07 \x03(\x0b\x32,.tracdap.metadata.ImportDataJob.ImportsEntry\x12\x30\n\x0boutputAttrs\x18\x08 \x03(\x0b\x32\x1b.tracdap.metadata.TagUpdate\x12\x30\n\x0bimportAttrs\x18\t \x03(\x0b\x32\x1b.tracdap.metadata.TagUpdate\x1aJ\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.tracdap.metadata.Value:\x02\x38\x01\x1aL\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aM\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aR\n\x11PriorOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aM\n\x0cImportsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\"\xdb\x06\n\rExportDataJob\x12,\n\x05model\x18\x01 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector\x12\x43\n\nparameters\x18\x02 \x03(\x0b\x32/.tracdap.metadata.ExportDataJob.ParametersEntry\x12;\n\x06inputs\x18\x03 \x03(\x0b\x32+.tracdap.metadata.ExportDataJob.InputsEntry\x12=\n\x07outputs\x18\x04 \x03(\x0b\x32,.tracdap.metadata.ExportDataJob.OutputsEntry\x12G\n\x0cpriorOutputs\x18\x05 \x03(\x0b\x32\x31.tracdap.metadata.ExportDataJob.PriorOutputsEntry\x12\x15\n\rstorageAccess\x18\x06 \x03(\t\x12=\n\x07\x65xports\x18\x07 \x03(\x0b\x32,.tracdap.metadata.ExportDataJob.ExportsEntry\x12\x30\n\x0boutputAttrs\x18\x08 \x03(\x0b\x32\x1b.tracdap.metadata.TagUpdate\x1aJ\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.tracdap.metadata.Value:\x02\x38\x01\x1aL\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aM\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aR\n\x11PriorOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\x1aM\n\x0c\x45xportsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x1d.tracdap.metadata.TagSelector:\x02\x38\x01\"\xc7\x01\n\x08JobGroup\x12\x34\n\x0cjobGroupType\x18\x01 \x01(\x0e\x32\x1e.tracdap.metadata.JobGroupType\x12:\n\nsequential\x18\x02 
\x01(\x0b\x32$.tracdap.metadata.SequentialJobGroupH\x00\x12\x36\n\x08parallel\x18\x03 \x01(\x0b\x32\".tracdap.metadata.ParallelJobGroupH\x00\x42\x11\n\x0fjobGroupDetails\"C\n\x12SequentialJobGroup\x12-\n\x04jobs\x18\x01 \x03(\x0b\x32\x1f.tracdap.metadata.JobDefinition\"A\n\x10ParallelJobGroup\x12-\n\x04jobs\x18\x01 \x03(\x0b\x32\x1f.tracdap.metadata.JobDefinition*\x7f\n\x07JobType\x12\x14\n\x10JOB_TYPE_NOT_SET\x10\x00\x12\r\n\tRUN_MODEL\x10\x01\x12\x0c\n\x08RUN_FLOW\x10\x02\x12\x10\n\x0cIMPORT_MODEL\x10\x03\x12\x0f\n\x0bIMPORT_DATA\x10\x04\x12\x0f\n\x0b\x45XPORT_DATA\x10\x05\x12\r\n\tJOB_GROUP\x10\x06*\xb8\x01\n\rJobStatusCode\x12\x1b\n\x17JOB_STATUS_CODE_NOT_SET\x10\x00\x12\r\n\tPREPARING\x10\x01\x12\r\n\tVALIDATED\x10\x02\x12\x0b\n\x07PENDING\x10\x03\x12\n\n\x06QUEUED\x10\x04\x12\r\n\tSUBMITTED\x10\x05\x12\x0b\n\x07RUNNING\x10\x06\x12\r\n\tFINISHING\x10\x07\x12\r\n\tSUCCEEDED\x10\x08\x12\n\n\x06\x46\x41ILED\x10\t\x12\r\n\tCANCELLED\x10\n*\\\n\x0cJobGroupType\x12\x1a\n\x16JOB_GROUP_TYPE_NOT_SET\x10\x00\x12\x18\n\x14SEQUENTIAL_JOB_GROUP\x10\x01\x12\x16\n\x12PARALLEL_JOB_GROUP\x10\x02\x42\x1e\n\x1aorg.finos.tracdap.metadataP\x01\x62\x06proto3')

 _globals = globals()
 _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
@@ -63,66 +63,68 @@ if _descriptor._USE_C_DESCRIPTORS == False:
   _globals['_EXPORTDATAJOB_PRIOROUTPUTSENTRY']._serialized_options = b'8\001'
   _globals['_EXPORTDATAJOB_EXPORTSENTRY']._options = None
   _globals['_EXPORTDATAJOB_EXPORTSENTRY']._serialized_options = b'8\001'
-  _globals['_JOBTYPE']._serialized_start=
-  _globals['_JOBTYPE']._serialized_end=
-  _globals['_JOBSTATUSCODE']._serialized_start=
-  _globals['_JOBSTATUSCODE']._serialized_end=
-  _globals['_JOBGROUPTYPE']._serialized_start=
-  _globals['_JOBGROUPTYPE']._serialized_end=
+  _globals['_JOBTYPE']._serialized_start=4701
+  _globals['_JOBTYPE']._serialized_end=4828
+  _globals['_JOBSTATUSCODE']._serialized_start=4831
+  _globals['_JOBSTATUSCODE']._serialized_end=5015
+  _globals['_JOBGROUPTYPE']._serialized_start=5017
+  _globals['_JOBGROUPTYPE']._serialized_end=5109
   _globals['_JOBDEFINITION']._serialized_start=235
-  _globals['_JOBDEFINITION']._serialized_end=
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
-  _globals['
+  _globals['_JOBDEFINITION']._serialized_end=672
+  _globals['_RESULTDEFINITION']._serialized_start=675
+  _globals['_RESULTDEFINITION']._serialized_end=865
+  _globals['_RUNMODELJOB']._serialized_start=868
+  _globals['_RUNMODELJOB']._serialized_end=1552
+  _globals['_RUNMODELJOB_PARAMETERSENTRY']._serialized_start=1237
+  _globals['_RUNMODELJOB_PARAMETERSENTRY']._serialized_end=1311
+  _globals['_RUNMODELJOB_INPUTSENTRY']._serialized_start=1313
+  _globals['_RUNMODELJOB_INPUTSENTRY']._serialized_end=1389
+  _globals['_RUNMODELJOB_OUTPUTSENTRY']._serialized_start=1391
+  _globals['_RUNMODELJOB_OUTPUTSENTRY']._serialized_end=1468
+  _globals['_RUNMODELJOB_PRIOROUTPUTSENTRY']._serialized_start=1470
+  _globals['_RUNMODELJOB_PRIOROUTPUTSENTRY']._serialized_end=1552
+  _globals['_RUNFLOWJOB']._serialized_start=1555
+  _globals['_RUNFLOWJOB']._serialized_end=2369
+  _globals['_RUNFLOWJOB_PARAMETERSENTRY']._serialized_start=1237
+  _globals['_RUNFLOWJOB_PARAMETERSENTRY']._serialized_end=1311
+  _globals['_RUNFLOWJOB_INPUTSENTRY']._serialized_start=1313
+  _globals['_RUNFLOWJOB_INPUTSENTRY']._serialized_end=1389
+  _globals['_RUNFLOWJOB_OUTPUTSENTRY']._serialized_start=1391
+  _globals['_RUNFLOWJOB_OUTPUTSENTRY']._serialized_end=1468
+  _globals['_RUNFLOWJOB_PRIOROUTPUTSENTRY']._serialized_start=1470
+  _globals['_RUNFLOWJOB_PRIOROUTPUTSENTRY']._serialized_end=1552
+  _globals['_RUNFLOWJOB_MODELSENTRY']._serialized_start=2293
+  _globals['_RUNFLOWJOB_MODELSENTRY']._serialized_end=2369
+  _globals['_IMPORTMODELJOB']._serialized_start=2372
+  _globals['_IMPORTMODELJOB']._serialized_end=2587
+  _globals['_IMPORTDATAJOB']._serialized_start=2590
+  _globals['_IMPORTDATAJOB']._serialized_end=3499
+  _globals['_IMPORTDATAJOB_PARAMETERSENTRY']._serialized_start=1237
+  _globals['_IMPORTDATAJOB_PARAMETERSENTRY']._serialized_end=1311
+  _globals['_IMPORTDATAJOB_INPUTSENTRY']._serialized_start=1313
+  _globals['_IMPORTDATAJOB_INPUTSENTRY']._serialized_end=1389
+  _globals['_IMPORTDATAJOB_OUTPUTSENTRY']._serialized_start=1391
+  _globals['_IMPORTDATAJOB_OUTPUTSENTRY']._serialized_end=1468
+  _globals['_IMPORTDATAJOB_PRIOROUTPUTSENTRY']._serialized_start=1470
+  _globals['_IMPORTDATAJOB_PRIOROUTPUTSENTRY']._serialized_end=1552
+  _globals['_IMPORTDATAJOB_IMPORTSENTRY']._serialized_start=3422
+  _globals['_IMPORTDATAJOB_IMPORTSENTRY']._serialized_end=3499
+  _globals['_EXPORTDATAJOB']._serialized_start=3502
+  _globals['_EXPORTDATAJOB']._serialized_end=4361
+  _globals['_EXPORTDATAJOB_PARAMETERSENTRY']._serialized_start=1237
+  _globals['_EXPORTDATAJOB_PARAMETERSENTRY']._serialized_end=1311
+  _globals['_EXPORTDATAJOB_INPUTSENTRY']._serialized_start=1313
+  _globals['_EXPORTDATAJOB_INPUTSENTRY']._serialized_end=1389
+  _globals['_EXPORTDATAJOB_OUTPUTSENTRY']._serialized_start=1391
+  _globals['_EXPORTDATAJOB_OUTPUTSENTRY']._serialized_end=1468
+  _globals['_EXPORTDATAJOB_PRIOROUTPUTSENTRY']._serialized_start=1470
+  _globals['_EXPORTDATAJOB_PRIOROUTPUTSENTRY']._serialized_end=1552
+  _globals['_EXPORTDATAJOB_EXPORTSENTRY']._serialized_start=4284
+  _globals['_EXPORTDATAJOB_EXPORTSENTRY']._serialized_end=4361
+  _globals['_JOBGROUP']._serialized_start=4364
+  _globals['_JOBGROUP']._serialized_end=4563
+  _globals['_SEQUENTIALJOBGROUP']._serialized_start=4565
+  _globals['_SEQUENTIALJOBGROUP']._serialized_end=4632
+  _globals['_PARALLELJOBGROUP']._serialized_start=4634
+  _globals['_PARALLELJOBGROUP']._serialized_end=4699
 # @@protoc_insertion_point(module_scope)
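The metadata change behind this generated file is the new RESULT object type: JobDefinition gains a resultId selector and a ResultDefinition message is added (jobId, statusCode, statusMessage, logFileId). A sketch of how BuildJobResultFunc above assembles one, with the surrounding job objects assumed to exist (the helper name build_result_object is illustrative, not part of the package):

import tracdap.rt.metadata as meta
import tracdap.rt._impl.util as _util

def build_result_object(job_id: meta.TagHeader) -> meta.ObjectDefinition:

    # ResultDefinition records the final status of a job and points back at it
    result_def = meta.ResultDefinition()
    result_def.jobId = _util.selector_for(job_id)
    result_def.statusCode = meta.JobStatusCode.SUCCEEDED

    # The RESULT object is wrapped in an ObjectDefinition and placed in JobResult.results
    return meta.ObjectDefinition(objectType=meta.ObjectType.RESULT, result=result_def)

In BuildJobResultFunc the returned object is stored under _util.object_key(result_id), where result_id is taken from job_config.resultMapping["trac_job_result"] by the graph builder.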