tracdap-runtime 0.7.1__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. tracdap/rt/_impl/core/__init__.py +14 -0
  2. tracdap/rt/_impl/{config_parser.py → core/config_parser.py} +36 -19
  3. tracdap/rt/_impl/{data.py → core/data.py} +136 -32
  4. tracdap/rt/_impl/core/logging.py +195 -0
  5. tracdap/rt/_impl/{models.py → core/models.py} +15 -12
  6. tracdap/rt/_impl/{repos.py → core/repos.py} +12 -3
  7. tracdap/rt/_impl/{schemas.py → core/schemas.py} +5 -5
  8. tracdap/rt/_impl/{shim.py → core/shim.py} +5 -4
  9. tracdap/rt/_impl/{storage.py → core/storage.py} +21 -10
  10. tracdap/rt/_impl/core/struct.py +547 -0
  11. tracdap/rt/_impl/{util.py → core/util.py} +1 -111
  12. tracdap/rt/_impl/{validation.py → core/validation.py} +99 -31
  13. tracdap/rt/_impl/exec/__init__.py +14 -0
  14. tracdap/rt/{_exec → _impl/exec}/actors.py +12 -14
  15. tracdap/rt/{_exec → _impl/exec}/context.py +228 -82
  16. tracdap/rt/{_exec → _impl/exec}/dev_mode.py +163 -81
  17. tracdap/rt/{_exec → _impl/exec}/engine.py +230 -105
  18. tracdap/rt/{_exec → _impl/exec}/functions.py +191 -100
  19. tracdap/rt/{_exec → _impl/exec}/graph.py +24 -36
  20. tracdap/rt/{_exec → _impl/exec}/graph_builder.py +252 -115
  21. tracdap/rt/_impl/grpc/codec.py +1 -1
  22. tracdap/rt/{_exec → _impl/grpc}/server.py +7 -6
  23. tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2.py +3 -3
  24. tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2_grpc.py +1 -1
  25. tracdap/rt/_impl/grpc/tracdap/metadata/common_pb2.py +1 -1
  26. tracdap/rt/_impl/grpc/tracdap/metadata/config_pb2.py +40 -0
  27. tracdap/rt/_impl/grpc/tracdap/metadata/config_pb2.pyi +62 -0
  28. tracdap/rt/_impl/grpc/tracdap/metadata/custom_pb2.py +1 -1
  29. tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.py +32 -20
  30. tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.pyi +48 -2
  31. tracdap/rt/_impl/grpc/tracdap/metadata/file_pb2.py +4 -2
  32. tracdap/rt/_impl/grpc/tracdap/metadata/file_pb2.pyi +8 -0
  33. tracdap/rt/_impl/grpc/tracdap/metadata/flow_pb2.py +1 -1
  34. tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.py +65 -63
  35. tracdap/rt/_impl/grpc/tracdap/metadata/job_pb2.pyi +16 -2
  36. tracdap/rt/_impl/grpc/tracdap/metadata/model_pb2.py +28 -26
  37. tracdap/rt/_impl/grpc/tracdap/metadata/model_pb2.pyi +14 -4
  38. tracdap/rt/_impl/grpc/tracdap/metadata/object_id_pb2.py +4 -4
  39. tracdap/rt/_impl/grpc/tracdap/metadata/object_id_pb2.pyi +6 -0
  40. tracdap/rt/_impl/grpc/tracdap/metadata/object_pb2.py +9 -7
  41. tracdap/rt/_impl/grpc/tracdap/metadata/object_pb2.pyi +12 -4
  42. tracdap/rt/_impl/grpc/tracdap/metadata/resource_pb2.py +18 -5
  43. tracdap/rt/_impl/grpc/tracdap/metadata/resource_pb2.pyi +42 -2
  44. tracdap/rt/_impl/grpc/tracdap/metadata/search_pb2.py +1 -1
  45. tracdap/rt/_impl/grpc/tracdap/metadata/{stoarge_pb2.py → storage_pb2.py} +4 -4
  46. tracdap/rt/_impl/grpc/tracdap/metadata/tag_pb2.py +1 -1
  47. tracdap/rt/_impl/grpc/tracdap/metadata/tag_update_pb2.py +1 -1
  48. tracdap/rt/_impl/grpc/tracdap/metadata/type_pb2.py +1 -1
  49. tracdap/rt/{_exec → _impl}/runtime.py +32 -18
  50. tracdap/rt/_impl/static_api.py +65 -37
  51. tracdap/rt/_plugins/format_csv.py +1 -1
  52. tracdap/rt/_plugins/repo_git.py +56 -11
  53. tracdap/rt/_plugins/storage_sql.py +1 -1
  54. tracdap/rt/_version.py +1 -1
  55. tracdap/rt/api/__init__.py +5 -24
  56. tracdap/rt/api/constants.py +57 -0
  57. tracdap/rt/api/experimental.py +32 -0
  58. tracdap/rt/api/hook.py +26 -7
  59. tracdap/rt/api/model_api.py +16 -0
  60. tracdap/rt/api/static_api.py +265 -127
  61. tracdap/rt/config/__init__.py +11 -11
  62. tracdap/rt/config/common.py +2 -26
  63. tracdap/rt/config/dynamic.py +28 -0
  64. tracdap/rt/config/platform.py +17 -31
  65. tracdap/rt/config/runtime.py +2 -0
  66. tracdap/rt/ext/embed.py +2 -2
  67. tracdap/rt/ext/plugins.py +3 -3
  68. tracdap/rt/launch/launch.py +12 -14
  69. tracdap/rt/metadata/__init__.py +28 -18
  70. tracdap/rt/metadata/config.py +95 -0
  71. tracdap/rt/metadata/data.py +40 -0
  72. tracdap/rt/metadata/file.py +10 -0
  73. tracdap/rt/metadata/job.py +16 -0
  74. tracdap/rt/metadata/model.py +12 -2
  75. tracdap/rt/metadata/object.py +9 -1
  76. tracdap/rt/metadata/object_id.py +6 -0
  77. tracdap/rt/metadata/resource.py +41 -1
  78. {tracdap_runtime-0.7.1.dist-info → tracdap_runtime-0.8.0.dist-info}/METADATA +23 -17
  79. tracdap_runtime-0.8.0.dist-info/RECORD +129 -0
  80. {tracdap_runtime-0.7.1.dist-info → tracdap_runtime-0.8.0.dist-info}/WHEEL +1 -1
  81. tracdap/rt/_exec/__init__.py +0 -0
  82. tracdap_runtime-0.7.1.dist-info/RECORD +0 -121
  83. /tracdap/rt/_impl/{guard_rails.py → core/guard_rails.py} +0 -0
  84. /tracdap/rt/_impl/{type_system.py → core/type_system.py} +0 -0
  85. /tracdap/rt/_impl/grpc/tracdap/metadata/{stoarge_pb2.pyi → storage_pb2.pyi} +0 -0
  86. /tracdap/rt/metadata/{stoarge.py → storage.py} +0 -0
  87. {tracdap_runtime-0.7.1.dist-info → tracdap_runtime-0.8.0.dist-info/licenses}/LICENSE +0 -0
  88. {tracdap_runtime-0.7.1.dist-info → tracdap_runtime-0.8.0.dist-info}/top_level.txt +0 -0
@@ -22,11 +22,12 @@ import tracdap.rt.api as _api
 import tracdap.rt.config as _cfg
 import tracdap.rt.metadata as _meta
 import tracdap.rt.exceptions as _ex
-import tracdap.rt._impl.config_parser as _cfg_p  # noqa
-import tracdap.rt._impl.models as _models  # noqa
-import tracdap.rt._impl.storage as _storage  # noqa
-import tracdap.rt._impl.type_system as _types  # noqa
-import tracdap.rt._impl.util as _util  # noqa
+import tracdap.rt._impl.core.config_parser as _cfg_p
+import tracdap.rt._impl.core.logging as _logging
+import tracdap.rt._impl.core.models as _models
+import tracdap.rt._impl.core.storage as _storage
+import tracdap.rt._impl.core.type_system as _types
+import tracdap.rt._impl.core.util as _util


 DEV_MODE_JOB_CONFIG = [
@@ -50,7 +51,7 @@ DEV_MODE_SYS_CONFIG = []

 class DevModeTranslator:

-    _log: tp.Optional[_util.logging.Logger] = None
+    _log: tp.Optional[_logging.Logger] = None

     @classmethod
     def translate_sys_config(cls, sys_config: _cfg.RuntimeConfig, config_mgr: _cfg_p.ConfigManager):
@@ -137,11 +138,14 @@ class DevModeTranslator:
             raise _ex.EConfigParse(msg)


-    def __init__(self, sys_config: _cfg.RuntimeConfig, config_mgr: _cfg_p.ConfigManager, scratch_dir: pathlib.Path):
+    def __init__(
+            self, sys_config: _cfg.RuntimeConfig, config_mgr: _cfg_p.ConfigManager, scratch_dir: pathlib.Path = None,
+            model_loader: _models.ModelLoader = None, storage_manager: _storage.StorageManager = None):
+
         self._sys_config = sys_config
         self._config_mgr = config_mgr
-        self._scratch_dir = scratch_dir
-        self._model_loader: tp.Optional[_models.ModelLoader] = None
+        self._model_loader = model_loader or _models.ModelLoader(self._sys_config, scratch_dir)
+        self._storage_manager = storage_manager or _storage.StorageManager(self._sys_config)

     def translate_job_config(
             self, job_config: _cfg.JobConfig,
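
The constructor above now makes `scratch_dir` optional and accepts pre-built `ModelLoader` and `StorageManager` instances; when they are omitted, the translator builds its own from `sys_config`. A minimal usage sketch under those assumptions (how `sys_config` and `config_mgr` get loaded is outside this diff):

```python
import pathlib

import tracdap.rt._impl.core.models as _models
import tracdap.rt._impl.core.storage as _storage
from tracdap.rt._impl.exec.dev_mode import DevModeTranslator

# sys_config / config_mgr are assumed to be an already-loaded
# RuntimeConfig and ConfigManager from earlier in startup

# Default wiring: the translator builds its own loader and storage manager
translator = DevModeTranslator(sys_config, config_mgr, scratch_dir=pathlib.Path("./scratch"))

# Injected wiring: reuse components the runtime has already constructed
translator = DevModeTranslator(
    sys_config, config_mgr,
    model_loader=_models.ModelLoader(sys_config, pathlib.Path("./scratch")),
    storage_manager=_storage.StorageManager(sys_config))
```

This also explains the removals in the next two hunks: the loader is no longer created and discarded inside `translate_job_config`, so only the scope create/destroy calls remain there.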
@@ -150,8 +154,6 @@

         try:
             self._log.info(f"Applying dev mode config translation to job config")
-
-            self._model_loader = _models.ModelLoader(self._sys_config, self._scratch_dir)
             self._model_loader.create_scope("DEV_MODE_TRANSLATION")

             job_config = copy.deepcopy(job_config)
@@ -168,7 +170,6 @@

         finally:
             self._model_loader.destroy_scope("DEV_MODE_TRANSLATION")
-            self._model_loader = None

     def translate_job_def(
             self, job_config: _cfg.JobConfig, job_def: _meta.JobDefinition,
@@ -694,7 +695,7 @@

             model_selector = job_def.runFlow.models.get(source.node)
             model_obj = _util.get_job_resource(model_selector, job_config)
-            model_input = model_obj.model.inputs.get(source.socket)
+            model_input = model_obj.model.outputs.get(source.socket)
             model_outputs.append(model_input)

         if len(model_outputs) == 0:
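
The one-word fix above corrects a genuine bug: when resolving the upstream source of a flow output, `source.socket` names an *output* socket of the producing model, so looking it up in `model.inputs` always returned `None` and `model_outputs` was populated with empty entries. An illustrative sketch of the failure mode, with plain dicts standing in for the model definition:

```python
# Stand-in mappings for a model's input and output sockets
model_inputs = {"customer_data": "input schema ..."}
model_outputs = {"scored_data": "output schema ..."}

source_socket = "scored_data"        # flow outputs draw from output sockets

print(model_inputs.get(source_socket))    # None - the 0.7.1 lookup
print(model_outputs.get(source_socket))   # "output schema ..." - the 0.8.0 fix
```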
@@ -765,7 +766,7 @@
             p_spec = param_specs[p_name]

             try:
-                cls._log.info(f"Encoding parameter [{p_name}] as {p_spec.paramType.basicType}")
+                cls._log.info(f"Encoding parameter [{p_name}] as {p_spec.paramType.basicType.name}")
                 encoded_value = _types.MetadataCodec.convert_value(p_value, p_spec.paramType)
                 encoded_values[p_name] = encoded_value
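
Appending `.name` tidies the log message: formatting a Python enum member directly yields the qualified form (`BasicType.FLOAT`), while `.name` yields just the member name. A stand-alone illustration with a stand-in enum (not tracdap's real metadata class):

```python
import enum

class BasicType(enum.Enum):   # stand-in for the metadata enum
    FLOAT = 7

print(f"Encoding parameter [x] as {BasicType.FLOAT}")       # ... as BasicType.FLOAT
print(f"Encoding parameter [x] as {BasicType.FLOAT.name}")  # ... as FLOAT
```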
 
@@ -803,42 +804,50 @@
             if not (isinstance(input_value, str) and input_value in job_resources):

                 model_input = required_inputs[input_key]
-                input_schema = model_input.schema if model_input and not model_input.dynamic else None

-                input_id = self._process_input_or_output(
-                    input_key, input_value, job_resources,
-                    new_unique_file=False, schema=input_schema)
+                if model_input.objectType == _meta.ObjectType.DATA:
+                    schema = model_input.schema if model_input and not model_input.dynamic else None
+                    input_id = self._process_data_socket(input_key, input_value, schema, job_resources, new_unique_file=False)
+                elif model_input.objectType == _meta.ObjectType.FILE:
+                    file_type = model_input.fileType
+                    input_id = self._process_file_socket(input_key, input_value, file_type, job_resources, new_unique_file=False)
+                else:
+                    raise _ex.EUnexpected()

                 job_inputs[input_key] = _util.selector_for(input_id)

         for output_key, output_value in job_outputs.items():
             if not (isinstance(output_value, str) and output_value in job_resources):

-                model_output= required_outputs[output_key]
-                output_schema = model_output.schema if model_output and not model_output.dynamic else None
+                model_output = required_outputs[output_key]

-                output_id = self._process_input_or_output(
-                    output_key, output_value, job_resources,
-                    new_unique_file=True, schema=output_schema)
+                if model_output.objectType == _meta.ObjectType.DATA:
+                    schema = model_output.schema if model_output and not model_output.dynamic else None
+                    output_id = self._process_data_socket(output_key, output_value, schema, job_resources, new_unique_file=True)
+                elif model_output.objectType == _meta.ObjectType.FILE:
+                    file_type = model_output.fileType
+                    output_id = self._process_file_socket(output_key, output_value, file_type, job_resources, new_unique_file=True)
+                else:
+                    raise _ex.EUnexpected()

                 job_outputs[output_key] = _util.selector_for(output_id)

         return job_config, job_def

-    def _process_input_or_output(
-            self, data_key, data_value,
-            resources: tp.Dict[str, _meta.ObjectDefinition],
-            new_unique_file=False,
-            schema: tp.Optional[_meta.SchemaDefinition] = None) \
+    def _process_data_socket(
+            self, data_key, data_value, schema: tp.Optional[_meta.SchemaDefinition],
+            resources: tp.Dict[str, _meta.ObjectDefinition], new_unique_file=False) \
             -> _meta.TagHeader:

         data_id = _util.new_object_id(_meta.ObjectType.DATA)
         storage_id = _util.new_object_id(_meta.ObjectType.STORAGE)

+        self._log.info(f"Generating data definition for [{data_key}] with ID = [{_util.object_key(data_id)}]")
+
         if isinstance(data_value, str):
             storage_path = data_value
             storage_key = self._sys_config.storage.defaultBucket
-            storage_format = self.infer_format(storage_path, self._sys_config.storage)
+            storage_format = self.infer_format(storage_path, self._sys_config.storage, schema)
             snap_version = 1

         elif isinstance(data_value, dict):
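
The single `_process_input_or_output` call is replaced by a dispatch on `objectType`, so DATA and FILE sockets each get a dedicated path and any other object type raises `EUnexpected`. Both paths accept the same two value shapes for a socket. A hedged sketch of a dev-mode job config fragment (the key names `path`, `storageKey` and `format` come from the diff; the values are illustrative):

```python
job_inputs = {
    # str form: a storage path, resolved against the default bucket
    "customer_data": "inputs/customer_data.csv",

    # dict form: explicit storage settings
    "account_data": {
        "path": "inputs/account_data.parquet",
        "storageKey": "alt_bucket",   # optional, defaults to storage.defaultBucket
        "format": "parquet",          # optional, otherwise inferred from the extension
    },
}
```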
@@ -849,74 +858,135 @@
                 raise _ex.EConfigParse(f"Invalid configuration for input [{data_key}] (missing required value 'path'")

             storage_key = data_value.get("storageKey") or self._sys_config.storage.defaultBucket
-            storage_format = data_value.get("format") or self.infer_format(storage_path, self._sys_config.storage)
+            storage_format = data_value.get("format") or self.infer_format(storage_path, self._sys_config.storage, schema)
             snap_version = 1

         else:
             raise _ex.EConfigParse(f"Invalid configuration for input '{data_key}'")

-        self._log.info(f"Generating data definition for [{data_key}] with ID = [{_util.object_key(data_id)}]")
-
         # For unique outputs, increment the snap number to find a new unique snap
         # These are not incarnations, bc likely in dev mode model code and inputs are changing
         # Incarnations are for recreation of a dataset using the exact same code path and inputs

         if new_unique_file:
+            storage_path, snap_version = self._new_unique_file(data_key, storage_key, storage_path, snap_version)

-            x_storage_mgr = _storage.StorageManager(self._sys_config)
-            x_storage = x_storage_mgr.get_file_storage(storage_key)
-            x_orig_path = pathlib.PurePath(storage_path)
-            x_name = x_orig_path.name
-
-            if x_storage.exists(str(x_orig_path.parent)):
-                listing = x_storage.ls(str(x_orig_path.parent))
-                existing_files = list(map(lambda stat: stat.file_name, listing))
-            else:
-                existing_files = []
-
-            while x_name in existing_files:
+        part_key = _meta.PartKey(opaqueKey="part-root", partType=_meta.PartType.PART_ROOT)
+        delta_index = 1
+        incarnation_index = 1

-                snap_version += 1
-                x_name = f"{x_orig_path.stem}-{snap_version}"
-                storage_path = str(x_orig_path.parent.joinpath(x_name))
+        # This is also defined in functions.DynamicDataSpecFunc, maybe centralize?
+        data_item = f"data/table/{data_id.objectId}/{part_key.opaqueKey}/snap-{snap_version}/delta-{delta_index}"

-            self._log.info(f"Output for [{data_key}] will be snap version {snap_version}")
+        data_obj = self._generate_data_definition(
+            part_key, snap_version, delta_index, data_item,
+            schema, storage_id)

-        data_obj, storage_obj = self._generate_input_definition(
-            data_id, storage_id, storage_key, storage_path, storage_format,
-            snap_index=snap_version, delta_index=1, incarnation_index=1,
-            schema=schema)
+        storage_obj = self._generate_storage_definition(
+            storage_id, storage_key, storage_path, storage_format,
+            data_item, incarnation_index)

         resources[_util.object_key(data_id)] = data_obj
         resources[_util.object_key(storage_id)] = storage_obj

         return data_id

+    def _process_file_socket(
+            self, file_key, file_value, file_type: _meta.FileType,
+            resources: tp.Dict[str, _meta.ObjectDefinition], new_unique_file=False) \
+            -> _meta.TagHeader:
+
+        file_id = _util.new_object_id(_meta.ObjectType.FILE)
+        storage_id = _util.new_object_id(_meta.ObjectType.STORAGE)
+
+        self._log.info(f"Generating file definition for [{file_key}] with ID = [{_util.object_key(file_id)}]")
+
+        if isinstance(file_value, str):
+
+            storage_key = self._sys_config.storage.defaultBucket
+            storage_path = file_value
+
+        elif isinstance(file_value, dict):
+
+            storage_key = file_value.get("storageKey") or self._sys_config.storage.defaultBucket
+            storage_path = file_value.get("path")
+
+            if not storage_path:
+                raise _ex.EConfigParse(f"Invalid configuration for input [{file_key}] (missing required value 'path'")
+
+        else:
+            raise _ex.EConfigParse(f"Invalid configuration for input '{file_key}'")
+
+        storage_format = "application/x-binary"
+        file_version = 1
+
+        if new_unique_file:
+            storage_path, file_version = self._new_unique_file(file_key, storage_key, storage_path, file_version)
+            file_size = 0
+        else:
+            storage = self._storage_manager.get_file_storage(storage_key)
+            file_size = storage.size(storage_path)
+
+        data_item = f"file/{file_id.objectId}/version-{file_version}"
+        file_name = f"{file_key}.{file_type.extension}"
+
+        file_obj = self._generate_file_definition(
+            file_name, file_type, file_size,
+            storage_id, data_item)
+
+        storage_obj = self._generate_storage_definition(
+            storage_id, storage_key, storage_path, storage_format,
+            data_item, incarnation_index=1)
+
+        resources[_util.object_key(file_id)] = file_obj
+        resources[_util.object_key(storage_id)] = storage_obj
+
+        return file_id
+
     @staticmethod
-    def infer_format(storage_path: str, storage_config: _cfg.StorageConfig):
+    def infer_format(storage_path: str, storage_config: _cfg.StorageConfig, schema: tp.Optional[_meta.SchemaDefinition]):
+
+        schema_type = schema.schemaType if schema and schema.schemaType else _meta.SchemaType.TABLE

         if re.match(r'.*\.\w+$', storage_path):
             extension = pathlib.Path(storage_path).suffix
-            codec = _storage.FormatManager.get_data_format(extension, format_options={})
-            return codec.format_code()
+            # Only try to map TABLE codecs through IDataFormat for now
+            if schema_type == _meta.SchemaType.TABLE:
+                codec = _storage.FormatManager.get_data_format(extension, format_options={})
+                return codec.format_code()
+            else:
+                return extension[1:] if extension.startswith(".") else extension

         else:
             return storage_config.defaultFormat

-    @classmethod
-    def _generate_input_definition(
-            cls, data_id: _meta.TagHeader, storage_id: _meta.TagHeader,
-            storage_key: str, storage_path: str, storage_format: str,
-            snap_index: int, delta_index: int, incarnation_index: int,
-            schema: tp.Optional[_meta.SchemaDefinition] = None) \
-            -> (_meta.ObjectDefinition, _meta.ObjectDefinition):
+    def _new_unique_file(self, socket_name, storage_key, storage_path, version):

-        part_key = _meta.PartKey(
-            opaqueKey="part-root",
-            partType=_meta.PartType.PART_ROOT)
+        x_storage = self._storage_manager.get_file_storage(storage_key)
+        x_orig_path = pathlib.PurePath(storage_path)
+        x_name = x_orig_path.name

-        # This is also defined in functions.DynamicDataSpecFunc, maybe centralize?
-        data_item = f"data/table/{data_id.objectId}/{part_key.opaqueKey}/snap-{snap_index}/delta-{delta_index}"
+        if x_storage.exists(str(x_orig_path.parent)):
+            listing = x_storage.ls(str(x_orig_path.parent))
+            existing_files = list(map(lambda stat: stat.file_name, listing))
+        else:
+            existing_files = []
+
+        while x_name in existing_files:
+
+            version += 1
+            x_name = f"{x_orig_path.stem}-{version}{x_orig_path.suffix}"
+            storage_path = str(x_orig_path.parent.joinpath(x_name))
+
+        self._log.info(f"Output for [{socket_name}] will be version {version}")
+
+        return storage_path, version
+
+    @classmethod
+    def _generate_data_definition(
+            cls, part_key: _meta.PartKey, snap_index: int, delta_index: int, data_item: str,
+            schema: tp.Optional[_meta.SchemaDefinition], storage_id: _meta.TagHeader) \
+            -> (_meta.ObjectDefinition, _meta.ObjectDefinition):

         delta = _meta.DataDefinition.Delta(
             deltaIndex=delta_index,
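
The snap-bumping loop that was previously inlined here is now the shared `_new_unique_file` helper, used for both data snaps and file versions. Note the rewritten loop also reattaches the file suffix (`{x_orig_path.suffix}`), which the 0.7.1 inline version dropped when renaming. A self-contained sketch of the naming behaviour, with the storage listing replaced by a plain list:

```python
import pathlib

def new_unique_name(storage_path: str, existing_files: list, version: int = 1):
    # Mirrors the _new_unique_file loop, minus the storage manager calls
    orig_path = pathlib.PurePath(storage_path)
    name = orig_path.name
    while name in existing_files:
        version += 1
        name = f"{orig_path.stem}-{version}{orig_path.suffix}"
        storage_path = str(orig_path.parent.joinpath(name))
    return storage_path, version

# "result.csv" and "result-2.csv" already exist, so the next output is version 3
print(new_unique_name("outputs/result.csv", ["result.csv", "result-2.csv"]))
# -> ('outputs/result-3.csv', 3)
```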
@@ -930,17 +1000,31 @@
             partKey=part_key,
             snap=snap)

-        data_def = _meta.DataDefinition(parts={})
+        data_def = _meta.DataDefinition()
         data_def.parts[part_key.opaqueKey] = part
+        data_def.schema = schema
+        data_def.storageId = _util.selector_for(storage_id)

-        if schema is not None:
-            data_def.schema = schema
-        else:
-            data_def.schema = None
+        return _meta.ObjectDefinition(objectType=_meta.ObjectType.DATA, data=data_def)

-        data_def.storageId = _meta.TagSelector(
-            _meta.ObjectType.STORAGE, storage_id.objectId,
-            objectVersion=storage_id.objectVersion, latestTag=True)
+    @classmethod
+    def _generate_file_definition(
+            cls, file_name: str, file_type: _meta.FileType, file_size: int,
+            storage_id: _meta.TagHeader, data_item: str) \
+            -> _meta.ObjectDefinition:
+
+        file_def = _meta.FileDefinition(
+            name=file_name, extension=file_type.extension, mimeType=file_type.mimeType,
+            storageId=_util.selector_for(storage_id), dataItem=data_item, size=file_size)
+
+        return _meta.ObjectDefinition(objectType=_meta.ObjectType.FILE, file=file_def)
+
+    @classmethod
+    def _generate_storage_definition(
+            cls, storage_id: _meta.TagHeader,
+            storage_key: str, storage_path: str, storage_format: str,
+            data_item: str, incarnation_index: int) \
+            -> _meta.ObjectDefinition:

         storage_copy = _meta.StorageCopy(
             storageKey=storage_key,
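
The hand-rolled `TagSelector` construction removed above is delegated to `_util.selector_for`. Judging only from the lines it replaces, the helper presumably builds a latest-tag selector from a tag header, roughly:

```python
import tracdap.rt.metadata as _meta

# Assumption inferred from the removed lines, not the actual _util source
def selector_for(object_id: _meta.TagHeader) -> _meta.TagSelector:
    return _meta.TagSelector(
        object_id.objectType, object_id.objectId,
        objectVersion=object_id.objectVersion, latestTag=True)
```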
@@ -957,16 +1041,14 @@
         storage_item = _meta.StorageItem(
             incarnations=[storage_incarnation])

-        storage_def = _meta.StorageDefinition(dataItems={})
-        storage_def.dataItems[delta.dataItem] = storage_item
+        storage_def = _meta.StorageDefinition()
+        storage_def.dataItems[data_item] = storage_item

         if storage_format.lower() == "csv":
             storage_def.storageOptions["lenient_csv_parser"] = _types.MetadataCodec.encode_value(True)

-        data_obj = _meta.ObjectDefinition(objectType=_meta.ObjectType.DATA, data=data_def)
-        storage_obj = _meta.ObjectDefinition(objectType=_meta.ObjectType.STORAGE, storage=storage_def)
+        return _meta.ObjectDefinition(objectType=_meta.ObjectType.STORAGE, storage=storage_def)

-        return data_obj, storage_obj


-DevModeTranslator._log = _util.logger_for_class(DevModeTranslator)
+DevModeTranslator._log = _logging.logger_for_class(DevModeTranslator)