tracdap-runtime 0.8.0b2__py3-none-any.whl → 0.8.0b4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. tracdap/rt/_impl/core/__init__.py +14 -0
  2. tracdap/rt/_impl/{config_parser.py → core/config_parser.py} +59 -35
  3. tracdap/rt/_impl/{data.py → core/data.py} +64 -33
  4. tracdap/rt/_impl/{models.py → core/models.py} +6 -6
  5. tracdap/rt/_impl/{repos.py → core/repos.py} +1 -1
  6. tracdap/rt/_impl/{schemas.py → core/schemas.py} +4 -4
  7. tracdap/rt/_impl/{shim.py → core/shim.py} +3 -3
  8. tracdap/rt/_impl/{storage.py → core/storage.py} +8 -5
  9. tracdap/rt/_impl/core/struct.py +547 -0
  10. tracdap/rt/_impl/{type_system.py → core/type_system.py} +73 -33
  11. tracdap/rt/_impl/{validation.py → core/validation.py} +58 -17
  12. tracdap/rt/_impl/exec/__init__.py +14 -0
  13. tracdap/rt/{_exec → _impl/exec}/actors.py +9 -12
  14. tracdap/rt/{_exec → _impl/exec}/context.py +70 -16
  15. tracdap/rt/{_exec → _impl/exec}/dev_mode.py +31 -20
  16. tracdap/rt/{_exec → _impl/exec}/engine.py +9 -9
  17. tracdap/rt/{_exec → _impl/exec}/functions.py +89 -40
  18. tracdap/rt/{_exec → _impl/exec}/graph.py +1 -1
  19. tracdap/rt/{_exec → _impl/exec}/graph_builder.py +2 -2
  20. tracdap/rt/{_exec → _impl/grpc}/server.py +4 -4
  21. tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2.py +2 -2
  22. tracdap/rt/_impl/grpc/tracdap/api/internal/runtime_pb2_grpc.py +1 -1
  23. tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.py +31 -19
  24. tracdap/rt/_impl/grpc/tracdap/metadata/data_pb2.pyi +48 -2
  25. tracdap/rt/{_exec → _impl}/runtime.py +13 -13
  26. tracdap/rt/_impl/static_api.py +13 -5
  27. tracdap/rt/_plugins/format_csv.py +1 -1
  28. tracdap/rt/_plugins/storage_sql.py +13 -6
  29. tracdap/rt/_version.py +1 -1
  30. tracdap/rt/api/experimental.py +32 -0
  31. tracdap/rt/api/hook.py +11 -0
  32. tracdap/rt/config/__init__.py +8 -10
  33. tracdap/rt/config/common.py +0 -34
  34. tracdap/rt/config/platform.py +14 -26
  35. tracdap/rt/ext/embed.py +2 -2
  36. tracdap/rt/ext/plugins.py +2 -2
  37. tracdap/rt/launch/launch.py +3 -3
  38. tracdap/rt/metadata/__init__.py +11 -9
  39. tracdap/rt/metadata/data.py +40 -0
  40. {tracdap_runtime-0.8.0b2.dist-info → tracdap_runtime-0.8.0b4.dist-info}/METADATA +15 -13
  41. {tracdap_runtime-0.8.0b2.dist-info → tracdap_runtime-0.8.0b4.dist-info}/RECORD +47 -45
  42. {tracdap_runtime-0.8.0b2.dist-info → tracdap_runtime-0.8.0b4.dist-info}/WHEEL +1 -1
  43. tracdap/rt/_exec/__init__.py +0 -0
  44. /tracdap/rt/_impl/{guard_rails.py → core/guard_rails.py} +0 -0
  45. /tracdap/rt/_impl/{logging.py → core/logging.py} +0 -0
  46. /tracdap/rt/_impl/{util.py → core/util.py} +0 -0
  47. {tracdap_runtime-0.8.0b2.dist-info → tracdap_runtime-0.8.0b4.dist-info}/LICENSE +0 -0
  48. {tracdap_runtime-0.8.0b2.dist-info → tracdap_runtime-0.8.0b4.dist-info}/top_level.txt +0 -0
@@ -133,9 +133,13 @@ class MetadataCodec:
133
133
 
134
134
  if basic_type == _meta.BasicType.ARRAY:
135
135
  items = value.arrayValue.items
136
- return list(map(lambda x: MetadataCodec._decode_value_for_type(x, type_desc.arrayType), items))
136
+ return list(MetadataCodec._decode_value_for_type(x, type_desc.arrayType) for x in items)
137
137
 
138
- raise _ex.ETracInternal(f"Decoding value type [{basic_type}] is not supported yet")
138
+ if basic_type == _meta.BasicType.MAP:
139
+ items = value.mapValue.entries.items()
140
+ return dict((k, MetadataCodec._decode_value_for_type(v, type_desc.mapType)) for k, v in items)
141
+
142
+ raise _ex.ETracInternal(f"Cannot decode value of type [{basic_type}]")
139
143
 
140
144
  @classmethod
141
145
  def encode_value(cls, value: tp.Any) -> _meta.Value:
@@ -183,19 +187,36 @@ class MetadataCodec:
183
187
  if any(map(lambda x: type(x) != array_raw_type, value)):
184
188
  raise _ex.ETracInternal("Cannot encode a list with values of different types")
185
189
 
186
- encoded_items = list(map(lambda x: cls.convert_value(x, array_trac_type), value))
190
+ encoded_items = list(map(lambda x: cls.convert_value(x, array_trac_type, True), value))
187
191
 
188
192
  return _meta.Value(
189
193
  _meta.TypeDescriptor(_meta.BasicType.ARRAY, arrayType=array_trac_type),
190
194
  arrayValue=_meta.ArrayValue(encoded_items))
191
195
 
192
- raise _ex.ETracInternal(f"Value type [{type(value)}] is not supported yet")
196
+ if isinstance(value, dict):
197
+
198
+ if len(value) == 0:
199
+ raise _ex.ETracInternal("Cannot encode an empty dict")
200
+
201
+ map_raw_type = type(next(iter(value.values())))
202
+ map_trac_type = TypeMapping.python_to_trac(map_raw_type)
203
+
204
 + if any(map(lambda x: type(x) != map_raw_type, value.values())):
205
+ raise _ex.ETracInternal("Cannot encode a dict with values of different types")
206
+
207
+ encoded_entries = dict(map(lambda kv: (kv[0], cls.convert_value(kv[1], map_trac_type, True)), value.items()))
208
+
209
+ return _meta.Value(
210
 + _meta.TypeDescriptor(_meta.BasicType.MAP, mapType=map_trac_type),
211
+ mapValue=_meta.MapValue(encoded_entries))
212
+
213
+ raise _ex.ETracInternal(f"Cannot encode value of type [{type(value).__name__}]")
193
214
 
194
215
  @classmethod
195
- def convert_value(cls, raw_value: tp.Any, type_desc: _meta.TypeDescriptor):
216
+ def convert_value(cls, raw_value: tp.Any, type_desc: _meta.TypeDescriptor, nested: bool = False):
196
217
 
197
218
  if type_desc.basicType == _meta.BasicType.BOOLEAN:
198
- return cls.convert_boolean_value(raw_value)
219
+ return cls.convert_boolean_value(raw_value, nested)
199
220
 
200
221
  if type_desc.basicType == _meta.BasicType.INTEGER:
201
222
  return cls.convert_integer_value(raw_value)
@@ -218,78 +239,97 @@ class MetadataCodec:
218
239
  if type_desc.basicType == _meta.BasicType.ARRAY:
219
240
  return cls.convert_array_value(raw_value, type_desc.arrayType)
220
241
 
242
+ if type_desc.basicType == _meta.BasicType.MAP:
243
+ return cls.convert_map_value(raw_value, type_desc.mapType)
244
+
221
245
  raise _ex.ETracInternal(f"Conversion to value type [{type_desc.basicType.name}] is not supported yet")
222
246
 
223
247
  @staticmethod
224
248
  def convert_array_value(raw_value: tp.List[tp.Any], array_type: _meta.TypeDescriptor) -> _meta.Value:
225
249
 
226
- type_desc = _meta.TypeDescriptor(_meta.BasicType.ARRAY, array_type)
250
+ type_desc = _meta.TypeDescriptor(basicType=_meta.BasicType.ARRAY, arrayType=array_type)
227
251
 
228
252
  if not isinstance(raw_value, list):
229
- msg = f"Value of type [{type(raw_value)}] cannot be converted to {_meta.BasicType.ARRAY.name}"
253
+ msg = f"Value of type [{type(raw_value).__name__}] cannot be converted to {_meta.BasicType.ARRAY.name}"
230
254
  raise _ex.ETracInternal(msg)
231
255
 
232
- items = list(map(lambda x: MetadataCodec.convert_value(x, array_type), raw_value))
256
+ items = list(map(lambda x: MetadataCodec.convert_value(x, array_type, True), raw_value))
233
257
 
234
258
  return _meta.Value(type_desc, arrayValue=_meta.ArrayValue(items))
235
259
 
236
260
  @staticmethod
237
- def convert_boolean_value(raw_value: tp.Any) -> _meta.Value:
261
+ def convert_map_value(raw_value: tp.Dict[str, tp.Any], map_type: _meta.TypeDescriptor) -> _meta.Value:
262
+
263
+ type_desc = _meta.TypeDescriptor(basicType=_meta.BasicType.MAP, mapType=map_type)
264
+
265
+ if not isinstance(raw_value, dict):
266
+ msg = f"Value of type [{type(raw_value).__name__}] cannot be converted to {_meta.BasicType.MAP.name}"
267
+ raise _ex.ETracInternal(msg)
268
+
269
+ entries = dict(map(lambda kv: (kv[0], MetadataCodec.convert_value(kv[1], map_type, True)), raw_value.items()))
270
+
271
+ return _meta.Value(type_desc, mapValue=_meta.MapValue(entries))
272
+
273
+ @staticmethod
274
+ def convert_boolean_value(raw_value: tp.Any, nested: bool = False) -> _meta.Value:
238
275
 
239
- type_desc = _meta.TypeDescriptor(_meta.BasicType.BOOLEAN)
276
+ type_desc = _meta.TypeDescriptor(_meta.BasicType.BOOLEAN) if not nested else None
240
277
 
241
278
  if isinstance(raw_value, bool):
242
279
  return _meta.Value(type_desc, booleanValue=raw_value)
243
280
 
244
- msg = f"Value of type [{type(raw_value)}] cannot be converted to {_meta.BasicType.BOOLEAN.name}"
281
+ msg = f"Value of type [{type(raw_value).__name__}] cannot be converted to {_meta.BasicType.BOOLEAN.name}"
245
282
  raise _ex.ETracInternal(msg)
246
283
 
247
284
  @staticmethod
248
- def convert_integer_value(raw_value: tp.Any) -> _meta.Value:
285
+ def convert_integer_value(raw_value: tp.Any, nested: bool = False) -> _meta.Value:
249
286
 
250
- type_desc = _meta.TypeDescriptor(_meta.BasicType.INTEGER)
287
+ type_desc = _meta.TypeDescriptor(_meta.BasicType.INTEGER) if not nested else None
251
288
 
252
- if isinstance(raw_value, int):
289
+ # isinstance(bool_value, int) returns True! An explicit check is needed
290
+ if isinstance(raw_value, int) and not isinstance(raw_value, bool):
253
291
  return _meta.Value(type_desc, integerValue=raw_value)
254
292
 
255
293
  if isinstance(raw_value, float) and raw_value.is_integer():
256
294
  return _meta.Value(type_desc, integerValue=int(raw_value))
257
295
 
258
- msg = f"Value of type [{type(raw_value)}] cannot be converted to {_meta.BasicType.INTEGER.name}"
296
+ msg = f"Value of type [{type(raw_value).__name__}] cannot be converted to {_meta.BasicType.INTEGER.name}"
259
297
  raise _ex.ETracInternal(msg)
260
298
 
261
299
  @staticmethod
262
- def convert_float_value(raw_value: tp.Any) -> _meta.Value:
300
+ def convert_float_value(raw_value: tp.Any, nested: bool = False) -> _meta.Value:
263
301
 
264
- type_desc = _meta.TypeDescriptor(_meta.BasicType.FLOAT)
302
+ type_desc = _meta.TypeDescriptor(_meta.BasicType.FLOAT) if not nested else None
265
303
 
266
304
  if isinstance(raw_value, float):
267
305
  return _meta.Value(type_desc, floatValue=raw_value)
268
306
 
269
- if isinstance(raw_value, int):
307
+ # isinstance(bool_value, int) returns True! An explicit check is needed
308
+ if isinstance(raw_value, int) and not isinstance(raw_value, bool):
270
309
  return _meta.Value(type_desc, floatValue=float(raw_value))
271
310
 
272
- msg = f"Value of type [{type(raw_value)}] cannot be converted to {_meta.BasicType.FLOAT.name}"
311
+ msg = f"Value of type [{type(raw_value).__name__}] cannot be converted to {_meta.BasicType.FLOAT.name}"
273
312
  raise _ex.ETracInternal(msg)
274
313
 
275
314
  @staticmethod
276
- def convert_decimal_value(raw_value: tp.Any) -> _meta.Value:
315
+ def convert_decimal_value(raw_value: tp.Any, nested: bool = False) -> _meta.Value:
277
316
 
278
- type_desc = _meta.TypeDescriptor(_meta.BasicType.DECIMAL)
317
+ type_desc = _meta.TypeDescriptor(_meta.BasicType.DECIMAL) if not nested else None
279
318
 
280
319
  if isinstance(raw_value, decimal.Decimal):
281
320
  return _meta.Value(type_desc, decimalValue=_meta.DecimalValue(str(raw_value)))
282
321
 
283
- if isinstance(raw_value, int) or isinstance(raw_value, float):
322
+ # isinstance(bool_value, int) returns True! An explicit check is needed
323
 + if (isinstance(raw_value, int) or isinstance(raw_value, float)) and not isinstance(raw_value, bool):
284
324
  return _meta.Value(type_desc, decimalValue=_meta.DecimalValue(str(raw_value)))
285
325
 
286
- msg = f"Value of type [{type(raw_value)}] cannot be converted to {_meta.BasicType.DECIMAL.name}"
326
+ msg = f"Value of type [{type(raw_value).__name__}] cannot be converted to {_meta.BasicType.DECIMAL.name}"
287
327
  raise _ex.ETracInternal(msg)
288
328
 
289
329
  @staticmethod
290
- def convert_string_value(raw_value: tp.Any) -> _meta.Value:
330
+ def convert_string_value(raw_value: tp.Any, nested: bool = False) -> _meta.Value:
291
331
 
292
- type_desc = _meta.TypeDescriptor(_meta.BasicType.STRING)
332
+ type_desc = _meta.TypeDescriptor(_meta.BasicType.STRING) if not nested else None
293
333
 
294
334
  if isinstance(raw_value, str):
295
335
  return _meta.Value(type_desc, stringValue=raw_value)
@@ -301,13 +341,13 @@ class MetadataCodec:
301
341
 
302
342
  return _meta.Value(type_desc, stringValue=str(raw_value))
303
343
 
304
- msg = f"Value of type [{type(raw_value)}] cannot be converted to {_meta.BasicType.STRING.name}"
344
+ msg = f"Value of type [{type(raw_value).__name__}] cannot be converted to {_meta.BasicType.STRING.name}"
305
345
  raise _ex.ETracInternal(msg)
306
346
 
307
347
  @staticmethod
308
- def convert_date_value(raw_value: tp.Any) -> _meta.Value:
348
+ def convert_date_value(raw_value: tp.Any, nested: bool = False) -> _meta.Value:
309
349
 
310
- type_desc = _meta.TypeDescriptor(_meta.BasicType.DATE)
350
+ type_desc = _meta.TypeDescriptor(_meta.BasicType.DATE) if not nested else None
311
351
 
312
352
  if isinstance(raw_value, dt.date):
313
353
  return _meta.Value(type_desc, dateValue=_meta.DateValue(isoDate=raw_value.isoformat()))
@@ -316,13 +356,13 @@ class MetadataCodec:
316
356
  date_value = dt.date.fromisoformat(raw_value)
317
357
  return _meta.Value(type_desc, dateValue=_meta.DateValue(isoDate=date_value.isoformat()))
318
358
 
319
- msg = f"Value of type [{type(raw_value)}] cannot be converted to {_meta.BasicType.DATE.name}"
359
+ msg = f"Value of type [{type(raw_value).__name__}] cannot be converted to {_meta.BasicType.DATE.name}"
320
360
  raise _ex.ETracInternal(msg)
321
361
 
322
362
  @staticmethod
323
- def convert_datetime_value(raw_value: tp.Any) -> _meta.Value:
363
+ def convert_datetime_value(raw_value: tp.Any, nested: bool = False) -> _meta.Value:
324
364
 
325
- type_desc = _meta.TypeDescriptor(_meta.BasicType.DATETIME)
365
+ type_desc = _meta.TypeDescriptor(_meta.BasicType.DATETIME) if not nested else None
326
366
 
327
367
  if isinstance(raw_value, dt.datetime):
328
368
  return _meta.Value(type_desc, datetimeValue=_meta.DatetimeValue(isoDatetime=raw_value.isoformat()))
@@ -331,5 +371,5 @@ class MetadataCodec:
331
371
  datetime_value = dt.datetime.fromisoformat(raw_value)
332
372
  return _meta.Value(type_desc, datetimeValue=_meta.DatetimeValue(isoDatetime=datetime_value.isoformat()))
333
373
 
334
- msg = f"Value of type [{type(raw_value)}] cannot be converted to {_meta.BasicType.DATETIME.name}"
374
+ msg = f"Value of type [{type(raw_value).__name__}] cannot be converted to {_meta.BasicType.DATETIME.name}"
335
375
  raise _ex.ETracInternal(msg)
@@ -22,8 +22,8 @@ import pathlib
22
22
 
23
23
  import tracdap.rt.metadata as meta
24
24
  import tracdap.rt.exceptions as ex
25
- import tracdap.rt._impl.logging as log
26
- import tracdap.rt._impl.util as util
25
+ import tracdap.rt._impl.core.logging as log
26
+ import tracdap.rt._impl.core.util as util
27
27
 
28
28
  # _Named placeholder type from API hook is needed for API type checking
29
29
  from tracdap.rt.api.hook import _Named # noqa
@@ -67,9 +67,17 @@ class SkipValidation(tp.Generic[T_SKIP_VAL]):
67
67
 
68
68
  class _TypeValidator:
69
69
 
70
- # The metaclass for generic types varies between versions of the typing library
71
- # To work around this, detect the correct metaclass by inspecting a generic type variable
72
- __generic_metaclass = type(tp.List[object])
70
+ # Support both new and old styles for generic, union and optional types
71
+ # Old-style annotations are still valid, even when the new style is fully supported
72
+ __generic_metaclass = [
73
+ types.GenericAlias,
74
+ type(tp.List[object]),
75
+ type(tp.Optional[object])
76
+ ]
77
+
78
+ # UnionType was added to the types module in Python 3.10, we support 3.9 (Jan 2025)
79
+ if hasattr(types, "UnionType"):
80
+ __generic_metaclass.append(types.UnionType)
73
81
 
74
82
  # Cache method signatures to avoid inspection on every call
75
83
  # Inspecting a function signature can take ~ half a second in Python 3.7
@@ -204,7 +212,7 @@ class _TypeValidator:
204
212
  if value.skip_type == expected_type:
205
213
  return True
206
214
 
207
- if isinstance(expected_type, cls.__generic_metaclass):
215
+ if any(map(lambda _t: isinstance(expected_type, _t), cls.__generic_metaclass)):
208
216
 
209
217
  origin = util.get_origin(expected_type)
210
218
  args = util.get_args(expected_type)
@@ -240,6 +248,33 @@ class _TypeValidator:
240
248
  all(map(lambda k: cls._validate_type(key_type, k), value.keys())) and \
241
249
  all(map(lambda v: cls._validate_type(value_type, v), value.values()))
242
250
 
251
+ if origin is type:
252
+
253
+ if not isinstance(value, type):
254
+ return False
255
+
256
+ type_arg = args[0]
257
+
258
+ if type_arg == tp.Any:
259
+ return True
260
+
261
+ if isinstance(type_arg, tp.TypeVar):
262
+
263
+ constraints = util.get_constraints(type_arg)
264
+ bound = util.get_bound(type_arg)
265
+
266
+ if constraints:
267
+ if not any(map(lambda c: expected_type == c, constraints)):
268
+ return False
269
+
270
+ if bound:
271
+ if not issubclass(expected_type, bound):
272
+ return False
273
+
274
+ # So long as constraints / bound are ok, any type matches a generic type var
275
+ return True
276
+
277
+
243
278
  if origin.__module__.startswith("tracdap.rt.api."):
244
279
  return isinstance(value, origin)
245
280
 
@@ -274,7 +309,7 @@ class _TypeValidator:
274
309
  @classmethod
275
310
  def _type_name(cls, type_var: tp.Type, qualified: bool = False) -> str:
276
311
 
277
- if isinstance(type_var, cls.__generic_metaclass):
312
+ if any(map(lambda _t: isinstance(type_var, _t), cls.__generic_metaclass)):
278
313
 
279
314
  origin = util.get_origin(type_var)
280
315
  args = util.get_args(type_var)
@@ -291,7 +326,11 @@ class _TypeValidator:
291
326
 
292
327
  if origin is list:
293
328
  list_type = cls._type_name(args[0])
294
- return f"List[{list_type}]"
329
+ return f"list[{list_type}]"
330
+
331
+ if origin is type:
332
+ type_arg = cls._type_name(args[0])
333
+ return f"type[{type_arg}]"
295
334
 
296
335
  raise ex.ETracInternal(f"Validation of [{origin.__name__}] generic parameters is not supported yet")
297
336
 
@@ -457,18 +496,20 @@ class StaticValidator:
457
496
  else:
458
497
  return
459
498
 
460
- fields = socket.schema.table.fields
461
- field_names = list(map(lambda f: f.fieldName, fields))
462
- property_type = f"field in [{socket_name}]"
499
+ if socket.schema.schemaType == meta.SchemaType.TABLE:
463
500
 
464
- if len(fields) == 0:
465
- cls._fail(f"Invalid schema for [{socket_name}]: No fields defined")
501
+ fields = socket.schema.table.fields
502
+ field_names = list(map(lambda f: f.fieldName, fields))
503
+ property_type = f"field in [{socket_name}]"
466
504
 
467
- cls._valid_identifiers(field_names, property_type)
468
- cls._case_insensitive_duplicates(field_names, property_type)
505
+ if len(fields) == 0:
506
+ cls._fail(f"Invalid schema for [{socket_name}]: No fields defined")
469
507
 
470
- for field in fields:
471
- cls._check_single_field(field, property_type)
508
+ cls._valid_identifiers(field_names, property_type)
509
+ cls._case_insensitive_duplicates(field_names, property_type)
510
+
511
+ for field in fields:
512
+ cls._check_single_field(field, property_type)
472
513
 
473
514
  @classmethod
474
515
  def _check_socket_file_type(cls, socket_name, socket):
@@ -0,0 +1,14 @@
1
+ # Licensed to the Fintech Open Source Foundation (FINOS) under one or
2
+ # more contributor license agreements. See the NOTICE file distributed
3
+ # with this work for additional information regarding copyright ownership.
4
+ # FINOS licenses this file to you under the Apache License, Version 2.0
5
+ # (the "License"); you may not use this file except in compliance with the
6
+ # License. You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
@@ -13,8 +13,6 @@
13
13
  # See the License for the specific language governing permissions and
14
14
  # limitations under the License.
15
15
 
16
- from __future__ import annotations
17
-
18
16
  import logging
19
17
  import threading
20
18
  import functools as func
@@ -25,9 +23,8 @@ import inspect
25
23
  import queue
26
24
  import time
27
25
 
28
- import tracdap.rt._impl.logging as _logging # noqa
29
- import tracdap.rt._impl.util as _util # noqa
30
- import tracdap.rt._impl.validation as _val # noqa
26
+ import tracdap.rt._impl.core.logging as _logging
27
+ import tracdap.rt._impl.core.validation as _val
31
28
  import tracdap.rt.exceptions as _ex
32
29
 
33
30
 
@@ -119,7 +116,7 @@ class Actor:
119
116
  def error(self) -> tp.Optional[Exception]:
120
117
  return self.__ctx.get_error()
121
118
 
122
- def actors(self) -> ActorContext:
119
+ def actors(self) -> "ActorContext":
123
120
  return self.__ctx
124
121
 
125
122
  def on_start(self):
@@ -135,7 +132,7 @@ class Actor:
135
132
  class ActorContext:
136
133
 
137
134
  def __init__(
138
- self, node: ActorNode, message: str,
135
+ self, node: "ActorNode", message: str,
139
136
  current_actor: ActorId, parent: ActorId, sender: tp.Optional[ActorId]):
140
137
 
141
138
  self.__node = node
@@ -189,13 +186,13 @@ class ThreadsafeActor(Actor):
189
186
  super().__init__()
190
187
  self.__threadsafe: tp.Optional[ThreadsafeContext] = None
191
188
 
192
- def threadsafe(self) -> ThreadsafeContext:
189
+ def threadsafe(self) -> "ThreadsafeContext":
193
190
  return self.__threadsafe
194
191
 
195
192
 
196
193
  class ThreadsafeContext:
197
194
 
198
- def __init__(self, node: ActorNode):
195
+ def __init__(self, node: "ActorNode"):
199
196
  self.__node = node
200
197
  self.__id = node.actor_id
201
198
  self.__parent = node.parent.actor_id if node.parent is not None else None
@@ -370,8 +367,8 @@ class ActorNode:
370
367
 
371
368
  def __init__(
372
369
  self, actor_id: ActorId, actor: Actor,
373
- parent: tp.Optional[ActorNode],
374
- system: ActorSystem,
370
+ parent: "tp.Optional[ActorNode]",
371
+ system: "ActorSystem",
375
372
  event_loop: EventLoop):
376
373
 
377
374
  self.actor_id = actor_id
@@ -484,7 +481,7 @@ class ActorNode:
484
481
 
485
482
  target_node._accept(msg)
486
483
 
487
- def _lookup_node(self, target_id: ActorId) -> tp.Optional[ActorNode]:
484
+ def _lookup_node(self, target_id: ActorId) -> "tp.Optional[ActorNode]":
488
485
 
489
486
  # Check self first
490
487
 
@@ -26,12 +26,13 @@ import tracdap.rt.api as _api
26
26
  import tracdap.rt.api.experimental as _eapi
27
27
  import tracdap.rt.metadata as _meta
28
28
  import tracdap.rt.exceptions as _ex
29
- import tracdap.rt._impl.data as _data # noqa
30
- import tracdap.rt._impl.logging as _logging # noqa
31
- import tracdap.rt._impl.storage as _storage # noqa
32
- import tracdap.rt._impl.type_system as _types # noqa
33
- import tracdap.rt._impl.util as _util # noqa
34
- import tracdap.rt._impl.validation as _val # noqa
29
+ import tracdap.rt._impl.core.data as _data
30
+ import tracdap.rt._impl.core.logging as _logging
31
+ import tracdap.rt._impl.core.storage as _storage
32
+ import tracdap.rt._impl.core.struct as _struct
33
+ import tracdap.rt._impl.core.type_system as _types
34
+ import tracdap.rt._impl.core.util as _util
35
+ import tracdap.rt._impl.core.validation as _val
35
36
 
36
37
 
37
38
  class TracContextImpl(_api.TracContext):
@@ -184,7 +185,25 @@ class TracContextImpl(_api.TracContext):
184
185
  def get_polars_table(self, dataset_name: str) -> "_data.polars.DataFrame":
185
186
 
186
187
  return self.get_table(dataset_name, _eapi.POLARS)
187
-
188
+
189
+ def get_struct(self, struct_name: str, python_class: type[_eapi.STRUCT_TYPE] = None) -> _eapi.STRUCT_TYPE:
190
+
191
+ _val.validate_signature(self.get_struct, struct_name, python_class)
192
+
193
+ self.__val.check_item_valid_identifier(struct_name, TracContextValidator.DATASET)
194
+ self.__val.check_item_defined_in_model(struct_name, TracContextValidator.DATASET)
195
+ self.__val.check_item_available_in_context(struct_name, TracContextValidator.DATASET)
196
+
197
+ data_view: _data.DataView = self.__local_ctx.get(struct_name)
198
+ part_key = _data.DataPartKey.for_root()
199
+
200
+ self.__val.check_context_object_type(struct_name, data_view, _data.DataView)
201
+ self.__val.check_context_data_view_type(struct_name, data_view, _meta.ObjectType.DATA)
202
+ self.__val.check_dataset_schema_defined(struct_name, data_view)
203
+
204
+ struct_data: dict = data_view.parts[part_key][0].content
205
+ return _struct.StructProcessor.parse_struct(struct_data, None, python_class)
206
+
188
207
  def get_file(self, file_name: str) -> bytes:
189
208
 
190
209
  _val.validate_signature(self.get_file, file_name)
@@ -199,7 +218,7 @@ class TracContextImpl(_api.TracContext):
199
218
  self.__val.check_context_data_view_type(file_name, file_view, _meta.ObjectType.FILE)
200
219
  self.__val.check_file_content_present(file_name, file_view)
201
220
 
202
- return file_view.file_item.raw_bytes
221
+ return file_view.file_item.content
203
222
 
204
223
  def get_file_stream(self, file_name: str) -> tp.ContextManager[tp.BinaryIO]:
205
224
 
@@ -274,13 +293,15 @@ class TracContextImpl(_api.TracContext):
274
293
  # Prefer static schemas for data conformance
275
294
 
276
295
  if static_schema is not None:
277
- schema = _data.DataMapping.trac_to_arrow_schema(static_schema)
296
+ trac_schema = static_schema
297
+ native_schema = _data.DataMapping.trac_to_arrow_schema(static_schema)
278
298
  else:
279
- schema = data_view.arrow_schema
299
+ trac_schema = _data.DataMapping.arrow_to_trac_schema(data_view.arrow_schema)
300
+ native_schema = data_view.arrow_schema
280
301
 
281
302
  # Data conformance is applied automatically inside the converter, if schema != None
282
- table = converter.to_internal(dataset, schema)
283
- item = _data.DataItem(_meta.ObjectType.DATA, schema, table)
303
+ table = converter.to_internal(dataset, native_schema)
304
+ item = _data.DataItem.for_table(table, native_schema, trac_schema)
284
305
 
285
306
  updated_view = _data.DataMapping.add_item_to_view(data_view, part_key, item)
286
307
 
@@ -293,7 +314,34 @@ class TracContextImpl(_api.TracContext):
293
314
  def put_polars_table(self, dataset_name: str, dataset: "_data.polars.DataFrame"):
294
315
 
295
316
  self.put_table(dataset_name, dataset, _eapi.POLARS)
296
-
317
+
318
+ def put_struct(self, struct_name: str, struct: _eapi.STRUCT_TYPE):
319
+
320
+ _val.validate_signature(self.put_struct, struct_name, struct)
321
+
322
+ self.__val.check_item_valid_identifier(struct_name, TracContextValidator.DATASET)
323
+ self.__val.check_item_is_model_output(struct_name, TracContextValidator.DATASET)
324
+
325
+ static_schema = self.__get_static_schema(self.__model_def, struct_name)
326
+ data_view = self.__local_ctx.get(struct_name)
327
+ part_key = _data.DataPartKey.for_root()
328
+
329
+ if data_view is None:
330
+ if static_schema is not None:
331
+ data_view = _data.DataView.for_trac_schema(static_schema)
332
+ else:
333
+ data_view = _data.DataView.create_empty()
334
+
335
+ self.__val.check_context_object_type(struct_name, data_view, _data.DataView)
336
+ self.__val.check_context_data_view_type(struct_name, data_view, _meta.ObjectType.DATA)
337
+ self.__val.check_dataset_schema_defined(struct_name, data_view)
338
+ self.__val.check_dataset_part_not_present(struct_name, data_view, part_key)
339
+
340
+ data_item = _data.DataItem.for_struct(struct)
341
+ updated_view = _data.DataMapping.add_item_to_view(data_view, part_key, data_item)
342
+
343
+ self.__local_ctx[struct_name] = updated_view
344
+
297
345
  def put_file(self, file_name: str, file_content: tp.Union[bytes, bytearray]):
298
346
 
299
347
  _val.validate_signature(self.put_file, file_name, file_content)
@@ -840,7 +888,13 @@ class TracContextValidator(TracContextErrorReporter):
840
888
 
841
889
  schema = data_view.trac_schema if data_view is not None else None
842
890
 
843
- if schema is None or schema.table is None or not schema.table.fields:
891
+ if schema is None:
892
+ self._report_error(f"Schema not defined for dataset {dataset_name} in the current context")
893
+
894
+ if schema.schemaType == _meta.SchemaType.TABLE and (schema.table is None or not schema.table.fields):
895
+ self._report_error(f"Schema not defined for dataset {dataset_name} in the current context")
896
+
897
+ if schema.schemaType == _meta.SchemaType.STRUCT and (schema.struct is None or not schema.struct.fields):
844
898
  self._report_error(f"Schema not defined for dataset {dataset_name} in the current context")
845
899
 
846
900
  def check_dataset_schema_not_defined(self, dataset_name: str, data_view: _data.DataView):
@@ -945,12 +999,12 @@ class TracContextValidator(TracContextErrorReporter):
945
999
 
946
1000
  def check_file_content_present(self, file_name: str, file_view: _data.DataView):
947
1001
 
948
- if file_view.file_item is None or not file_view.file_item.raw_bytes:
1002
+ if file_view.file_item is None or file_view.file_item.content is None:
949
1003
  self._report_error(f"File content is missing or empty for [{file_name}] in the current context")
950
1004
 
951
1005
  def check_file_content_not_present(self, file_name: str, file_view: _data.DataView):
952
1006
 
953
- if file_view.file_item is not None and file_view.file_item.raw_bytes:
1007
+ if file_view.file_item is not None and file_view.file_item.content is not None:
954
1008
  self._report_error(f"File content is already present for [{file_name}] in the current context")
955
1009
 
956
1010
  def check_storage_valid_identifier(self, storage_key):
@@ -22,12 +22,12 @@ import tracdap.rt.api as _api
22
22
  import tracdap.rt.config as _cfg
23
23
  import tracdap.rt.metadata as _meta
24
24
  import tracdap.rt.exceptions as _ex
25
- import tracdap.rt._impl.config_parser as _cfg_p # noqa
26
- import tracdap.rt._impl.logging as _logging # noqa
27
- import tracdap.rt._impl.models as _models # noqa
28
- import tracdap.rt._impl.storage as _storage # noqa
29
- import tracdap.rt._impl.type_system as _types # noqa
30
- import tracdap.rt._impl.util as _util # noqa
25
+ import tracdap.rt._impl.core.config_parser as _cfg_p
26
+ import tracdap.rt._impl.core.logging as _logging
27
+ import tracdap.rt._impl.core.models as _models
28
+ import tracdap.rt._impl.core.storage as _storage
29
+ import tracdap.rt._impl.core.type_system as _types
30
+ import tracdap.rt._impl.core.util as _util
31
31
 
32
32
 
33
33
  DEV_MODE_JOB_CONFIG = [
@@ -38,12 +38,12 @@ DEV_MODE_JOB_CONFIG = [
38
38
  re.compile(r"job\.\w+\.model"),
39
39
  re.compile(r"job\.\w+\.flow"),
40
40
 
41
- re.compile(r".*\.jobs\.\d+\.\w+\.parameters\.\w+"),
42
- re.compile(r".*\.jobs\.\d+\.\w+\.inputs\.\w+"),
43
- re.compile(r".*\.jobs\.\d+\.\w+\.outputs\.\w+"),
44
- re.compile(r".*\.jobs\.\d+\.\w+\.models\.\w+"),
45
- re.compile(r".*\.jobs\.\d+\.\w+\.model"),
46
- re.compile(r".*\.jobs\.\d+\.\w+\.flow")
41
+ re.compile(r".*\.jobs\[\d+]\.\w+\.parameters\.\w+"),
42
+ re.compile(r".*\.jobs\[\d+]\.\w+\.inputs\.\w+"),
43
+ re.compile(r".*\.jobs\[\d+]\.\w+\.outputs\.\w+"),
44
+ re.compile(r".*\.jobs\[\d+]\.\w+\.models\.\w+"),
45
+ re.compile(r".*\.jobs\[\d+]\.\w+\.model"),
46
+ re.compile(r".*\.jobs\[\d+]\.\w+\.flow")
47
47
  ]
48
48
 
49
49
  DEV_MODE_SYS_CONFIG = []
@@ -765,10 +765,15 @@ class DevModeTranslator:
765
765
  else:
766
766
  p_spec = param_specs[p_name]
767
767
 
768
- cls._log.info(f"Encoding parameter [{p_name}] as {p_spec.paramType.basicType.name}")
768
+ try:
769
+ cls._log.info(f"Encoding parameter [{p_name}] as {p_spec.paramType.basicType.name}")
770
+ encoded_value = _types.MetadataCodec.convert_value(p_value, p_spec.paramType)
771
+ encoded_values[p_name] = encoded_value
769
772
 
770
- encoded_value = _types.MetadataCodec.convert_value(p_value, p_spec.paramType)
771
- encoded_values[p_name] = encoded_value
773
+ except Exception as e:
774
+ msg = f"Failed to encode parameter [{p_name}]: {str(e)}"
775
+ cls._log.error(msg)
776
+ raise _ex.EConfigParse(msg) from e
772
777
 
773
778
  return encoded_values
774
779
 
@@ -842,7 +847,7 @@ class DevModeTranslator:
842
847
  if isinstance(data_value, str):
843
848
  storage_path = data_value
844
849
  storage_key = self._sys_config.storage.defaultBucket
845
- storage_format = self.infer_format(storage_path, self._sys_config.storage)
850
+ storage_format = self.infer_format(storage_path, self._sys_config.storage, schema)
846
851
  snap_version = 1
847
852
 
848
853
  elif isinstance(data_value, dict):
@@ -853,7 +858,7 @@ class DevModeTranslator:
853
858
  raise _ex.EConfigParse(f"Invalid configuration for input [{data_key}] (missing required value 'path')")
854
859
 
855
860
  storage_key = data_value.get("storageKey") or self._sys_config.storage.defaultBucket
856
- storage_format = data_value.get("format") or self.infer_format(storage_path, self._sys_config.storage)
861
+ storage_format = data_value.get("format") or self.infer_format(storage_path, self._sys_config.storage, schema)
857
862
  snap_version = 1
858
863
 
859
864
  else:
@@ -939,12 +944,18 @@ class DevModeTranslator:
939
944
  return file_id
940
945
 
941
946
  @staticmethod
942
- def infer_format(storage_path: str, storage_config: _cfg.StorageConfig):
947
+ def infer_format(storage_path: str, storage_config: _cfg.StorageConfig, schema: tp.Optional[_meta.SchemaDefinition]):
948
+
949
+ schema_type = schema.schemaType if schema and schema.schemaType else _meta.SchemaType.TABLE
943
950
 
944
951
  if re.match(r'.*\.\w+$', storage_path):
945
952
  extension = pathlib.Path(storage_path).suffix
946
- codec = _storage.FormatManager.get_data_format(extension, format_options={})
947
- return codec.format_code()
953
+ # Only try to map TABLE codecs through IDataFormat for now
954
+ if schema_type == _meta.SchemaType.TABLE:
955
+ codec = _storage.FormatManager.get_data_format(extension, format_options={})
956
+ return codec.format_code()
957
+ else:
958
+ return extension[1:] if extension.startswith(".") else extension
948
959
 
949
960
  else:
950
961
  return storage_config.defaultFormat