code-loader 1.0.138__py3-none-any.whl → 1.0.139__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of code-loader might be problematic.

@@ -1,3 +1,4 @@
1
+ import warnings
1
2
  from dataclasses import dataclass, field
2
3
  from typing import Any, Callable, List, Optional, Dict, Union, Type
3
4
  import re
@@ -56,7 +57,14 @@ class PreprocessResponse:
56
57
  for sample_id in self.sample_ids:
57
58
  assert isinstance(sample_id, str), f"Sample id should be of type str. Got: {type(sample_id)}"
58
59
  else:
59
- raise Exception("length is deprecated.")
60
+ raise Exception("length is deprecated, please use sample_ids instead.")
61
+
62
+ if self.state is None:
63
+ warnings.warn(
64
+ "PreprocessResponse.state is not set. For best practice, assign a unique `state` value to each PreprocessResponse instance."
65
+ )
66
+ else:
67
+ assert isinstance(self.state, DataStateType), f"PreprocessResponse.state must be of type {DataStateType.__name__} but got {type(self.state)}"
60
68
 
61
69
  def __hash__(self) -> int:
62
70
  return id(self)
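
For illustration, a standalone sketch of the new state check added above (PreprocessResponseSketch and the DataStateType members here are local stand-ins mirroring the field names in the diff, not the real code-loader classes):

    import warnings
    from dataclasses import dataclass
    from enum import Enum
    from typing import List, Optional

    class DataStateType(Enum):              # local stand-in for code_loader's enum
        training = "training"
        validation = "validation"

    @dataclass
    class PreprocessResponseSketch:          # hypothetical, minimal stand-in for PreprocessResponse
        sample_ids: List[str]
        state: Optional[DataStateType] = None

        def __post_init__(self):
            if self.state is None:
                warnings.warn("state is not set; assign a unique state to each response.")

    PreprocessResponseSketch(sample_ids=["a", "b"])                            # emits the warning
    PreprocessResponseSketch(sample_ids=["a"], state=DataStateType.training)   # no warning
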
@@ -1,9 +1,10 @@
1
1
  # mypy: ignore-errors
2
2
  import os
3
+ import warnings
3
4
  from collections import defaultdict
4
5
  from functools import lru_cache
5
6
  from pathlib import Path
6
- from typing import Optional, Union, Callable, List, Dict
7
+ from typing import Optional, Union, Callable, List, Dict, get_args, get_origin
7
8
 
8
9
  import numpy as np
9
10
  import numpy.typing as npt
@@ -25,8 +26,116 @@ import functools
25
26
 
26
27
  _called_from_inside_tl_decorator = 0
27
28
  _called_from_inside_tl_integration_test_decorator = False
28
-
29
-
29
+ _update_env_status = None
30
+
31
+
32
+
33
+
34
+ def validate_args_structure(*args, types_order, func_name, expected_names, **kwargs):
35
+ def _type_to_str(t):
36
+ origin = get_origin(t)
37
+ if origin is Union:
38
+ return " | ".join(tt.__name__ for tt in get_args(t))
39
+ elif hasattr(t, "__name__"):
40
+ return t.__name__
41
+ else:
42
+ return str(t)
43
+
44
+ def _format_types(types, names=None):
45
+ return ", ".join(
46
+ f"{(names[i] + ': ') if names else f'arg{i}: '}{_type_to_str(ty)}"
47
+ for i, ty in enumerate(types)
48
+ )
49
+
50
+ if expected_names:
51
+ normalized_args = []
52
+ for i, name in enumerate(expected_names):
53
+ if i < len(args):
54
+ normalized_args.append(args[i])
55
+ elif name in kwargs:
56
+ normalized_args.append(kwargs[name])
57
+ else:
58
+ raise AssertionError(
59
+ f"{func_name} validation failed: "
60
+ f"Missing required argument '{name}'. "
61
+ f"Expected arguments: {expected_names}."
62
+ )
63
+ else:
64
+ normalized_args = list(args)
65
+ if len(normalized_args) != len(types_order):
66
+ expected = _format_types(types_order, expected_names)
67
+ got_types = ", ".join(type(arg).__name__ for arg in normalized_args)
68
+ raise AssertionError(
69
+ f"{func_name} validation failed: "
70
+ f"Expected exactly {len(types_order)} arguments ({expected}), "
71
+ f"but got {len(normalized_args)} argument(s) of type(s): ({got_types}). "
72
+ f"Correct usage example: {func_name}({expected})"
73
+ )
74
+
75
+ for i, (arg, expected_type) in enumerate(zip(normalized_args, types_order)):
76
+ origin = get_origin(expected_type)
77
+ if origin is Union:
78
+ allowed_types = get_args(expected_type)
79
+ else:
80
+ allowed_types = (expected_type,)
81
+
82
+ if not isinstance(arg, allowed_types):
83
+ allowed_str = " | ".join(t.__name__ for t in allowed_types)
84
+ raise AssertionError(
85
+ f"{func_name} validation failed: "
86
+ f"Argument '{expected_names[i] if expected_names else f'arg{i}'}' "
87
+ f"expected type {allowed_str}, but got {type(arg).__name__}. "
88
+ f"Correct usage example: {func_name}({_format_types(types_order, expected_names)})"
89
+ )
90
+
91
+
92
+ def validate_output_structure(result, func_name: str, expected_type_name="np.ndarray", gt_flag=False):
93
+ if result is None or (isinstance(result, float) and np.isnan(result)):
94
+ if gt_flag:
95
+ raise AssertionError(
96
+ f"{func_name} validation failed: "
97
+ f"The function returned {result!r}. "
98
+ f"If you are working with an unlabeled dataset and no ground truth is available, "
99
+ f"use 'return np.array([], dtype=np.float32)' instead. "
100
+ f"Otherwise, {func_name} expected a single {expected_type_name} object. "
101
+ f"Make sure the function ends with 'return <{expected_type_name}>'."
102
+ )
103
+
104
+ raise AssertionError(
105
+ f"{func_name} validation failed: "
106
+ f"The function returned None. "
107
+ f"Expected a single {expected_type_name} object. "
108
+ f"Make sure the function ends with 'return <{expected_type_name}>'."
109
+ )
110
+ if isinstance(result, tuple):
111
+ element_descriptions = [
112
+ f"[{i}] type: {type(r).__name__}"
113
+ for i, r in enumerate(result)
114
+ ]
115
+ element_summary = "\n ".join(element_descriptions)
116
+
117
+ raise AssertionError(
118
+ f"{func_name} validation failed: "
119
+ f"The function returned multiple outputs ({len(result)} values), "
120
+ f"but only a single {expected_type_name} is allowed.\n\n"
121
+ f"Returned elements:\n"
122
+ f" {element_summary}\n\n"
123
+ f"Correct usage example:\n"
124
+ f" def {func_name}(...):\n"
125
+ f" return <{expected_type_name}>\n\n"
126
+ f"If you intended to return multiple values, combine them into a single "
127
+ f"{expected_type_name} (e.g., by concatenation or stacking)."
128
+ )
129
+
130
+ def batch_warning(result, func_name):
131
+ if result.shape[0] == 1:
132
+ warnings.warn(
133
+ f"{func_name} warning: Tensorleap will add a batch dimension at axis 0 to the output of {func_name}, "
134
+ f"although the detected size of axis 0 is already 1. "
135
+ f"This may lead to an extra batch dimension (e.g., shape (1, 1, ...)). "
136
+ f"Please ensure that the output of '{func_name}' is not already batched "
137
+ f"to avoid computation errors."
138
+ )
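
For illustration, a sketch of how the new validate_args_structure helper reports a bad call (this assumes the function defined above is in scope; PlainResponse is a hypothetical stand-in for PreprocessResponse):

    from typing import Union

    class PlainResponse:          # hypothetical stand-in argument type
        pass

    try:
        validate_args_structure(
            "sample_0", "not-a-response",                  # second argument has the wrong type
            types_order=[Union[int, str], PlainResponse],
            func_name="integration_test",
            expected_names=["idx", "preprocess"],
        )
    except AssertionError as err:
        print(err)   # names the offending argument, its expected type and the actual type
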
30
139
  def _add_mapping_connection(user_unique_name, connection_destinations, arg_names, name, node_mapping_type):
31
140
  connection_destinations = [connection_destination for connection_destination in connection_destinations
32
141
  if not isinstance(connection_destination, SamplePreprocessResponse)]
@@ -49,10 +158,24 @@ def tensorleap_integration_test():
49
158
  def decorating_function(integration_test_function: Callable):
50
159
  leap_binder.integration_test_func = integration_test_function
51
160
 
161
+ def _validate_input_args(*args, **kwargs):
162
+ sample_id, preprocess_response = args if len(args) != 0 else kwargs.values()
163
+ assert type(sample_id) == preprocess_response.sample_id_type, (
164
+ f"tensorleap_integration_test validation failed: "
165
+ f"sample_id type ({type(sample_id).__name__}) does not match the expected "
166
+ f"type ({preprocess_response.sample_id_type}) from the PreprocessResponse."
167
+ )
168
+
52
169
  def inner(*args, **kwargs):
170
+ validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
171
+ func_name='integration_test',expected_names=["idx", "preprocess"],**kwargs)
172
+ _validate_input_args(*args, **kwargs)
173
+
53
174
  global _called_from_inside_tl_integration_test_decorator
54
175
  try:
55
176
  _called_from_inside_tl_integration_test_decorator = True
177
+ if _update_env_status is not None:
178
+ _update_env_status("tensorleap_integration_test", "v")#put here because otherwise it will become v only if it finishes all the script
56
179
  ret = integration_test_function(*args, **kwargs)
57
180
 
58
181
  try:
@@ -65,7 +188,7 @@ def tensorleap_integration_test():
65
188
  line_number = first_tb.lineno
66
189
  if isinstance(e, TypeError) and 'is not subscriptable' in str(e):
67
190
  print(f'Invalid integration code. File {file_name}, line {line_number}: '
68
- f'Please remove this indexing operation usage from the integration test code.')
191
+ f"indexing is supported only on the model's predictions inside the integration test. Please remove this indexing operation usage from the integration test code.")
69
192
  else:
70
193
  print(f'Invalid integration code. File {file_name}, line {line_number}: '
71
194
  f'Integration test is only allowed to call Tensorleap decorators. '
@@ -77,9 +200,9 @@ def tensorleap_integration_test():
77
200
  _called_from_inside_tl_integration_test_decorator = False
78
201
 
79
202
  leap_binder.check()
80
-
81
203
  return inner
82
204
 
205
+
83
206
  return decorating_function
84
207
 
85
208
  def _safe_get_item(key):
@@ -89,18 +212,58 @@ def _safe_get_item(key):
89
212
  raise Exception(f'Tensorleap currently supports models with no more than 10 inputs')
90
213
 
91
214
  def tensorleap_load_model(prediction_types: Optional[List[PredictionTypeHandler]] = []):
215
+ assert isinstance(prediction_types, list),(
216
+ f"tensorleap_load_model validation failed: "
217
+ f" prediction_types is an optional argument of type List[PredictionTypeHandler]] but got {type(prediction_types).__name__}."
218
+ )
92
219
  for i, prediction_type in enumerate(prediction_types):
220
+ assert isinstance(prediction_type, PredictionTypeHandler),(f"tensorleap_load_model validation failed: "
221
+ f" prediction_types at position {i} must be of type PredictionTypeHandler but got {type(prediction_types[i]).__name__}.")
93
222
  leap_binder.add_prediction(prediction_type.name, prediction_type.labels, prediction_type.channel_dim, i)
94
223
 
224
+ def _validate_result(result) -> None:
225
+ valid_types=["onnxruntime","keras"]
226
+ err_message=f"tensorleap_load_model validation failed:\nSupported models are Keras and onnxruntime only and non of them was returned."
227
+ validate_output_structure(result, func_name="tensorleap_load_model", expected_type_name= [" | ".join(t for t in valid_types)][0])
228
+ try:
229
+ import keras
230
+ except ImportError:
231
+ keras = None
232
+ try:
233
+ import tensorflow as tf
234
+ except ImportError:
235
+ tf = None
236
+ try:
237
+ import onnxruntime
238
+ except ImportError:
239
+ onnxruntime = None
240
+
241
+ if not keras and not onnxruntime:
242
+ raise AssertionError(err_message)
243
+
244
+ is_keras_model = (
245
+ bool(keras and isinstance(result, getattr(keras, "Model", tuple())))
246
+ or bool(tf and isinstance(result, getattr(tf.keras, "Model", tuple())))
247
+ )
248
+ is_onnx_model = bool(onnxruntime and isinstance(result, onnxruntime.InferenceSession))
249
+
250
+ if not any([is_keras_model, is_onnx_model]):
251
+ raise AssertionError( err_message)
252
+
253
+
254
+
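
The framework check above relies on optional imports; below is a standalone sketch of that pattern (the module names are real, but whether they are installed depends on the environment):

    def detect_model_framework(model):
        try:
            import onnxruntime
        except ImportError:
            onnxruntime = None
        try:
            import keras
        except ImportError:
            keras = None

        if onnxruntime is not None and isinstance(model, onnxruntime.InferenceSession):
            return "onnxruntime"
        if keras is not None and isinstance(model, getattr(keras, "Model", ())):
            return "keras"
        return "unsupported"
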
95
255
  def decorating_function(load_model_func):
96
256
  class TempMapping:
97
257
  pass
98
258
 
99
259
  @lru_cache()
100
- def inner():
260
+ def inner(*args, **kwargs):
261
+ validate_args_structure(*args, types_order=[],
262
+ func_name='tensorleap_load_model',expected_names=[],**kwargs)
101
263
  class ModelPlaceholder:
102
264
  def __init__(self):
103
- self.model = load_model_func()
265
+ self.model = load_model_func()  # TODO: check why this fails on ONNX models
266
+ _validate_result(self.model)
104
267
 
105
268
  # keras interface
106
269
  def __call__(self, arg):
@@ -164,8 +327,10 @@ def tensorleap_load_model(prediction_types: Optional[List[PredictionTypeHandler]
164
327
 
165
328
  def get_inputs(self):
166
329
  return self.model.get_inputs()
167
-
168
- return ModelPlaceholder()
330
+ model_placeholder=ModelPlaceholder()
331
+ if _update_env_status is not None:
332
+ _update_env_status("tensorleap_load_model", "v")
333
+ return model_placeholder
169
334
 
170
335
  def mapping_inner():
171
336
  class ModelOutputPlaceholder:
@@ -228,12 +393,11 @@ def tensorleap_load_model(prediction_types: Optional[List[PredictionTypeHandler]
228
393
 
229
394
  return ModelPlaceholder()
230
395
 
231
- def final_inner():
396
+ def final_inner(*args, **kwargs):
232
397
  if os.environ.get(mapping_runtime_mode_env_var_mame):
233
398
  return mapping_inner()
234
399
  else:
235
- return inner()
236
-
400
+ return inner(*args, **kwargs)
237
401
  return final_inner
238
402
 
239
403
  return decorating_function
@@ -244,81 +408,168 @@ def tensorleap_custom_metric(name: str,
244
408
  compute_insights: Optional[Union[bool, Dict[str, bool]]] = None,
245
409
  connects_to=None):
246
410
  name_to_unique_name = defaultdict(set)
247
-
248
411
  def decorating_function(
249
412
  user_function: Union[CustomCallableInterfaceMultiArgs, CustomMultipleReturnCallableInterfaceMultiArgs,
250
413
  ConfusionMatrixCallableInterfaceMultiArgs]):
414
+
415
+ def _validate_decorators_signature():
416
+ err_message = f"{user_function.__name__} validation failed.\n"
417
+ if not isinstance(name, str):
418
+ raise TypeError(err_message + f"`name` must be a string, got type {type(name).__name__}.")
419
+ valid_directions = {MetricDirection.Upward, MetricDirection.Downward}
420
+ if isinstance(direction, MetricDirection):
421
+ if direction not in valid_directions:
422
+ raise ValueError(
423
+ err_message +
424
+ f"Invalid MetricDirection: {direction}. Must be one of {valid_directions}, "
425
+ f"got type {type(direction).__name__}."
426
+ )
427
+ elif isinstance(direction, dict):
428
+ if not all(isinstance(k, str) for k in direction.keys()):
429
+ invalid_keys = {k: type(k).__name__ for k in direction.keys() if not isinstance(k, str)}
430
+ raise TypeError(
431
+ err_message +
432
+ f"All keys in `direction` must be strings, got invalid key types: {invalid_keys}."
433
+ )
434
+ for k, v in direction.items():
435
+ if v not in valid_directions:
436
+ raise ValueError(
437
+ err_message +
438
+ f"Invalid direction for key '{k}': {v}. Must be one of {valid_directions}, "
439
+ f"got type {type(v).__name__}."
440
+ )
441
+ else:
442
+ raise TypeError(
443
+ err_message +
444
+ f"`direction` must be a MetricDirection or a Dict[str, MetricDirection], "
445
+ f"got type {type(direction).__name__}."
446
+ )
447
+ if compute_insights is not None:
448
+ if not isinstance(compute_insights, (bool, dict)):
449
+ raise TypeError(
450
+ err_message +
451
+ f"`compute_insights` must be a bool or a Dict[str, bool], "
452
+ f"got type {type(compute_insights).__name__}."
453
+ )
454
+ if isinstance(compute_insights, dict):
455
+ if not all(isinstance(k, str) for k in compute_insights.keys()):
456
+ invalid_keys = {k: type(k).__name__ for k in compute_insights.keys() if not isinstance(k, str)}
457
+ raise TypeError(
458
+ err_message +
459
+ f"All keys in `compute_insights` must be strings, got invalid key types: {invalid_keys}."
460
+ )
461
+ for k, v in compute_insights.items():
462
+ if not isinstance(v, bool):
463
+ raise TypeError(
464
+ err_message +
465
+ f"Invalid type for compute_insights['{k}']: expected bool, got type {type(v).__name__}."
466
+ )
467
+ if connects_to is not None:
468
+ valid_types = (str, list, tuple, set)
469
+ if not isinstance(connects_to, valid_types):
470
+ raise TypeError(
471
+ err_message +
472
+ f"`connects_to` must be one of {valid_types}, got type {type(connects_to).__name__}."
473
+ )
474
+ if isinstance(connects_to, (list, tuple, set)):
475
+ invalid_elems = [f"{type(e).__name__}" for e in connects_to if not isinstance(e, str)]
476
+ if invalid_elems:
477
+ raise TypeError(
478
+ err_message +
479
+ f"All elements in `connects_to` must be strings, "
480
+ f"but found element types: {invalid_elems}."
481
+ )
482
+
483
+
484
+ _validate_decorators_signature()
485
+
251
486
  for metric_handler in leap_binder.setup_container.metrics:
252
487
  if metric_handler.metric_handler_data.name == name:
253
488
  raise Exception(f'Metric with name {name} already exists. '
254
489
  f'Please choose another')
255
490
 
256
491
  def _validate_input_args(*args, **kwargs) -> None:
492
+ assert len(args) > 0, (
493
+ f"{user_function.__name__}() validation failed: "
494
+ f"Expected at least one positional|key-word argument of type np.ndarray, "
495
+ f"but received none. "
496
+ f"Correct usage example: tensorleap_custom_metric(input_array: np.ndarray, ...)"
497
+ )
257
498
  for i, arg in enumerate(args):
258
499
  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
259
- f'tensorleap_custom_metric validation failed: '
500
+ f'{user_function.__name__}() validation failed: '
260
501
  f'Argument #{i} should be a numpy array. Got {type(arg)}.')
261
502
  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
262
503
  assert arg.shape[0] == leap_binder.batch_size_to_validate, \
263
- (f'tensorleap_custom_metric validation failed: Argument #{i} '
504
+ (f'{user_function.__name__}() validation failed: Argument #{i} '
264
505
  f'first dim should be as the batch size. Got {arg.shape[0]} '
265
506
  f'instead of {leap_binder.batch_size_to_validate}')
266
507
 
267
508
  for _arg_name, arg in kwargs.items():
268
509
  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
269
- f'tensorleap_custom_metric validation failed: '
510
+ f'{user_function.__name__}() validation failed: '
270
511
  f'Argument {_arg_name} should be a numpy array. Got {type(arg)}.')
271
512
  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
272
513
  assert arg.shape[0] == leap_binder.batch_size_to_validate, \
273
- (f'tensorleap_custom_metric validation failed: Argument {_arg_name} '
514
+ (f'{user_function.__name__}() validation failed: Argument {_arg_name} '
274
515
  f'first dim should be as the batch size. Got {arg.shape[0]} '
275
516
  f'instead of {leap_binder.batch_size_to_validate}')
276
517
 
277
518
  def _validate_result(result) -> None:
278
- supported_types_message = (f'tensorleap_custom_metric validation failed: '
279
- f'Metric has returned unsupported type. Supported types are List[float], '
280
- f'List[List[ConfusionMatrixElement]], NDArray[np.float32]. ')
519
+ validate_output_structure(result, func_name=user_function.__name__,
520
+ expected_type_name="List[float | int | None | List[ConfusionMatrixElement] ] | NDArray[np.float32] or dictonary with one of these types as its values types")
521
+ supported_types_message = (f'{user_function.__name__}() validation failed: '
522
+ f'{user_function.__name__}() has returned an unsupported type.\nSupported types are List[float|int|None], '
523
+ f'List[List[ConfusionMatrixElement]], NDArray[np.float32] or a dictionary with one of these types as its value type. ')
281
524
 
282
- def _validate_single_metric(single_metric_result):
525
+ def _validate_single_metric(single_metric_result,key=None):
283
526
  if isinstance(single_metric_result, list):
284
527
  if isinstance(single_metric_result[0], list):
285
- assert isinstance(single_metric_result[0][0], ConfusionMatrixElement), \
286
- f'{supported_types_message}Got List[List[{type(single_metric_result[0][0])}]].'
528
+ assert all(isinstance(cm, ConfusionMatrixElement) for cm in single_metric_result[0]), (
529
+ f"{supported_types_message} "
530
+ f"Got {'a dict where the value of ' + str(key) + ' is of type ' if key is not None else ''}"
531
+ f"List[List[{', '.join(type(cm).__name__ for cm in single_metric_result[0])}]]."
532
+ )
533
+
287
534
  else:
288
- assert isinstance(single_metric_result[0], (
289
- float, int,
290
- type(None))), f'{supported_types_message}Got List[{type(single_metric_result[0])}].'
535
+ assert all(isinstance(v, (float,int,type(None),np.float32)) for v in single_metric_result), (
536
+ f"{supported_types_message}\n"
537
+ f"Got {'a dict where the value of ' + str(key) + ' is of type ' if key is not None else ''}"
538
+ f"List[{', '.join(type(v).__name__ for v in single_metric_result)}]."
539
+ )
291
540
  else:
292
541
  assert isinstance(single_metric_result,
293
- np.ndarray), f'{supported_types_message}Got {type(single_metric_result)}.'
294
- assert len(single_metric_result.shape) == 1, (f'tensorleap_custom_metric validation failed: '
542
+ np.ndarray), f'{supported_types_message}\nGot {type(single_metric_result)}.'
543
+ assert len(single_metric_result.shape) == 1, (f'{user_function.__name__}() validation failed: '
295
544
  f'The return shape should be 1D. Got {len(single_metric_result.shape)}D.')
296
545
 
297
546
  if leap_binder.batch_size_to_validate:
298
547
  assert len(single_metric_result) == leap_binder.batch_size_to_validate, \
299
- f'tensorleap_custom_metrix validation failed: The return len should be as the batch size.'
548
+ f'{user_function.__name__}() validation failed: The returned length{f" of the {key} value" if key is not None else ""} should be equal to the batch size.'
300
549
 
301
550
  if isinstance(result, dict):
302
551
  for key, value in result.items():
552
+ _validate_single_metric(value,key)
553
+
303
554
  assert isinstance(key, str), \
304
- (f'tensorleap_custom_metric validation failed: '
555
+ (f'{user_function.__name__}() validation failed: '
305
556
  f'Keys in the return dict should be of type str. Got {type(key)}.')
306
557
  _validate_single_metric(value)
307
558
 
308
559
  if isinstance(direction, dict):
309
560
  for direction_key in direction:
310
561
  assert direction_key in result, \
311
- (f'tensorleap_custom_metric validation failed: '
562
+ (f'{user_function.__name__}() validation failed: '
312
563
  f'Keys in the direction mapping should be part of result keys. Got key {direction_key}.')
313
564
 
314
565
  if compute_insights is not None:
315
566
  assert isinstance(compute_insights, dict), \
316
- (f'tensorleap_custom_metric validation failed: '
567
+ (f'{user_function.__name__}() validation failed: '
317
568
  f'compute_insights should be dict if using the dict results. Got {type(compute_insights)}.')
318
569
 
319
570
  for ci_key in compute_insights:
320
571
  assert ci_key in result, \
321
- (f'tensorleap_custom_metric validation failed: '
572
+ (f'{user_function.__name__}() validation failed: '
322
573
  f'Keys in the compute_insights mapping should be part of result keys. Got key {ci_key}.')
323
574
 
324
575
  else:
@@ -326,7 +577,7 @@ def tensorleap_custom_metric(name: str,
326
577
 
327
578
  if compute_insights is not None:
328
579
  assert isinstance(compute_insights, bool), \
329
- (f'tensorleap_custom_metric validation failed: '
580
+ (f'{user_function.__name__}() validation failed: '
330
581
  f'compute_insights should be boolean. Got {type(compute_insights)}.')
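
For illustration, a metric return value that satisfies the per-key checks above (the batch size and key names are made up for the sketch):

    import numpy as np

    batch_size = 4
    result = {
        "accuracy": np.zeros(batch_size, dtype=np.float32),   # 1-D array, one value per sample
        "num_objects": [1, 0, 3, 2],                           # List[int] is also accepted
    }
    assert all(len(value) == batch_size for value in result.values())
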
331
582
 
332
583
  @functools.wraps(user_function)
@@ -358,6 +609,8 @@ def tensorleap_custom_metric(name: str,
358
609
  result = inner_without_validate(*args, **kwargs)
359
610
 
360
611
  _validate_result(result)
612
+ if _update_env_status is not None:
613
+ _update_env_status("tensorleap_custom_metric","v")
361
614
  return result
362
615
 
363
616
  def mapping_inner(*args, **kwargs):
@@ -397,28 +650,38 @@ def tensorleap_custom_visualizer(name: str, visualizer_type: LeapDataType,
397
650
  name_to_unique_name = defaultdict(set)
398
651
 
399
652
  def decorating_function(user_function: VisualizerCallableInterface):
653
+ assert isinstance(visualizer_type,LeapDataType),(f"{user_function.__name__} validation failed: "
654
+ f"visualizer_type should be of type {LeapDataType.__name__} but got {type(visualizer_type)}"
655
+ )
656
+
400
657
  for viz_handler in leap_binder.setup_container.visualizers:
401
658
  if viz_handler.visualizer_handler_data.name == name:
402
659
  raise Exception(f'Visualizer with name {name} already exists. '
403
660
  f'Please choose another')
404
661
 
405
662
  def _validate_input_args(*args, **kwargs):
663
+ assert len(args) > 0, (
664
+ f"{user_function.__name__}() validation failed: "
665
+ f"Expected at least one positional|key-word argument of type np.ndarray, "
666
+ f"but received none. "
667
+ f"Correct usage example: {user_function.__name__}(input_array: np.ndarray, ...)"
668
+ )
406
669
  for i, arg in enumerate(args):
407
670
  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
408
- f'tensorleap_custom_visualizer validation failed: '
671
+ f'{user_function.__name__}() validation failed: '
409
672
  f'Argument #{i} should be a numpy array. Got {type(arg)}.')
410
673
  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
411
674
  assert arg.shape[0] != leap_binder.batch_size_to_validate, \
412
- (f'tensorleap_custom_visualizer validation failed: '
675
+ (f'{user_function.__name__}() validation failed: '
413
676
  f'Argument #{i} should be without batch dimension. ')
414
677
 
415
678
  for _arg_name, arg in kwargs.items():
416
679
  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
417
- f'tensorleap_custom_visualizer validation failed: '
680
+ f'{user_function.__name__}() validation failed: '
418
681
  f'Argument {_arg_name} should be a numpy array. Got {type(arg)}.')
419
682
  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
420
683
  assert arg.shape[0] != leap_binder.batch_size_to_validate, \
421
- (f'tensorleap_custom_visualizer validation failed: Argument {_arg_name} '
684
+ (f'{user_function.__name__}() validation failed: Argument {_arg_name} '
422
685
  f'should be without batch dimension. ')
423
686
 
424
687
  def _validate_result(result):
@@ -432,8 +695,11 @@ def tensorleap_custom_visualizer(name: str, visualizer_type: LeapDataType,
432
695
  LeapDataType.ImageWithBBox: LeapImageWithBBox,
433
696
  LeapDataType.ImageWithHeatmap: LeapImageWithHeatmap
434
697
  }
698
+ validate_output_structure(result, func_name=user_function.__name__,
699
+ expected_type_name=result_type_map[visualizer_type])
700
+
435
701
  assert isinstance(result, result_type_map[visualizer_type]), \
436
- (f'tensorleap_custom_visualizer validation failed: '
702
+ (f'{user_function.__name__}() validation failed: '
437
703
  f'The return type should be {result_type_map[visualizer_type]}. Got {type(result)}.')
438
704
 
439
705
  @functools.wraps(user_function)
@@ -465,6 +731,8 @@ def tensorleap_custom_visualizer(name: str, visualizer_type: LeapDataType,
465
731
  result = inner_without_validate(*args, **kwargs)
466
732
 
467
733
  _validate_result(result)
734
+ if _update_env_status is not None:
735
+ _update_env_status("tensorleap_custom_visualizer","v")
468
736
  return result
469
737
 
470
738
  def mapping_inner(*args, **kwargs):
@@ -506,30 +774,26 @@ def tensorleap_metadata(
506
774
  f'Please choose another')
507
775
 
508
776
  def _validate_input_args(sample_id: Union[int, str], preprocess_response: PreprocessResponse):
509
- assert isinstance(sample_id, (int, str)), \
510
- (f'tensorleap_metadata validation failed: '
511
- f'Argument sample_id should be either int or str. Got {type(sample_id)}.')
512
- assert isinstance(preprocess_response, PreprocessResponse), \
513
- (f'tensorleap_metadata validation failed: '
514
- f'Argument preprocess_response should be a PreprocessResponse. Got {type(preprocess_response)}.')
515
777
  assert type(sample_id) == preprocess_response.sample_id_type, \
516
- (f'tensorleap_metadata validation failed: '
778
+ (f'{user_function.__name__}() validation failed: '
517
779
  f'Argument sample_id should be as the same type as defined in the preprocess response '
518
780
  f'{preprocess_response.sample_id_type}. Got {type(sample_id)}.')
519
781
 
520
782
  def _validate_result(result):
521
783
  supported_result_types = (type(None), int, str, bool, float, dict, np.floating,
522
784
  np.bool_, np.unsignedinteger, np.signedinteger, np.integer)
785
+ validate_output_structure(result, func_name=user_function.__name__,
786
+ expected_type_name=supported_result_types)
523
787
  assert isinstance(result, supported_result_types), \
524
- (f'tensorleap_metadata validation failed: '
788
+ (f'{user_function.__name__}() validation failed: '
525
789
  f'Unsupported return type. Got {type(result)}. should be any of {str(supported_result_types)}')
526
790
  if isinstance(result, dict):
527
791
  for key, value in result.items():
528
792
  assert isinstance(key, str), \
529
- (f'tensorleap_metadata validation failed: '
793
+ (f'{user_function.__name__}() validation failed: '
530
794
  f'Keys in the return dict should be of type str. Got {type(key)}.')
531
795
  assert isinstance(value, supported_result_types), \
532
- (f'tensorleap_metadata validation failed: '
796
+ (f'{user_function.__name__}() validation failed: '
533
797
  f'Values in the return dict should be of type {str(supported_result_types)}. Got {type(value)}.')
534
798
 
535
799
  def inner_without_validate(sample_id, preprocess_response):
@@ -546,15 +810,19 @@ def tensorleap_metadata(
546
810
 
547
811
  leap_binder.set_metadata(inner_without_validate, name, metadata_type)
548
812
 
549
- def inner(sample_id, preprocess_response):
813
+ def inner(*args,**kwargs):
550
814
  if os.environ.get(mapping_runtime_mode_env_var_mame):
551
815
  return None
552
-
816
+ validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
817
+ func_name=user_function.__name__, expected_names=["idx", "preprocess"],**kwargs)
818
+ sample_id, preprocess_response = args if len(args)!=0 else kwargs.values()
553
819
  _validate_input_args(sample_id, preprocess_response)
554
820
 
555
821
  result = inner_without_validate(sample_id, preprocess_response)
556
822
 
557
823
  _validate_result(result)
824
+ if _update_env_status is not None:
825
+ _update_env_status("tensorleap_metadata","v")
558
826
  return result
559
827
 
560
828
  return inner
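
For illustration, a metadata function return value that passes the checks above: a flat dict with str keys and scalar values (the keys shown are made up, and the decorator is omitted from the sketch):

    def sample_metadata(sample_id, preprocess_response):
        # every value is one of the supported scalar types (None/int/str/bool/float or NumPy scalars)
        return {
            "file_name": f"{sample_id}.png",
            "is_night": False,
            "mean_intensity": 0.42,
        }
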
@@ -617,19 +885,23 @@ def tensorleap_preprocess():
617
885
 
618
886
  def _validate_input_args(*args, **kwargs):
619
887
  assert len(args) == 0 and len(kwargs) == 0, \
620
- (f'tensorleap_preprocess validation failed: '
888
+ (f'{user_function.__name__}() validation failed: '
621
889
  f'The function should not take any arguments. Got {args} and {kwargs}.')
622
890
 
623
891
  def _validate_result(result):
624
- assert isinstance(result, list), \
625
- (f'tensorleap_preprocess validation failed: '
626
- f'The return type should be a list. Got {type(result)}.')
892
+ assert isinstance(result, list), (
893
+ f"{user_function.__name__}() validation failed: expected return type list[{PreprocessResponse.__name__}]"
894
+ f"(e.g., [PreprocessResponse1, PreprocessResponse2, ...]), but returned type is {type(result).__name__}."
895
+ if not isinstance(result, tuple)
896
+ else f"{user_function.__name__}() validation failed: expected to return a single list[{PreprocessResponse.__name__}] object, "
897
+ f"but returned {len(result)} objects instead."
898
+ )
627
899
  for i, response in enumerate(result):
628
900
  assert isinstance(response, PreprocessResponse), \
629
- (f'tensorleap_preprocess validation failed: '
901
+ (f'{user_function.__name__}() validation failed: '
630
902
  f'Element #{i} in the return list should be a PreprocessResponse. Got {type(response)}.')
631
903
  assert len(set(result)) == len(result), \
632
- (f'tensorleap_preprocess validation failed: '
904
+ (f'{user_function.__name__}() validation failed: '
633
905
  f'The return list should not contain duplicate PreprocessResponse objects.')
634
906
 
635
907
  def inner(*args, **kwargs):
@@ -637,9 +909,10 @@ def tensorleap_preprocess():
637
909
  return [None, None, None, None]
638
910
 
639
911
  _validate_input_args(*args, **kwargs)
640
-
641
912
  result = user_function()
642
913
  _validate_result(result)
914
+ if _update_env_status is not None:
915
+ _update_env_status("tensorleap_preprocess", "v")
643
916
  return result
644
917
 
645
918
  return inner
@@ -838,29 +1111,23 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):
838
1111
  raise Exception(f"Channel dim for input {name} is expected to be either -1 or positive")
839
1112
 
840
1113
  def _validate_input_args(sample_id: Union[int, str], preprocess_response: PreprocessResponse):
841
- assert isinstance(sample_id, (int, str)), \
842
- (f'tensorleap_input_encoder validation failed: '
843
- f'Argument sample_id should be either int or str. Got {type(sample_id)}.')
844
- assert isinstance(preprocess_response, PreprocessResponse), \
845
- (f'tensorleap_input_encoder validation failed: '
846
- f'Argument preprocess_response should be a PreprocessResponse. Got {type(preprocess_response)}.')
847
1114
  assert type(sample_id) == preprocess_response.sample_id_type, \
848
- (f'tensorleap_input_encoder validation failed: '
1115
+ (f'{user_function.__name__}() validation failed: '
849
1116
  f'Argument sample_id should be as the same type as defined in the preprocess response '
850
1117
  f'{preprocess_response.sample_id_type}. Got {type(sample_id)}.')
851
1118
 
852
1119
  def _validate_result(result):
1120
+ validate_output_structure(result, func_name=user_function.__name__, expected_type_name = "np.ndarray")
853
1121
  assert isinstance(result, np.ndarray), \
854
- (f'tensorleap_input_encoder validation failed: '
1122
+ (f'{user_function.__name__}() validation failed: '
855
1123
  f'Unsupported return type. Should be a numpy array. Got {type(result)}.')
856
1124
  assert result.dtype == np.float32, \
857
- (f'tensorleap_input_encoder validation failed: '
1125
+ (f'{user_function.__name__}() validation failed: '
858
1126
  f'The return type should be a numpy array of type float32. Got {result.dtype}.')
859
- assert channel_dim - 1 <= len(result.shape), (f'tensorleap_input_encoder validation failed: '
1127
+ assert channel_dim - 1 <= len(result.shape), (f'{user_function.__name__}() validation failed: '
860
1128
  f'The channel_dim ({channel_dim}) should be <= to the rank of the resulting input rank ({len(result.shape)}).')
861
1129
 
862
1130
  def inner_without_validate(sample_id, preprocess_response):
863
-
864
1131
  global _called_from_inside_tl_decorator
865
1132
  _called_from_inside_tl_decorator += 1
866
1133
 
@@ -874,7 +1141,10 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):
874
1141
  leap_binder.set_input(inner_without_validate, name, channel_dim=channel_dim)
875
1142
 
876
1143
 
877
- def inner(sample_id, preprocess_response):
1144
+ def inner(*args, **kwargs):
1145
+ validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
1146
+ func_name=user_function.__name__, expected_names=["idx", "preprocess"], **kwargs)
1147
+ sample_id, preprocess_response = args if len(args)!=0 else kwargs.values()
878
1148
  _validate_input_args(sample_id, preprocess_response)
879
1149
 
880
1150
  result = inner_without_validate(sample_id, preprocess_response)
@@ -882,18 +1152,20 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):
882
1152
  _validate_result(result)
883
1153
 
884
1154
  if _called_from_inside_tl_decorator == 0 and _called_from_inside_tl_integration_test_decorator:
1155
+ batch_warning(result,user_function.__name__)
885
1156
  result = np.expand_dims(result, axis=0)
1157
+ if not _update_env_status is None:
1158
+ _update_env_status("tensorleap_input_encoder", "v")
886
1159
 
887
1160
  return result
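
The batch_warning call above guards against encoder outputs that already carry a batch axis; a standalone sketch of the pitfall it warns about (shapes are illustrative):

    import numpy as np

    already_batched = np.zeros((1, 28, 28), dtype=np.float32)   # leading size-1 axis
    stacked = np.expand_dims(already_batched, axis=0)
    print(stacked.shape)   # (1, 1, 28, 28) -- the doubled batch dimension the warning points at
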
888
1161
 
889
1162
 
890
-
891
1163
  node_mapping_type = NodeMappingType.Input
892
1164
  if model_input_index is not None:
893
1165
  node_mapping_type = NodeMappingType(f'Input{str(model_input_index)}')
894
1166
  inner.node_mapping = NodeMapping(name, node_mapping_type)
895
1167
 
896
- def mapping_inner(sample_id, preprocess_response):
1168
+ def mapping_inner(*args, **kwargs):
897
1169
  class TempMapping:
898
1170
  pass
899
1171
 
@@ -905,11 +1177,11 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):
905
1177
 
906
1178
  mapping_inner.node_mapping = NodeMapping(name, node_mapping_type)
907
1179
 
908
- def final_inner(sample_id, preprocess_response):
1180
+ def final_inner(*args, **kwargs):
909
1181
  if os.environ.get(mapping_runtime_mode_env_var_mame):
910
- return mapping_inner(sample_id, preprocess_response)
1182
+ return mapping_inner(*args, **kwargs)
911
1183
  else:
912
- return inner(sample_id, preprocess_response)
1184
+ return inner(*args, **kwargs)
913
1185
 
914
1186
  final_inner.node_mapping = NodeMapping(name, node_mapping_type)
915
1187
 
@@ -926,23 +1198,18 @@ def tensorleap_gt_encoder(name: str):
926
1198
  f'Please choose another')
927
1199
 
928
1200
  def _validate_input_args(sample_id: Union[int, str], preprocess_response: PreprocessResponse):
929
- assert isinstance(sample_id, (int, str)), \
930
- (f'tensorleap_gt_encoder validation failed: '
931
- f'Argument sample_id should be either int or str. Got {type(sample_id)}.')
932
- assert isinstance(preprocess_response, PreprocessResponse), \
933
- (f'tensorleap_gt_encoder validation failed: '
934
- f'Argument preprocess_response should be a PreprocessResponse. Got {type(preprocess_response)}.')
935
1201
  assert type(sample_id) == preprocess_response.sample_id_type, \
936
- (f'tensorleap_gt_encoder validation failed: '
1202
+ (f'{user_function.__name__}() validation failed: '
937
1203
  f'Argument sample_id should be as the same type as defined in the preprocess response '
938
1204
  f'{preprocess_response.sample_id_type}. Got {type(sample_id)}.')
939
1205
 
940
1206
  def _validate_result(result):
1207
+ validate_output_structure(result, func_name=user_function.__name__, expected_type_name = "np.ndarray",gt_flag=True)
941
1208
  assert isinstance(result, np.ndarray), \
942
- (f'tensorleap_gt_encoder validation failed: '
1209
+ (f'{user_function.__name__}() validation failed: '
943
1210
  f'Unsupported return type. Should be a numpy array. Got {type(result)}.')
944
1211
  assert result.dtype == np.float32, \
945
- (f'tensorleap_gt_encoder validation failed: '
1212
+ (f'{user_function.__name__}() validation failed: '
946
1213
  f'The return type should be a numpy array of type float32. Got {result.dtype}.')
947
1214
 
948
1215
  def inner_without_validate(sample_id, preprocess_response):
@@ -959,7 +1226,10 @@ def tensorleap_gt_encoder(name: str):
959
1226
  leap_binder.set_ground_truth(inner_without_validate, name)
960
1227
 
961
1228
 
962
- def inner(sample_id, preprocess_response):
1229
+ def inner(*args, **kwargs):
1230
+ validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
1231
+ func_name=user_function.__name__, expected_names=["idx", "preprocess"], **kwargs)
1232
+ sample_id, preprocess_response = args if len(args) != 0 else kwargs.values()
963
1233
  _validate_input_args(sample_id, preprocess_response)
964
1234
 
965
1235
  result = inner_without_validate(sample_id, preprocess_response)
@@ -967,13 +1237,15 @@ def tensorleap_gt_encoder(name: str):
967
1237
  _validate_result(result)
968
1238
 
969
1239
  if _called_from_inside_tl_decorator == 0 and _called_from_inside_tl_integration_test_decorator:
1240
+ batch_warning(result, user_function.__name__)
970
1241
  result = np.expand_dims(result, axis=0)
971
-
1242
+ if _update_env_status is not None:
1243
+ _update_env_status("tensorleap_gt_encoder", "v")
972
1244
  return result
973
1245
 
974
1246
  inner.node_mapping = NodeMapping(name, NodeMappingType.GroundTruth)
975
1247
 
976
- def mapping_inner(sample_id, preprocess_response):
1248
+ def mapping_inner(*args, **kwargs):
977
1249
  class TempMapping:
978
1250
  pass
979
1251
 
@@ -984,11 +1256,11 @@ def tensorleap_gt_encoder(name: str):
984
1256
 
985
1257
  mapping_inner.node_mapping = NodeMapping(name, NodeMappingType.GroundTruth)
986
1258
 
987
- def final_inner(sample_id, preprocess_response):
1259
+ def final_inner(*args, **kwargs):
988
1260
  if os.environ.get(mapping_runtime_mode_env_var_mame):
989
- return mapping_inner(sample_id, preprocess_response)
1261
+ return mapping_inner(*args, **kwargs)
990
1262
  else:
991
- return inner(sample_id, preprocess_response)
1263
+ return inner(*args, **kwargs)
992
1264
 
993
1265
  final_inner.node_mapping = NodeMapping(name, NodeMappingType.GroundTruth)
994
1266
 
@@ -1009,28 +1281,37 @@ def tensorleap_custom_loss(name: str, connects_to=None):
1009
1281
  valid_types = (np.ndarray, SamplePreprocessResponse)
1010
1282
 
1011
1283
  def _validate_input_args(*args, **kwargs):
1284
+ assert len(args) > 0 and len(kwargs)==0, (
1285
+ f"{user_function.__name__}() validation failed: "
1286
+ f"Expected at least one positional|key-word argument of the allowed types (np.ndarray|SamplePreprocessResponse|list(np.ndarray|SamplePreprocessResponse)). "
1287
+ f"but received none. "
1288
+ f"Correct usage example: {user_function.__name__}(input_array: np.ndarray, ...)"
1289
+ )
1012
1290
  for i, arg in enumerate(args):
1013
1291
  if isinstance(arg, list):
1014
1292
  for y, elem in enumerate(arg):
1015
- assert isinstance(elem, valid_types), (f'tensorleap_custom_loss validation failed: '
1293
+ assert isinstance(elem, valid_types), (f'{user_function.__name__}() validation failed: '
1016
1294
  f'Element #{y} of list should be a numpy array. Got {type(elem)}.')
1017
1295
  else:
1018
- assert isinstance(arg, valid_types), (f'tensorleap_custom_loss validation failed: '
1296
+ assert isinstance(arg, valid_types), (f'{user_function.__name__}() validation failed: '
1019
1297
  f'Argument #{i} should be a numpy array. Got {type(arg)}.')
1020
1298
  for _arg_name, arg in kwargs.items():
1021
1299
  if isinstance(arg, list):
1022
1300
  for y, elem in enumerate(arg):
1023
- assert isinstance(elem, valid_types), (f'tensorleap_custom_loss validation failed: '
1301
+ assert isinstance(elem, valid_types), (f'{user_function.__name__}() validation failed: '
1024
1302
  f'Element #{y} of list should be a numpy array. Got {type(elem)}.')
1025
1303
  else:
1026
- assert isinstance(arg, valid_types), (f'tensorleap_custom_loss validation failed: '
1304
+ assert isinstance(arg, valid_types), (f'{user_function.__name__}() validation failed: '
1027
1305
  f'Argument #{_arg_name} should be a numpy array. Got {type(arg)}.')
1028
1306
 
1029
1307
  def _validate_result(result):
1308
+ validate_output_structure(result, func_name=user_function.__name__,
1309
+ expected_type_name="np.ndarray")
1030
1310
  assert isinstance(result, np.ndarray), \
1031
- (f'tensorleap_custom_loss validation failed: '
1311
+ (f'{user_function.__name__} validation failed: '
1032
1312
  f'The return type should be a numpy array. Got {type(result)}.')
1033
-
1313
+ assert result.ndim<2 ,(f'{user_function.__name__} validation failed: '
1314
+ f'The returned array should have at most 1 dimension, but got {result.ndim} dimensions.')
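
For illustration, a loss output that satisfies the new shape check above: one scalar per sample in a 1-D array (shapes are made up for the sketch):

    import numpy as np

    batch_size = 4
    predictions = np.random.rand(batch_size, 10).astype(np.float32)
    ground_truth = np.random.rand(batch_size, 10).astype(np.float32)
    per_sample_loss = np.mean(np.abs(predictions - ground_truth), axis=1)   # shape (4,)
    assert per_sample_loss.ndim == 1
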
1034
1315
 
1035
1316
  @functools.wraps(user_function)
1036
1317
  def inner_without_validate(*args, **kwargs):
@@ -1061,6 +1342,9 @@ def tensorleap_custom_loss(name: str, connects_to=None):
1061
1342
  result = inner_without_validate(*args, **kwargs)
1062
1343
 
1063
1344
  _validate_result(result)
1345
+ if _update_env_status is not None:
1346
+ _update_env_status("tensorleap_custom_loss", "v")
1347
+
1064
1348
  return result
1065
1349
 
1066
1350
  def mapping_inner(*args, **kwargs):
@@ -1117,3 +1401,94 @@ def tensorleap_custom_layer(name: str):
1117
1401
  return custom_layer
1118
1402
 
1119
1403
  return decorating_function
1404
+
1405
+
1406
+ def tensorleap_status_table():
1407
+ '''
1408
+ Usage example:
1409
+ ###################
1410
+ leap_integration.py
1411
+ ###################
1412
+ from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_status_table
1413
+ ...
1414
+ ...
1415
+ ...
1416
+ if __name__ == '__main__':
1417
+ tensorleap_status_table()
1418
+ ...
1419
+ '''
1420
+ import atexit
1421
+ import sys
1422
+ import traceback
1423
+ CHECK = "✅"
1424
+ CROSS = "❌"
1425
+
1426
+ table = [
1427
+ {"name": "tensorleap_preprocess", "Added to integration": CROSS},
1428
+ {"name": "tensorleap_integration_test", "Added to integration": CROSS},
1429
+ {"name": "tensorleap_input_encoder", "Added to integration": CROSS},
1430
+ {"name": "tensorleap_gt_encoder", "Added to integration": CROSS},
1431
+ {"name": "tensorleap_load_model", "Added to integration": CROSS},
1432
+ {"name": "tensorleap_custom_loss", "Added to integration": CROSS},
1433
+ {"name": "tensorleap_custom_metric (optional)", "Added to integration": CROSS},
1434
+ {"name": "tensorleap_metadata (optional)", "Added to integration": CROSS},
1435
+ {"name": "tensorleap_custom_visualizer (optional)", "Added to integration": CROSS},
1436
+
1437
+ ]
1438
+
1439
+ _finalizer_called = {"done": False}
1440
+
1441
+ def _remove_suffix(s: str, suffix: str) -> str:
1442
+ # This is needed because str.removesuffix was only introduced in Python 3.9
1443
+ if suffix and s.endswith(suffix):
1444
+ return s[:-len(suffix)]
1445
+ return s
1446
+
1447
+ def _print_table():
1448
+ ready_mess = "\nAll parts have been successfully set. If no errors accured, you can now push the project to the Tensorleap system."
1449
+ not_ready_mess = "\nSome mandatory components have not yet been added to the integration test. The recommended next interface to add is: "
1450
+ mandatory_ready_mess = "\nAll mandatory parts have been successfully set. If no errors occurred, you can now push the project to the Tensorleap system or continue to the next recommended optional interface, adding: "
1451
+
1452
+ name_width = max(len(row["name"]) for row in table)
1453
+ status_width = max(len(row["Added to integration"]) for row in table)
1454
+ header = f"{'Decorator Name'.ljust(name_width)} | {'Added to integration'.ljust(status_width)}"
1455
+ sep = "-" * len(header)
1456
+ print("\n" + header)
1457
+ print(sep)
1458
+ ready=True
1459
+ for row in table:
1460
+ print(f"{row['name'].ljust(name_width)} | {row['Added to integration'].ljust(status_width)}")
1461
+ if row['Added to integration'] == CROSS and ready:
1462
+ ready=False
1463
+ next_step=row['name']
1464
+
1465
+
1466
+ print(ready_mess) if ready else print(mandatory_ready_mess+next_step) if "optional" in next_step else print(not_ready_mess+next_step)
1467
+ def update_env_params(name: str, status: str = "✓"):
1468
+ for row in table:
1469
+ if _remove_suffix(row["name"]," (optional)") == name:
1470
+ row["Added to integration"] = CHECK if status=="v" else CROSS
1471
+ break
1472
+ def run_on_exit():
1473
+ if _finalizer_called["done"]:
1474
+ return
1475
+ _finalizer_called["done"] = True
1476
+ _print_table()
1477
+ def handle_exception(exc_type, exc_value, exc_traceback):
1478
+ traceback.print_exception(exc_type, exc_value, exc_traceback)
1479
+ run_on_exit()
1480
+ atexit.register(run_on_exit)
1481
+ sys.excepthook = handle_exception
1482
+ global _update_env_status
1483
+ _update_env_status = update_env_params
1484
+ return update_env_params
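
The status table above hooks into interpreter shutdown; below is a standalone sketch of the same atexit/excepthook pattern it uses, including the guard that prevents a double print when an uncaught error triggers both paths (the summary text is illustrative only):

    import atexit
    import sys
    import traceback

    _printed = {"done": False}

    def print_summary():
        if _printed["done"]:
            return
        _printed["done"] = True
        print("run finished; the status table would be printed here")

    def handle_exception(exc_type, exc_value, exc_traceback):
        traceback.print_exception(exc_type, exc_value, exc_traceback)
        print_summary()                      # still print the summary on an uncaught error

    atexit.register(print_summary)           # normal interpreter exit
    sys.excepthook = handle_exception        # uncaught exceptions
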
1485
+
1486
+
1487
+
1488
+
1489
+
1490
+
1491
+
1492
+
1493
+
1494
+
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: code-loader
3
- Version: 1.0.138
3
+ Version: 1.0.139
4
4
  Summary:
5
5
  Home-page: https://github.com/tensorleap/code-loader
6
6
  License: MIT
@@ -1,7 +1,7 @@
1
1
  LICENSE,sha256=qIwWjdspQeSMTtnFZBC8MuT-95L02FPvzRUdWFxrwJY,1067
2
2
  code_loader/__init__.py,sha256=outxRQ0M-zMfV0QGVJmAed5qWfRmyD0TV6-goEGAzBw,406
3
3
  code_loader/contract/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
- code_loader/contract/datasetclasses.py,sha256=u0gfDDy02skhFG3ejJOxqxCnykhAcBPGJfv8Bi4s9eQ,8966
4
+ code_loader/contract/datasetclasses.py,sha256=hkHMU1upWzVWkjIBoU3r14XPDleicif4Oia2xfuHgvQ,9395
5
5
  code_loader/contract/enums.py,sha256=GEFkvUMXnCNt-GOoz7NJ9ecQZ2PPDettJNOsxsiM0wk,1622
6
6
  code_loader/contract/exceptions.py,sha256=jWqu5i7t-0IG0jGRsKF4DjJdrsdpJjIYpUkN1F4RiyQ,51
7
7
  code_loader/contract/mapping.py,sha256=sWJhpng-IkOzQnWQdMT5w2ZZ3X1Z_OOzSwCLXIS7oxE,1446
@@ -21,7 +21,7 @@ code_loader/experiment_api/utils.py,sha256=XZHtxge12TS4H4-8PjV3sKuhp8Ud6ojAiIzTZ
21
21
  code_loader/experiment_api/workingspace_config_utils.py,sha256=DLzXQCg4dgTV_YgaSbeTVzq-2ja_SQw4zi7LXwKL9cY,990
22
22
  code_loader/inner_leap_binder/__init__.py,sha256=koOlJyMNYzGbEsoIbXathSmQ-L38N_pEXH_HvL7beXU,99
23
23
  code_loader/inner_leap_binder/leapbinder.py,sha256=Q3D9yVM-GNEJfYRFvMV__BoZbcWOgnWKhrZXAv6Tu7o,33232
24
- code_loader/inner_leap_binder/leapbinder_decorators.py,sha256=7lykS1VoaO58HIf_ysegvFxYDVI8x3AYMMa_ODz2fXA,51767
24
+ code_loader/inner_leap_binder/leapbinder_decorators.py,sha256=Trnl8PDQn_TDawx4KQJ14S_Y2DHhUdRNUvn4sSqbm-Q,70809
25
25
  code_loader/leaploader.py,sha256=6D6xZzMI6qSNIb3tuKLB3BbK5H8QS1_r7iQjIXO3OkM,29795
26
26
  code_loader/leaploaderbase.py,sha256=LIFcC6xo6V_iiGN3BjibXETu_l84EWM_WIOKAvkfTiM,4458
27
27
  code_loader/mixpanel_tracker.py,sha256=l9z_szKKQ7apEbdNZpGH1TKAiT_TsBHb9AQnePaWTyo,4942
@@ -31,7 +31,7 @@ code_loader/plot_functions/visualize.py,sha256=gsBAYYkwMh7jIpJeDMPS8G4CW-pxwx6Lz
31
31
  code_loader/utils.py,sha256=gXENTYpjdidq2dx0gVbXlErPeHoNs-4TYAZbLRe0y2c,2712
32
32
  code_loader/visualizers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
33
33
  code_loader/visualizers/default_visualizers.py,sha256=onRnLE_TXfgLN4o52hQIOOhUcFexGlqJ3xSpQDVLuZM,2604
34
- code_loader-1.0.138.dist-info/LICENSE,sha256=qIwWjdspQeSMTtnFZBC8MuT-95L02FPvzRUdWFxrwJY,1067
35
- code_loader-1.0.138.dist-info/METADATA,sha256=7M1qS36MXJD1rW11zA_wVk9HIYnji24zW427h5dGErw,1090
36
- code_loader-1.0.138.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
37
- code_loader-1.0.138.dist-info/RECORD,,
34
+ code_loader-1.0.139.dist-info/LICENSE,sha256=qIwWjdspQeSMTtnFZBC8MuT-95L02FPvzRUdWFxrwJY,1067
35
+ code_loader-1.0.139.dist-info/METADATA,sha256=gmtUBLcGyi29BS3UAF5D2t4ZJWOr8JAB2aADUXbuzzg,1090
36
+ code_loader-1.0.139.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
37
+ code_loader-1.0.139.dist-info/RECORD,,
@@ -1,4 +1,4 @@
1
1
  Wheel-Version: 1.0
2
- Generator: poetry-core 1.9.0
2
+ Generator: poetry-core 1.9.1
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any