code-loader 1.0.137__tar.gz → 1.0.139.dev1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of code-loader might be problematic.

Files changed (36)
  1. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/PKG-INFO +1 -1
  2. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/contract/datasetclasses.py +9 -1
  3. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/inner_leap_binder/leapbinder_decorators.py +349 -91
  4. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/leaploader.py +2 -2
  5. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/pyproject.toml +1 -1
  6. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/LICENSE +0 -0
  7. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/README.md +0 -0
  8. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/__init__.py +0 -0
  9. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/contract/__init__.py +0 -0
  10. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/contract/enums.py +0 -0
  11. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/contract/exceptions.py +0 -0
  12. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/contract/mapping.py +0 -0
  13. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/contract/responsedataclasses.py +0 -0
  14. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/contract/visualizer_classes.py +0 -0
  15. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/default_losses.py +0 -0
  16. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/default_metrics.py +0 -0
  17. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/__init__.py +0 -0
  18. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/api.py +0 -0
  19. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/cli_config_utils.py +0 -0
  20. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/client.py +0 -0
  21. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/epoch.py +0 -0
  22. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/experiment.py +0 -0
  23. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/experiment_context.py +0 -0
  24. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/types.py +0 -0
  25. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/utils.py +0 -0
  26. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/experiment_api/workingspace_config_utils.py +0 -0
  27. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/inner_leap_binder/__init__.py +0 -0
  28. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/inner_leap_binder/leapbinder.py +0 -0
  29. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/leaploaderbase.py +0 -0
  30. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/mixpanel_tracker.py +0 -0
  31. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/plot_functions/__init__.py +0 -0
  32. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/plot_functions/plot_functions.py +0 -0
  33. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/plot_functions/visualize.py +0 -0
  34. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/utils.py +0 -0
  35. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/visualizers/__init__.py +0 -0
  36. {code_loader-1.0.137 → code_loader-1.0.139.dev1}/code_loader/visualizers/default_visualizers.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: code-loader
- Version: 1.0.137
+ Version: 1.0.139.dev1
  Summary:
  Home-page: https://github.com/tensorleap/code-loader
  License: MIT
code_loader/contract/datasetclasses.py
@@ -1,3 +1,4 @@
+ import warnings
  from dataclasses import dataclass, field
  from typing import Any, Callable, List, Optional, Dict, Union, Type
  import re
@@ -56,7 +57,14 @@ class PreprocessResponse:
  for sample_id in self.sample_ids:
  assert isinstance(sample_id, str), f"Sample id should be of type str. Got: {type(sample_id)}"
  else:
- raise Exception("length is deprecated.")
+ raise Exception("length is deprecated, please use sample_ids instead.")
+
+ if self.state is None:
+ warnings.warn(
+ "PreprocessResponse.state is not set. For best practice, assign a unique `state` value to each PreprocessResponse instance."
+ )
+ else:
+ assert isinstance(self.state, DataStateType), f"PreprocessResponse.state must be of type {DataStateType.__name__} but got {type(self.state)}"

  def __hash__(self) -> int:
  return id(self)
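
For illustration only (not part of the released package): a minimal sketch of a call that satisfies the new checks, assuming PreprocessResponse accepts sample_ids and state as keyword arguments and that DataStateType is importable from code_loader.contract.enums.

    # Hedged sketch; field names and import paths are assumptions.
    from code_loader.contract.datasetclasses import PreprocessResponse
    from code_loader.contract.enums import DataStateType  # import path is an assumption

    train_response = PreprocessResponse(
        sample_ids=[str(i) for i in range(100)],  # string ids, matching the assert above
        state=DataStateType.training,             # explicit DataStateType avoids the new warning
    )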
code_loader/inner_leap_binder/leapbinder_decorators.py
@@ -1,9 +1,10 @@
  # mypy: ignore-errors
  import os
+ import warnings
  from collections import defaultdict
  from functools import lru_cache
  from pathlib import Path
- from typing import Optional, Union, Callable, List, Dict
+ from typing import Optional, Union, Callable, List, Dict, get_args, get_origin

  import numpy as np
  import numpy.typing as npt
@@ -27,6 +28,113 @@ _called_from_inside_tl_decorator = 0
  _called_from_inside_tl_integration_test_decorator = False


+
+
+ def validate_args_structure(*args, types_order, func_name, expected_names, **kwargs):
+ def _type_to_str(t):
+ origin = get_origin(t)
+ if origin is Union:
+ return " | ".join(tt.__name__ for tt in get_args(t))
+ elif hasattr(t, "__name__"):
+ return t.__name__
+ else:
+ return str(t)
+
+ def _format_types(types, names=None):
+ return ", ".join(
+ f"{(names[i] + ': ') if names else f'arg{i}: '}{_type_to_str(ty)}"
+ for i, ty in enumerate(types)
+ )
+
+ if expected_names:
+ normalized_args = []
+ for i, name in enumerate(expected_names):
+ if i < len(args):
+ normalized_args.append(args[i])
+ elif name in kwargs:
+ normalized_args.append(kwargs[name])
+ else:
+ raise AssertionError(
+ f"{func_name} validation failed: "
+ f"Missing required argument '{name}'. "
+ f"Expected arguments: {expected_names}."
+ )
+ else:
+ normalized_args = list(args)
+ if len(normalized_args) != len(types_order):
+ expected = _format_types(types_order, expected_names)
+ got_types = ", ".join(type(arg).__name__ for arg in normalized_args)
+ raise AssertionError(
+ f"{func_name} validation failed: "
+ f"Expected exactly {len(types_order)} arguments ({expected}), "
+ f"but got {len(normalized_args)} argument(s) of type(s): ({got_types}). "
+ f"Correct usage example: {func_name}({expected})"
+ )
+
+ for i, (arg, expected_type) in enumerate(zip(normalized_args, types_order)):
+ origin = get_origin(expected_type)
+ if origin is Union:
+ allowed_types = get_args(expected_type)
+ else:
+ allowed_types = (expected_type,)
+
+ if not isinstance(arg, allowed_types):
+ allowed_str = " | ".join(t.__name__ for t in allowed_types)
+ raise AssertionError(
+ f"{func_name} validation failed: "
+ f"Argument '{expected_names[i] if expected_names else f'arg{i}'}' "
+ f"expected type {allowed_str}, but got {type(arg).__name__}. "
+ f"Correct usage example: {func_name}({_format_types(types_order, expected_names)})"
+ )
+
+
+ def validate_output_structure(result, func_name: str, expected_type_name="np.ndarray",gt_flag=False):
+ if result is None or (isinstance(result, float) and np.isnan(result)):
+ if gt_flag:
+ raise AssertionError(
+ f"{func_name} validation failed: "
+ f"The function returned {result!r}. "
+ f"If you are working with an unlabeled dataset and no ground truth is available, "
+ f"use 'return np.array([], dtype=np.float32)' instead. "
+ f"Otherwise, {func_name} expected a single {expected_type_name} object. "
+ f"Make sure the function ends with 'return <{expected_type_name}>'."
+ )
+
+ raise AssertionError(
+ f"{func_name} validation failed: "
+ f"The function returned None. "
+ f"Expected a single {expected_type_name} object. "
+ f"Make sure the function ends with 'return <{expected_type_name}>'."
+ )
+ if isinstance(result, tuple):
+ element_descriptions = [
+ f"[{i}] type: {type(r).__name__}"
+ for i, r in enumerate(result)
+ ]
+ element_summary = "\n ".join(element_descriptions)
+
+ raise AssertionError(
+ f"{func_name} validation failed: "
+ f"The function returned multiple outputs ({len(result)} values), "
+ f"but only a single {expected_type_name} is allowed.\n\n"
+ f"Returned elements:\n"
+ f" {element_summary}\n\n"
+ f"Correct usage example:\n"
+ f" def {func_name}(...):\n"
+ f" return <{expected_type_name}>\n\n"
+ f"If you intended to return multiple values, combine them into a single "
+ f"{expected_type_name} (e.g., by concatenation or stacking)."
+ )
+
+ def batch_warning(result, func_name):
+ if result.shape[0] == 1:
+ warnings.warn(
+ f"{func_name} warning: Tensorleap will add a batch dimension at axis 0 to the output of {func_name}, "
+ f"although the detected size of axis 0 is already 1. "
+ f"This may lead to an extra batch dimension (e.g., shape (1, 1, ...)). "
+ f"Please ensure that the output of '{func_name}' is not already batched "
+ f"to avoid computation errors."
+ )
  def _add_mapping_connection(user_unique_name, connection_destinations, arg_names, name, node_mapping_type):
  connection_destinations = [connection_destination for connection_destination in connection_destinations
  if not isinstance(connection_destination, SamplePreprocessResponse)]
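
For illustration only (not part of the released package): a minimal sketch of how the new validate_args_structure helper reports a missing argument; the function and argument names used here are placeholders.

    from typing import Union
    from code_loader.inner_leap_binder.leapbinder_decorators import validate_args_structure

    try:
        # Only "idx" is supplied, so the helper raises AssertionError naming the
        # missing "preprocess" argument and showing the expected signature.
        validate_args_structure("sample-1",
                                types_order=[Union[int, str], dict],
                                func_name="my_encoder",
                                expected_names=["idx", "preprocess"])
    except AssertionError as err:
        print(err)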
@@ -49,12 +157,23 @@ def tensorleap_integration_test():
  def decorating_function(integration_test_function: Callable):
  leap_binder.integration_test_func = integration_test_function

+ def _validate_input_args(*args, **kwargs):
+ sample_id,preprocess_response=args
+ assert type(sample_id) == preprocess_response.sample_id_type, (
+ f"tensorleap_integration_test validation failed: "
+ f"sample_id type ({type(sample_id).__name__}) does not match the expected "
+ f"type ({preprocess_response.sample_id_type}) from the PreprocessResponse."
+ )
+
  def inner(*args, **kwargs):
+ validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
+ func_name='integration_test',expected_names=["idx", "preprocess"],**kwargs)
+ _validate_input_args(*args, **kwargs)
+
  global _called_from_inside_tl_integration_test_decorator
  try:
  _called_from_inside_tl_integration_test_decorator = True
  ret = integration_test_function(*args, **kwargs)
-
  try:
  os.environ[mapping_runtime_mode_env_var_mame] = 'True'
  integration_test_function(None, PreprocessResponse(state=DataStateType.training, length=0))
@@ -65,7 +184,7 @@ def tensorleap_integration_test():
  line_number = first_tb.lineno
  if isinstance(e, TypeError) and 'is not subscriptable' in str(e):
  print(f'Invalid integration code. File {file_name}, line {line_number}: '
- f'Please remove this indexing operation usage from the integration test code.')
+ f"indexing is supported only on the model's predictions inside the integration test. Please remove this indexing operation usage from the integration test code.")
  else:
  print(f'Invalid integration code. File {file_name}, line {line_number}: '
  f'Integration test is only allowed to call Tensorleap decorators. '
@@ -77,9 +196,9 @@ def tensorleap_integration_test():
  _called_from_inside_tl_integration_test_decorator = False

  leap_binder.check()
-
  return inner

+
  return decorating_function

  def _safe_get_item(key):
@@ -89,18 +208,51 @@ def _safe_get_item(key):
  raise Exception(f'Tensorleap currently supports models with no more then 10 inputs')

  def tensorleap_load_model(prediction_types: Optional[List[PredictionTypeHandler]] = []):
+ assert isinstance(prediction_types, list),(
+ f"tensorleap_load_model validation failed: "
+ f" prediction_types is an optional argument of type List[PredictionTypeHandler]] but got {type(prediction_types).__name__}."
+ )
  for i, prediction_type in enumerate(prediction_types):
+ assert isinstance(prediction_type, PredictionTypeHandler),(f"tensorleap_load_model validation failed: "
+ f" prediction_types at position {i} must be of type PredictionTypeHandler but got {type(prediction_types[i]).__name__}.")
  leap_binder.add_prediction(prediction_type.name, prediction_type.labels, prediction_type.channel_dim, i)

+ def _validate_result(result) -> None:
+ valid_types=["onnxruntime","keras"]
+ err_message=f"tensorleap_load_model validation failed:\nSupported models are Keras and onnxruntime only and non of them was returned."
+ validate_output_structure(result, func_name="tensorleap_load_model", expected_type_name= [" | ".join(t for t in valid_types)][0])
+ try:
+ import keras
+ except ImportError:
+ keras = None
+ try:
+ import onnxruntime
+ except ImportError:
+ onnxruntime = None
+
+ if not keras and not onnxruntime:
+ raise AssertionError(err_message)
+
+ is_keras_model = bool(keras and isinstance(result, getattr(keras, "Model", tuple())))
+ is_onnx_model = bool(onnxruntime and isinstance(result, onnxruntime.InferenceSession))
+
+ if not any([is_keras_model, is_onnx_model]):
+ raise AssertionError( err_message)
+
+
+
  def decorating_function(load_model_func):
  class TempMapping:
  pass

  @lru_cache()
- def inner():
+ def inner(*args, **kwargs):
+ validate_args_structure(*args, types_order=[],
+ func_name='tensorleap_load_model',expected_names=[],**kwargs)
  class ModelPlaceholder:
  def __init__(self):
- self.model = load_model_func()
+ self.model = load_model_func() #TODO- check why this fails on onnx model
+ _validate_result(self.model)

  # keras interface
  def __call__(self, arg):
@@ -228,11 +380,11 @@ def tensorleap_load_model(prediction_types: Optional[List[PredictionTypeHandler]

  return ModelPlaceholder()

- def final_inner():
+ def final_inner(*args, **kwargs):
  if os.environ.get(mapping_runtime_mode_env_var_mame):
  return mapping_inner()
  else:
- return inner()
+ return inner(*args, **kwargs)

  return final_inner

@@ -244,81 +396,168 @@ def tensorleap_custom_metric(name: str,
  compute_insights: Optional[Union[bool, Dict[str, bool]]] = None,
  connects_to=None):
  name_to_unique_name = defaultdict(set)
-
  def decorating_function(
  user_function: Union[CustomCallableInterfaceMultiArgs, CustomMultipleReturnCallableInterfaceMultiArgs,
  ConfusionMatrixCallableInterfaceMultiArgs]):
+
+ def _validate_decorators_signature():
+ err_message = f"{user_function.__name__} validation failed.\n"
+ if not isinstance(name, str):
+ raise TypeError(err_message + f"`name` must be a string, got type {type(name).__name__}.")
+ valid_directions = {MetricDirection.Upward, MetricDirection.Downward}
+ if isinstance(direction, MetricDirection):
+ if direction not in valid_directions:
+ raise ValueError(
+ err_message +
+ f"Invalid MetricDirection: {direction}. Must be one of {valid_directions}, "
+ f"got type {type(direction).__name__}."
+ )
+ elif isinstance(direction, dict):
+ if not all(isinstance(k, str) for k in direction.keys()):
+ invalid_keys = {k: type(k).__name__ for k in direction.keys() if not isinstance(k, str)}
+ raise TypeError(
+ err_message +
+ f"All keys in `direction` must be strings, got invalid key types: {invalid_keys}."
+ )
+ for k, v in direction.items():
+ if v not in valid_directions:
+ raise ValueError(
+ err_message +
+ f"Invalid direction for key '{k}': {v}. Must be one of {valid_directions}, "
+ f"got type {type(v).__name__}."
+ )
+ else:
+ raise TypeError(
+ err_message +
+ f"`direction` must be a MetricDirection or a Dict[str, MetricDirection], "
+ f"got type {type(direction).__name__}."
+ )
+ if compute_insights is not None:
+ if not isinstance(compute_insights, (bool, dict)):
+ raise TypeError(
+ err_message +
+ f"`compute_insights` must be a bool or a Dict[str, bool], "
+ f"got type {type(compute_insights).__name__}."
+ )
+ if isinstance(compute_insights, dict):
+ if not all(isinstance(k, str) for k in compute_insights.keys()):
+ invalid_keys = {k: type(k).__name__ for k in compute_insights.keys() if not isinstance(k, str)}
+ raise TypeError(
+ err_message +
+ f"All keys in `compute_insights` must be strings, got invalid key types: {invalid_keys}."
+ )
+ for k, v in compute_insights.items():
+ if not isinstance(v, bool):
+ raise TypeError(
+ err_message +
+ f"Invalid type for compute_insights['{k}']: expected bool, got type {type(v).__name__}."
+ )
+ if connects_to is not None:
+ valid_types = (str, list, tuple, set)
+ if not isinstance(connects_to, valid_types):
+ raise TypeError(
+ err_message +
+ f"`connects_to` must be one of {valid_types}, got type {type(connects_to).__name__}."
+ )
+ if isinstance(connects_to, (list, tuple, set)):
+ invalid_elems = [f"{type(e).__name__}" for e in connects_to if not isinstance(e, str)]
+ if invalid_elems:
+ raise TypeError(
+ err_message +
+ f"All elements in `connects_to` must be strings, "
+ f"but found element types: {invalid_elems}."
+ )
+
+
+ _validate_decorators_signature()
+
  for metric_handler in leap_binder.setup_container.metrics:
  if metric_handler.metric_handler_data.name == name:
  raise Exception(f'Metric with name {name} already exists. '
  f'Please choose another')

  def _validate_input_args(*args, **kwargs) -> None:
+ assert len(args) > 0, (
+ f"{user_function.__name__}() validation failed: "
+ f"Expected at least one positional|key-word argument of type np.ndarray, "
+ f"but received none. "
+ f"Correct usage example: tensorleap_custom_metric(input_array: np.ndarray, ...)"
+ )
  for i, arg in enumerate(args):
  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
- f'tensorleap_custom_metric validation failed: '
+ f'{user_function.__name__}() validation failed: '
  f'Argument #{i} should be a numpy array. Got {type(arg)}.')
  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
  assert arg.shape[0] == leap_binder.batch_size_to_validate, \
- (f'tensorleap_custom_metric validation failed: Argument #{i} '
+ (f'{user_function.__name__}() validation failed: Argument #{i} '
  f'first dim should be as the batch size. Got {arg.shape[0]} '
  f'instead of {leap_binder.batch_size_to_validate}')

  for _arg_name, arg in kwargs.items():
  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
- f'tensorleap_custom_metric validation failed: '
+ f'{user_function.__name__}() validation failed: '
  f'Argument {_arg_name} should be a numpy array. Got {type(arg)}.')
  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
  assert arg.shape[0] == leap_binder.batch_size_to_validate, \
- (f'tensorleap_custom_metric validation failed: Argument {_arg_name} '
+ (f'{user_function.__name__}() validation failed: Argument {_arg_name} '
  f'first dim should be as the batch size. Got {arg.shape[0]} '
  f'instead of {leap_binder.batch_size_to_validate}')

  def _validate_result(result) -> None:
- supported_types_message = (f'tensorleap_custom_metric validation failed: '
- f'Metric has returned unsupported type. Supported types are List[float], '
- f'List[List[ConfusionMatrixElement]], NDArray[np.float32]. ')
+ validate_output_structure(result, func_name=user_function.__name__,
+ expected_type_name="List[float | int | None | List[ConfusionMatrixElement] ] | NDArray[np.float32] or dictonary with one of these types as its values types")
+ supported_types_message = (f'{user_function.__name__}() validation failed: '
+ f'{user_function.__name__}() has returned unsupported type.\nSupported types are List[float|int|None], '
+ f'List[List[ConfusionMatrixElement]], NDArray[np.float32] or dictonary with one of these types as its values types. ')

- def _validate_single_metric(single_metric_result):
+ def _validate_single_metric(single_metric_result,key=None):
  if isinstance(single_metric_result, list):
  if isinstance(single_metric_result[0], list):
- assert isinstance(single_metric_result[0][0], ConfusionMatrixElement), \
- f'{supported_types_message}Got List[List[{type(single_metric_result[0][0])}]].'
+ assert all(isinstance(cm, ConfusionMatrixElement) for cm in single_metric_result[0]), (
+ f"{supported_types_message} "
+ f"Got {'a dict where the value of ' + str(key) + ' is of type ' if key is not None else ''}"
+ f"List[List[{', '.join(type(cm).__name__ for cm in single_metric_result[0])}]]."
+ )
+
  else:
- assert isinstance(single_metric_result[0], (
- float, int,
- type(None))), f'{supported_types_message}Got List[{type(single_metric_result[0])}].'
+ assert all(isinstance(v, (float,int,type(None),np.float32)) for v in single_metric_result), (
+ f"{supported_types_message}\n"
+ f"Got {'a dict where the value of ' + str(key) + ' is of type ' if key is not None else ''}"
+ f"List[{', '.join(type(v).__name__ for v in single_metric_result)}]."
+ )
  else:
  assert isinstance(single_metric_result,
- np.ndarray), f'{supported_types_message}Got {type(single_metric_result)}.'
- assert len(single_metric_result.shape) == 1, (f'tensorleap_custom_metric validation failed: '
+ np.ndarray), f'{supported_types_message}\nGot {type(single_metric_result)}.'
+ assert len(single_metric_result.shape) == 1, (f'{user_function.__name__}() validation failed: '
  f'The return shape should be 1D. Got {len(single_metric_result.shape)}D.')

  if leap_binder.batch_size_to_validate:
  assert len(single_metric_result) == leap_binder.batch_size_to_validate, \
- f'tensorleap_custom_metrix validation failed: The return len should be as the batch size.'
+ f'{user_function.__name__}() validation failed: The return len {f"of srt{key} value" if key is not None else ""} should be as the batch size.'

  if isinstance(result, dict):
  for key, value in result.items():
+ _validate_single_metric(value,key)
+
  assert isinstance(key, str), \
- (f'tensorleap_custom_metric validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Keys in the return dict should be of type str. Got {type(key)}.')
  _validate_single_metric(value)

  if isinstance(direction, dict):
  for direction_key in direction:
  assert direction_key in result, \
- (f'tensorleap_custom_metric validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Keys in the direction mapping should be part of result keys. Got key {direction_key}.')

  if compute_insights is not None:
  assert isinstance(compute_insights, dict), \
- (f'tensorleap_custom_metric validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'compute_insights should be dict if using the dict results. Got {type(compute_insights)}.')

  for ci_key in compute_insights:
  assert ci_key in result, \
- (f'tensorleap_custom_metric validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Keys in the compute_insights mapping should be part of result keys. Got key {ci_key}.')

  else:
@@ -326,7 +565,7 @@ def tensorleap_custom_metric(name: str,

  if compute_insights is not None:
  assert isinstance(compute_insights, bool), \
- (f'tensorleap_custom_metric validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'compute_insights should be boolean. Got {type(compute_insights)}.')

  @functools.wraps(user_function)
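
For illustration only (not part of the released package): a minimal sketch of a metric that conforms to the tightened result validation by returning a 1-D float32 array with one value per batch sample; the metric name is a placeholder.

    import numpy as np
    from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_custom_metric

    @tensorleap_custom_metric(name="abs_error")
    def abs_error(prediction: np.ndarray, ground_truth: np.ndarray) -> np.ndarray:
        # Reduce every non-batch axis so the result stays 1-D (one scalar per sample).
        per_sample = np.abs(prediction - ground_truth).reshape(prediction.shape[0], -1).mean(axis=1)
        return per_sample.astype(np.float32)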
@@ -397,28 +636,38 @@ def tensorleap_custom_visualizer(name: str, visualizer_type: LeapDataType,
  name_to_unique_name = defaultdict(set)

  def decorating_function(user_function: VisualizerCallableInterface):
+ assert isinstance(visualizer_type,LeapDataType),(f"{user_function.__name__} validation failed: "
+ f"visualizer_type should be of type {LeapDataType.__name__} but got {type(visualizer_type)}"
+ )
+
  for viz_handler in leap_binder.setup_container.visualizers:
  if viz_handler.visualizer_handler_data.name == name:
  raise Exception(f'Visualizer with name {name} already exists. '
  f'Please choose another')

  def _validate_input_args(*args, **kwargs):
+ assert len(args) > 0, (
+ f"{user_function.__name__}() validation failed: "
+ f"Expected at least one positional|key-word argument of type np.ndarray, "
+ f"but received none. "
+ f"Correct usage example: {user_function.__name__}(input_array: np.ndarray, ...)"
+ )
  for i, arg in enumerate(args):
  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
- f'tensorleap_custom_visualizer validation failed: '
+ f'{user_function.__name__}() validation failed: '
  f'Argument #{i} should be a numpy array. Got {type(arg)}.')
  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
  assert arg.shape[0] != leap_binder.batch_size_to_validate, \
- (f'tensorleap_custom_visualizer validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Argument #{i} should be without batch dimension. ')

  for _arg_name, arg in kwargs.items():
  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
- f'tensorleap_custom_visualizer validation failed: '
+ f'{user_function.__name__}() validation failed: '
  f'Argument {_arg_name} should be a numpy array. Got {type(arg)}.')
  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
  assert arg.shape[0] != leap_binder.batch_size_to_validate, \
- (f'tensorleap_custom_visualizer validation failed: Argument {_arg_name} '
+ (f'{user_function.__name__}() validation failed: Argument {_arg_name} '
  f'should be without batch dimension. ')

  def _validate_result(result):
@@ -432,8 +681,11 @@ def tensorleap_custom_visualizer(name: str, visualizer_type: LeapDataType,
  LeapDataType.ImageWithBBox: LeapImageWithBBox,
  LeapDataType.ImageWithHeatmap: LeapImageWithHeatmap
  }
+ validate_output_structure(result, func_name=user_function.__name__,
+ expected_type_name=result_type_map[visualizer_type])
+
  assert isinstance(result, result_type_map[visualizer_type]), \
- (f'tensorleap_custom_visualizer validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'The return type should be {result_type_map[visualizer_type]}. Got {type(result)}.')

  @functools.wraps(user_function)
@@ -506,30 +758,26 @@ def tensorleap_metadata(
  f'Please choose another')

  def _validate_input_args(sample_id: Union[int, str], preprocess_response: PreprocessResponse):
- assert isinstance(sample_id, (int, str)), \
- (f'tensorleap_metadata validation failed: '
- f'Argument sample_id should be either int or str. Got {type(sample_id)}.')
- assert isinstance(preprocess_response, PreprocessResponse), \
- (f'tensorleap_metadata validation failed: '
- f'Argument preprocess_response should be a PreprocessResponse. Got {type(preprocess_response)}.')
  assert type(sample_id) == preprocess_response.sample_id_type, \
- (f'tensorleap_metadata validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Argument sample_id should be as the same type as defined in the preprocess response '
  f'{preprocess_response.sample_id_type}. Got {type(sample_id)}.')

  def _validate_result(result):
  supported_result_types = (type(None), int, str, bool, float, dict, np.floating,
  np.bool_, np.unsignedinteger, np.signedinteger, np.integer)
+ validate_output_structure(result, func_name=user_function.__name__,
+ expected_type_name=supported_result_types)
  assert isinstance(result, supported_result_types), \
- (f'tensorleap_metadata validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Unsupported return type. Got {type(result)}. should be any of {str(supported_result_types)}')
  if isinstance(result, dict):
  for key, value in result.items():
  assert isinstance(key, str), \
- (f'tensorleap_metadata validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Keys in the return dict should be of type str. Got {type(key)}.')
  assert isinstance(value, supported_result_types), \
- (f'tensorleap_metadata validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Values in the return dict should be of type {str(supported_result_types)}. Got {type(value)}.')

  def inner_without_validate(sample_id, preprocess_response):
@@ -546,10 +794,12 @@ def tensorleap_metadata(

  leap_binder.set_metadata(inner_without_validate, name, metadata_type)

- def inner(sample_id, preprocess_response):
+ def inner(*args,**kwargs):
  if os.environ.get(mapping_runtime_mode_env_var_mame):
  return None
-
+ validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
+ func_name=user_function.__name__, expected_names=["idx", "preprocess"],**kwargs)
+ sample_id, preprocess_response = args if len(args)!=0 else kwargs.values()
  _validate_input_args(sample_id, preprocess_response)

  result = inner_without_validate(sample_id, preprocess_response)
@@ -617,19 +867,23 @@ def tensorleap_preprocess():

  def _validate_input_args(*args, **kwargs):
  assert len(args) == 0 and len(kwargs) == 0, \
- (f'tensorleap_preprocess validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'The function should not take any arguments. Got {args} and {kwargs}.')

  def _validate_result(result):
- assert isinstance(result, list), \
- (f'tensorleap_preprocess validation failed: '
- f'The return type should be a list. Got {type(result)}.')
+ assert isinstance(result, list), (
+ f"{user_function.__name__}() validation failed: expected return type list[{PreprocessResponse.__name__}]"
+ f"(e.g., [PreprocessResponse1, PreprocessResponse2, ...]), but returned type is {type(result).__name__}."
+ if not isinstance(result, tuple)
+ else f"{user_function.__name__}() validation failed: expected to return a single list[{PreprocessResponse.__name__}] object, "
+ f"but returned {len(result)} objects instead."
+ )
  for i, response in enumerate(result):
  assert isinstance(response, PreprocessResponse), \
- (f'tensorleap_preprocess validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Element #{i} in the return list should be a PreprocessResponse. Got {type(response)}.')
  assert len(set(result)) == len(result), \
- (f'tensorleap_preprocess validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'The return list should not contain duplicate PreprocessResponse objects.')

  def inner(*args, **kwargs):
@@ -637,7 +891,6 @@ def tensorleap_preprocess():
  return [None, None, None, None]

  _validate_input_args(*args, **kwargs)
-
  result = user_function()
  _validate_result(result)
  return result
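
For illustration only (not part of the released package): a minimal sketch of a preprocess function consistent with these checks and with the new state warning; the import paths, the DataStateType.validation member, and the sample ids are assumptions.

    from code_loader.contract.datasetclasses import PreprocessResponse
    from code_loader.contract.enums import DataStateType  # import path is an assumption
    from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_preprocess

    @tensorleap_preprocess()
    def preprocess():
        train = PreprocessResponse(sample_ids=[f"train_{i}" for i in range(80)],
                                   state=DataStateType.training)
        val = PreprocessResponse(sample_ids=[f"val_{i}" for i in range(20)],
                                 state=DataStateType.validation)  # assumed enum member
        # A plain list of distinct responses; a tuple or duplicates would fail validation.
        return [train, val]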
@@ -838,29 +1091,23 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):
  raise Exception(f"Channel dim for input {name} is expected to be either -1 or positive")

  def _validate_input_args(sample_id: Union[int, str], preprocess_response: PreprocessResponse):
- assert isinstance(sample_id, (int, str)), \
- (f'tensorleap_input_encoder validation failed: '
- f'Argument sample_id should be either int or str. Got {type(sample_id)}.')
- assert isinstance(preprocess_response, PreprocessResponse), \
- (f'tensorleap_input_encoder validation failed: '
- f'Argument preprocess_response should be a PreprocessResponse. Got {type(preprocess_response)}.')
  assert type(sample_id) == preprocess_response.sample_id_type, \
- (f'tensorleap_input_encoder validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Argument sample_id should be as the same type as defined in the preprocess response '
  f'{preprocess_response.sample_id_type}. Got {type(sample_id)}.')

  def _validate_result(result):
+ validate_output_structure(result, func_name=user_function.__name__, expected_type_name = "np.ndarray")
  assert isinstance(result, np.ndarray), \
- (f'tensorleap_input_encoder validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Unsupported return type. Should be a numpy array. Got {type(result)}.')
  assert result.dtype == np.float32, \
- (f'tensorleap_input_encoder validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'The return type should be a numpy array of type float32. Got {result.dtype}.')
- assert channel_dim - 1 <= len(result.shape), (f'tensorleap_input_encoder validation failed: '
+ assert channel_dim - 1 <= len(result.shape), (f'{user_function.__name__}() validation failed: '
  f'The channel_dim ({channel_dim}) should be <= to the rank of the resulting input rank ({len(result.shape)}).')

  def inner_without_validate(sample_id, preprocess_response):
-
  global _called_from_inside_tl_decorator
  _called_from_inside_tl_decorator += 1

@@ -874,7 +1121,10 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):

  leap_binder.set_input(inner_without_validate, name, channel_dim=channel_dim)

- def inner(sample_id, preprocess_response):
+ def inner(*args, **kwargs):
+ validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
+ func_name=user_function.__name__, expected_names=["idx", "preprocess"], **kwargs)
+ sample_id, preprocess_response = args if len(args)!=0 else kwargs.values()
  _validate_input_args(sample_id, preprocess_response)

  result = inner_without_validate(sample_id, preprocess_response)
@@ -882,18 +1132,18 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):
  _validate_result(result)

  if _called_from_inside_tl_decorator == 0 and _called_from_inside_tl_integration_test_decorator:
+ batch_warning(result,user_function.__name__)
  result = np.expand_dims(result, axis=0)

  return result


-
  node_mapping_type = NodeMappingType.Input
  if model_input_index is not None:
  node_mapping_type = NodeMappingType(f'Input{str(model_input_index)}')
  inner.node_mapping = NodeMapping(name, node_mapping_type)

- def mapping_inner(sample_id, preprocess_response):
+ def mapping_inner(*args, **kwargs):
  class TempMapping:
  pass

@@ -905,11 +1155,11 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):

  mapping_inner.node_mapping = NodeMapping(name, node_mapping_type)

- def final_inner(sample_id, preprocess_response):
+ def final_inner(*args, **kwargs):
  if os.environ.get(mapping_runtime_mode_env_var_mame):
- return mapping_inner(sample_id, preprocess_response)
+ return mapping_inner(*args, **kwargs)
  else:
- return inner(sample_id, preprocess_response)
+ return inner(*args, **kwargs)

  final_inner.node_mapping = NodeMapping(name, node_mapping_type)

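
For illustration only (not part of the released package): a minimal sketch of an input encoder under the new wrapper, which now accepts the pair positionally or as the keyword arguments idx and preprocess; the returned array is a placeholder.

    import numpy as np
    from code_loader.contract.datasetclasses import PreprocessResponse
    from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_input_encoder

    @tensorleap_input_encoder(name="image")
    def input_encoder(idx: str, preprocess: PreprocessResponse) -> np.ndarray:
        # A real encoder would load the sample identified by idx from preprocess;
        # the result must be float32, and channel_dim (default -1) must fit its rank.
        return np.zeros((32, 32, 3), dtype=np.float32)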
@@ -926,23 +1176,18 @@ def tensorleap_gt_encoder(name: str):
  f'Please choose another')

  def _validate_input_args(sample_id: Union[int, str], preprocess_response: PreprocessResponse):
- assert isinstance(sample_id, (int, str)), \
- (f'tensorleap_gt_encoder validation failed: '
- f'Argument sample_id should be either int or str. Got {type(sample_id)}.')
- assert isinstance(preprocess_response, PreprocessResponse), \
- (f'tensorleap_gt_encoder validation failed: '
- f'Argument preprocess_response should be a PreprocessResponse. Got {type(preprocess_response)}.')
  assert type(sample_id) == preprocess_response.sample_id_type, \
- (f'tensorleap_gt_encoder validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Argument sample_id should be as the same type as defined in the preprocess response '
  f'{preprocess_response.sample_id_type}. Got {type(sample_id)}.')

  def _validate_result(result):
+ validate_output_structure(result, func_name=user_function.__name__, expected_type_name = "np.ndarray",gt_flag=True)
  assert isinstance(result, np.ndarray), \
- (f'tensorleap_gt_encoder validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'Unsupported return type. Should be a numpy array. Got {type(result)}.')
  assert result.dtype == np.float32, \
- (f'tensorleap_gt_encoder validation failed: '
+ (f'{user_function.__name__}() validation failed: '
  f'The return type should be a numpy array of type float32. Got {result.dtype}.')

  def inner_without_validate(sample_id, preprocess_response):
@@ -959,7 +1204,10 @@ def tensorleap_gt_encoder(name: str):
  leap_binder.set_ground_truth(inner_without_validate, name)


- def inner(sample_id, preprocess_response):
+ def inner(*args, **kwargs):
+ validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
+ func_name=user_function.__name__, expected_names=["idx", "preprocess"], **kwargs)
+ sample_id, preprocess_response = args
  _validate_input_args(sample_id, preprocess_response)

  result = inner_without_validate(sample_id, preprocess_response)
@@ -967,13 +1215,14 @@ def tensorleap_gt_encoder(name: str):

  _validate_result(result)
  if _called_from_inside_tl_decorator == 0 and _called_from_inside_tl_integration_test_decorator:
+ batch_warning(result, user_function.__name__)
  result = np.expand_dims(result, axis=0)

  return result

  inner.node_mapping = NodeMapping(name, NodeMappingType.GroundTruth)

- def mapping_inner(sample_id, preprocess_response):
+ def mapping_inner(*args, **kwargs):
  class TempMapping:
  pass

@@ -984,11 +1233,11 @@ def tensorleap_gt_encoder(name: str):

  mapping_inner.node_mapping = NodeMapping(name, NodeMappingType.GroundTruth)

- def final_inner(sample_id, preprocess_response):
+ def final_inner(*args, **kwargs):
  if os.environ.get(mapping_runtime_mode_env_var_mame):
- return mapping_inner(sample_id, preprocess_response)
+ return mapping_inner(*args, **kwargs)
  else:
- return inner(sample_id, preprocess_response)
+ return inner(*args, **kwargs)

  final_inner.node_mapping = NodeMapping(name, NodeMappingType.GroundTruth)

@@ -1009,28 +1258,37 @@ def tensorleap_custom_loss(name: str, connects_to=None):
  valid_types = (np.ndarray, SamplePreprocessResponse)

  def _validate_input_args(*args, **kwargs):
+ assert len(args) > 0 and len(kwargs)==0, (
+ f"{user_function.__name__}() validation failed: "
+ f"Expected at least one positional|key-word argument of the allowed types (np.ndarray|SamplePreprocessResponse|list(np.ndarray|SamplePreprocessResponse)). "
+ f"but received none. "
+ f"Correct usage example: {user_function.__name__}(input_array: np.ndarray, ...)"
+ )
  for i, arg in enumerate(args):
  if isinstance(arg, list):
  for y, elem in enumerate(arg):
- assert isinstance(elem, valid_types), (f'tensorleap_custom_loss validation failed: '
+ assert isinstance(elem, valid_types), (f'{user_function.__name__}() validation failed: '
  f'Element #{y} of list should be a numpy array. Got {type(elem)}.')
  else:
- assert isinstance(arg, valid_types), (f'tensorleap_custom_loss validation failed: '
+ assert isinstance(arg, valid_types), (f'{user_function.__name__}() validation failed: '
  f'Argument #{i} should be a numpy array. Got {type(arg)}.')
  for _arg_name, arg in kwargs.items():
  if isinstance(arg, list):
  for y, elem in enumerate(arg):
- assert isinstance(elem, valid_types), (f'tensorleap_custom_loss validation failed: '
+ assert isinstance(elem, valid_types), (f'{user_function.__name__}() validation failed: '
  f'Element #{y} of list should be a numpy array. Got {type(elem)}.')
  else:
- assert isinstance(arg, valid_types), (f'tensorleap_custom_loss validation failed: '
+ assert isinstance(arg, valid_types), (f'{user_function.__name__}() validation failed: '
  f'Argument #{_arg_name} should be a numpy array. Got {type(arg)}.')

  def _validate_result(result):
+ validate_output_structure(result, func_name=user_function.__name__,
+ expected_type_name="np.ndarray")
  assert isinstance(result, np.ndarray), \
- (f'tensorleap_custom_loss validation failed: '
+ (f'{user_function.__name__} validation failed: '
  f'The return type should be a numpy array. Got {type(result)}.')
-
+ assert result.ndim<2 ,(f'{user_function.__name__} validation failed: '
+ f'The return type should be a 1Dim numpy array but got {result.ndim}Dim.')

  @functools.wraps(user_function)
  def inner_without_validate(*args, **kwargs):
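
For illustration only (not part of the released package): a minimal sketch of a custom loss that satisfies the new result checks by returning a numpy array with fewer than two dimensions (one value per sample); the loss name is a placeholder.

    import numpy as np
    from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_custom_loss

    @tensorleap_custom_loss(name="mse")
    def mse_loss(prediction: np.ndarray, ground_truth: np.ndarray) -> np.ndarray:
        # Reduce every non-batch axis so the result stays 1-D (result.ndim < 2).
        per_sample = ((prediction - ground_truth) ** 2).reshape(prediction.shape[0], -1).mean(axis=1)
        return per_sample.astype(np.float32)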
code_loader/leaploader.py
@@ -81,11 +81,11 @@ class LeapLoader(LeapLoaderBase):

  spec = importlib.util.spec_from_file_location(self.code_path, file_path)
  if spec is None or spec.loader is None:
- raise DatasetScriptException(f'Something is went wrong with spec file from: {file_path}')
+ raise DatasetScriptException(f'Something went wrong with spec file from: {file_path}')

  file = importlib.util.module_from_spec(spec)
  if file is None:
- raise DatasetScriptException(f'Something is went wrong with import module from: {file_path}')
+ raise DatasetScriptException(f'Something went wrong with import module from: {file_path}')

  spec.loader.exec_module(file)

pyproject.toml
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "code-loader"
- version = "1.0.137"
+ version = "1.0.139.dev1"
  description = ""
  authors = ["dorhar <doron.harnoy@tensorleap.ai>"]
  license = "MIT"