code-loader 1.0.141__py3-none-any.whl → 1.0.143__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of code-loader might be problematic.

code_loader/contract/datasetclasses.py

@@ -1,3 +1,4 @@
+ import warnings
  from dataclasses import dataclass, field
  from typing import Any, Callable, List, Optional, Dict, Union, Type
  import re
@@ -56,7 +57,14 @@ class PreprocessResponse:
              for sample_id in self.sample_ids:
                  assert isinstance(sample_id, str), f"Sample id should be of type str. Got: {type(sample_id)}"
          else:
-             raise Exception("length is deprecated.")
+             raise Exception("length is deprecated, please use sample_ids instead.")
+
+         if self.state is None:
+             warnings.warn(
+                 "PreprocessResponse.state is not set. For best practice, assign a unique `state` value to each PreprocessResponse instance."
+             )
+         else:
+             assert isinstance(self.state, DataStateType), f"PreprocessResponse.state must be of type {DataStateType.__name__} but got {type(self.state)}"

      def __hash__(self) -> int:
          return id(self)
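In practice this means each response returned from the preprocess stage should now carry its own state. A minimal sketch of the intended call pattern, assuming PreprocessResponse accepts sample_ids, data and state keyword arguments and that DataStateType (with members such as training and validation) is the enum from code_loader.contract.enums:

    from code_loader.contract.datasetclasses import PreprocessResponse
    from code_loader.contract.enums import DataStateType

    # state is set explicitly on every response; leaving it None now triggers the warning above
    train_response = PreprocessResponse(sample_ids=[str(i) for i in range(100)],
                                        data={"images_dir": "data/train"},
                                        state=DataStateType.training)
    val_response = PreprocessResponse(sample_ids=[str(i) for i in range(20)],
                                      data={"images_dir": "data/val"},
                                      state=DataStateType.validation)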
code_loader/inner_leap_binder/leapbinder_decorators.py

@@ -1,16 +1,14 @@
  # mypy: ignore-errors
  import os
- import logging
+ import warnings
  from collections import defaultdict
  from functools import lru_cache
  from pathlib import Path
- from typing import Optional, Union, Callable, List, Dict, Set, Any
+ from typing import Optional, Union, Callable, List, Dict, get_args, get_origin

  import numpy as np
  import numpy.typing as npt

- logger = logging.getLogger(__name__)
-
  from code_loader.contract.datasetclasses import CustomCallableInterfaceMultiArgs, \
      CustomMultipleReturnCallableInterfaceMultiArgs, ConfusionMatrixCallableInterfaceMultiArgs, CustomCallableInterface, \
      VisualizerCallableInterface, MetadataSectionCallableInterface, PreprocessResponse, SectionCallableInterface, \
@@ -22,15 +20,122 @@ from code_loader.contract.mapping import NodeMapping, NodeMappingType, NodeConne
  from code_loader.contract.visualizer_classes import LeapImage, LeapImageMask, LeapTextMask, LeapText, LeapGraph, \
      LeapHorizontalBar, LeapImageWithBBox, LeapImageWithHeatmap
  from code_loader.inner_leap_binder.leapbinder import mapping_runtime_mode_env_var_mame
- from code_loader.mixpanel_tracker import clear_integration_events, AnalyticsEvent, emit_integration_event_once

  import inspect
  import functools

  _called_from_inside_tl_decorator = 0
  _called_from_inside_tl_integration_test_decorator = False
-
-
+ _call_from_tl_platform= os.environ.get('IS_TENSORLEAP_PLATFORM') == 'true'
+
+
+
+
+ def validate_args_structure(*args, types_order, func_name, expected_names, **kwargs):
+     def _type_to_str(t):
+         origin = get_origin(t)
+         if origin is Union:
+             return " | ".join(tt.__name__ for tt in get_args(t))
+         elif hasattr(t, "__name__"):
+             return t.__name__
+         else:
+             return str(t)
+
+     def _format_types(types, names=None):
+         return ", ".join(
+             f"{(names[i] + ': ') if names else f'arg{i}: '}{_type_to_str(ty)}"
+             for i, ty in enumerate(types)
+         )
+
+     if expected_names:
+         normalized_args = []
+         for i, name in enumerate(expected_names):
+             if i < len(args):
+                 normalized_args.append(args[i])
+             elif name in kwargs:
+                 normalized_args.append(kwargs[name])
+             else:
+                 raise AssertionError(
+                     f"{func_name} validation failed: "
+                     f"Missing required argument '{name}'. "
+                     f"Expected arguments: {expected_names}."
+                 )
+     else:
+         normalized_args = list(args)
+     if len(normalized_args) != len(types_order):
+         expected = _format_types(types_order, expected_names)
+         got_types = ", ".join(type(arg).__name__ for arg in normalized_args)
+         raise AssertionError(
+             f"{func_name} validation failed: "
+             f"Expected exactly {len(types_order)} arguments ({expected}), "
+             f"but got {len(normalized_args)} argument(s) of type(s): ({got_types}). "
+             f"Correct usage example: {func_name}({expected})"
+         )
+
+     for i, (arg, expected_type) in enumerate(zip(normalized_args, types_order)):
+         origin = get_origin(expected_type)
+         if origin is Union:
+             allowed_types = get_args(expected_type)
+         else:
+             allowed_types = (expected_type,)
+
+         if not isinstance(arg, allowed_types):
+             allowed_str = " | ".join(t.__name__ for t in allowed_types)
+             raise AssertionError(
+                 f"{func_name} validation failed: "
+                 f"Argument '{expected_names[i] if expected_names else f'arg{i}'}' "
+                 f"expected type {allowed_str}, but got {type(arg).__name__}. "
+                 f"Correct usage example: {func_name}({_format_types(types_order, expected_names)})"
+             )
+
+
+ def validate_output_structure(result, func_name: str, expected_type_name="np.ndarray",gt_flag=False):
+     if result is None or (isinstance(result, float) and np.isnan(result)):
+         if gt_flag:
+             raise AssertionError(
+                 f"{func_name} validation failed: "
+                 f"The function returned {result!r}. "
+                 f"If you are working with an unlabeled dataset and no ground truth is available, "
+                 f"use 'return np.array([], dtype=np.float32)' instead. "
+                 f"Otherwise, {func_name} expected a single {expected_type_name} object. "
+                 f"Make sure the function ends with 'return <{expected_type_name}>'."
+             )
+
+         raise AssertionError(
+             f"{func_name} validation failed: "
+             f"The function returned None. "
+             f"Expected a single {expected_type_name} object. "
+             f"Make sure the function ends with 'return <{expected_type_name}>'."
+         )
+     if isinstance(result, tuple):
+         element_descriptions = [
+             f"[{i}] type: {type(r).__name__}"
+             for i, r in enumerate(result)
+         ]
+         element_summary = "\n ".join(element_descriptions)
+
+         raise AssertionError(
+             f"{func_name} validation failed: "
+             f"The function returned multiple outputs ({len(result)} values), "
+             f"but only a single {expected_type_name} is allowed.\n\n"
+             f"Returned elements:\n"
+             f" {element_summary}\n\n"
+             f"Correct usage example:\n"
+             f" def {func_name}(...):\n"
+             f" return <{expected_type_name}>\n\n"
+             f"If you intended to return multiple values, combine them into a single "
+             f"{expected_type_name} (e.g., by concatenation or stacking)."
+         )
+
+ def batch_warning(result, func_name):
+     if result.shape[0] == 1:
+         warnings.warn(
+             f"{func_name} warning: Tensorleap will add a batch dimension at axis 0 to the output of {func_name}, "
+             f"although the detected size of axis 0 is already 1. "
+             f"This may lead to an extra batch dimension (e.g., shape (1, 1, ...)). "
+             f"Please ensure that the output of '{func_name}' is not already batched "
+             f"to avoid computation errors."
+         )
  def _add_mapping_connection(user_unique_name, connection_destinations, arg_names, name, node_mapping_type):
      connection_destinations = [connection_destination for connection_destination in connection_destinations
                                 if not isinstance(connection_destination, SamplePreprocessResponse)]
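The new helpers are plain module-level functions, so their behaviour can be checked in isolation. A minimal sketch, assuming validate_args_structure from the hunk above is in scope; greet and its argument names are made up for illustration:

    from typing import Union

    def greet(idx, name):
        return f"{idx}: {name}"

    # passes: both arguments are present and of an allowed type
    validate_args_structure(3, "leap", types_order=[Union[int, str], str],
                            func_name="greet", expected_names=["idx", "name"])

    # raises AssertionError: "greet validation failed: Missing required argument 'name'. ..."
    validate_args_structure(3, types_order=[Union[int, str], str],
                            func_name="greet", expected_names=["idx", "name"])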
@@ -53,15 +158,24 @@ def tensorleap_integration_test():
      def decorating_function(integration_test_function: Callable):
          leap_binder.integration_test_func = integration_test_function

+         def _validate_input_args(*args, **kwargs):
+             sample_id,preprocess_response=args
+             assert type(sample_id) == preprocess_response.sample_id_type, (
+                 f"tensorleap_integration_test validation failed: "
+                 f"sample_id type ({type(sample_id).__name__}) does not match the expected "
+                 f"type ({preprocess_response.sample_id_type}) from the PreprocessResponse."
+             )
+
          def inner(*args, **kwargs):
+             validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
+                                     func_name='integration_test',expected_names=["idx", "preprocess"],**kwargs)
+             _validate_input_args(*args, **kwargs)
+
              global _called_from_inside_tl_integration_test_decorator
-             # Clear integration test events for new test
-             try:
-                 clear_integration_events()
-             except Exception as e:
-                 logger.debug(f"Failed to clear integration events: {e}")
              try:
                  _called_from_inside_tl_integration_test_decorator = True
+                 if not _call_from_tl_platform:
+                     update_env_params_func("tensorleap_integration_test", "v")  # placed here because otherwise it is marked 'v' only if the whole script finishes
                  ret = integration_test_function(*args, **kwargs)

                  try:
@@ -74,7 +188,7 @@ def tensorleap_integration_test():
                  line_number = first_tb.lineno
                  if isinstance(e, TypeError) and 'is not subscriptable' in str(e):
                      print(f'Invalid integration code. File {file_name}, line {line_number}: '
-                           f'Please remove this indexing operation usage from the integration test code.')
+                           f"indexing is supported only on the model's predictions inside the integration test. Please remove this indexing operation usage from the integration test code.")
                  else:
                      print(f'Invalid integration code. File {file_name}, line {line_number}: '
                            f'Integration test is only allowed to call Tensorleap decorators. '
@@ -86,9 +200,9 @@ def tensorleap_integration_test():
                  _called_from_inside_tl_integration_test_decorator = False

              leap_binder.check()
-
          return inner

+
      return decorating_function

  def _safe_get_item(key):
@@ -97,34 +211,63 @@ def _safe_get_item(key):
      except ValueError:
          raise Exception(f'Tensorleap currently supports models with no more then 10 inputs')

-
  def tensorleap_load_model(prediction_types: Optional[List[PredictionTypeHandler]] = []):
+     assert isinstance(prediction_types, list),(
+         f"tensorleap_load_model validation failed: "
+         f" prediction_types is an optional argument of type List[PredictionTypeHandler] but got {type(prediction_types).__name__}."
+     )
      for i, prediction_type in enumerate(prediction_types):
+         assert isinstance(prediction_type, PredictionTypeHandler),(f"tensorleap_load_model validation failed: "
+                                                                    f" prediction_types at position {i} must be of type PredictionTypeHandler but got {type(prediction_types[i]).__name__}.")
          leap_binder.add_prediction(prediction_type.name, prediction_type.labels, prediction_type.channel_dim, i)

+     def _validate_result(result) -> None:
+         valid_types=["onnxruntime","keras"]
+         err_message=f"tensorleap_load_model validation failed:\nSupported models are Keras and onnxruntime only and none of them was returned."
+         validate_output_structure(result, func_name="tensorleap_load_model", expected_type_name= [" | ".join(t for t in valid_types)][0])
+         try:
+             import keras
+         except ImportError:
+             keras = None
+         try:
+             import tensorflow as tf
+         except ImportError:
+             tf = None
+         try:
+             import onnxruntime
+         except ImportError:
+             onnxruntime = None
+
+         if not keras and not onnxruntime:
+             raise AssertionError(err_message)
+
+         is_keras_model = (
+             bool(keras and isinstance(result, getattr(keras, "Model", tuple())))
+             or bool(tf and isinstance(result, getattr(tf.keras, "Model", tuple())))
+         )
+         is_onnx_model = bool(onnxruntime and isinstance(result, onnxruntime.InferenceSession))
+
+         if not any([is_keras_model, is_onnx_model]):
+             raise AssertionError( err_message)
+
+
+
      def decorating_function(load_model_func):
          class TempMapping:
              pass

          @lru_cache()
-         def inner():
+         def inner(*args, **kwargs):
+             validate_args_structure(*args, types_order=[],
+                                     func_name='tensorleap_load_model',expected_names=[],**kwargs)
              class ModelPlaceholder:
                  def __init__(self):
-                     self.model = load_model_func()
-                     # Emit integration test event once per test
-                     try:
-                         emit_integration_event_once(AnalyticsEvent.LOAD_MODEL_INTEGRATION_TEST, {
-                             'prediction_types_count': len(prediction_types)
-                         })
-                     except Exception as e:
-                         logger.debug(f"Failed to emit load_model integration test event: {e}")
+                     self.model = load_model_func()  # TODO - check why this fails on onnx model
+                     _validate_result(self.model)

                  # keras interface
                  def __call__(self, arg):
                      ret = self.model(arg)
-                     if isinstance(ret, list or tuple):
-                         return [r.numpy() for r in ret]
-
                      return ret.numpy()

                  def _convert_onnx_inputs_to_correct_type(
@@ -184,8 +327,10 @@ def tensorleap_load_model(prediction_types: Optional[List[PredictionTypeHandler]

                  def get_inputs(self):
                      return self.model.get_inputs()
-
-             return ModelPlaceholder()
+             model_placeholder=ModelPlaceholder()
+             if not _call_from_tl_platform:
+                 update_env_params_func("tensorleap_load_model", "v")
+             return model_placeholder

          def mapping_inner():
              class ModelOutputPlaceholder:
@@ -248,12 +393,11 @@ def tensorleap_load_model(prediction_types: Optional[List[PredictionTypeHandler]

              return ModelPlaceholder()

-         def final_inner():
+         def final_inner(*args, **kwargs):
              if os.environ.get(mapping_runtime_mode_env_var_mame):
                  return mapping_inner()
              else:
-                 return inner()
-
+                 return inner(*args, **kwargs)
          return final_inner

      return decorating_function
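With the new _validate_result check, the decorated loader must return either a Keras model or an onnxruntime.InferenceSession; anything else raises the "Supported models are Keras and onnxruntime" assertion. A hedged sketch of a conforming loader (the model path is illustrative):

    import onnxruntime
    from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_load_model

    @tensorleap_load_model(prediction_types=[])
    def load_model():
        # returning e.g. a plain file path or any other object would now fail validation
        return onnxruntime.InferenceSession("model/model.onnx")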
@@ -264,81 +408,168 @@ def tensorleap_custom_metric(name: str,
                              compute_insights: Optional[Union[bool, Dict[str, bool]]] = None,
                              connects_to=None):
      name_to_unique_name = defaultdict(set)
-
      def decorating_function(
              user_function: Union[CustomCallableInterfaceMultiArgs, CustomMultipleReturnCallableInterfaceMultiArgs,
                                   ConfusionMatrixCallableInterfaceMultiArgs]):
+
+         def _validate_decorators_signature():
+             err_message = f"{user_function.__name__} validation failed.\n"
+             if not isinstance(name, str):
+                 raise TypeError(err_message + f"`name` must be a string, got type {type(name).__name__}.")
+             valid_directions = {MetricDirection.Upward, MetricDirection.Downward}
+             if isinstance(direction, MetricDirection):
+                 if direction not in valid_directions:
+                     raise ValueError(
+                         err_message +
+                         f"Invalid MetricDirection: {direction}. Must be one of {valid_directions}, "
+                         f"got type {type(direction).__name__}."
+                     )
+             elif isinstance(direction, dict):
+                 if not all(isinstance(k, str) for k in direction.keys()):
+                     invalid_keys = {k: type(k).__name__ for k in direction.keys() if not isinstance(k, str)}
+                     raise TypeError(
+                         err_message +
+                         f"All keys in `direction` must be strings, got invalid key types: {invalid_keys}."
+                     )
+                 for k, v in direction.items():
+                     if v not in valid_directions:
+                         raise ValueError(
+                             err_message +
+                             f"Invalid direction for key '{k}': {v}. Must be one of {valid_directions}, "
+                             f"got type {type(v).__name__}."
+                         )
+             else:
+                 raise TypeError(
+                     err_message +
+                     f"`direction` must be a MetricDirection or a Dict[str, MetricDirection], "
+                     f"got type {type(direction).__name__}."
+                 )
+             if compute_insights is not None:
+                 if not isinstance(compute_insights, (bool, dict)):
+                     raise TypeError(
+                         err_message +
+                         f"`compute_insights` must be a bool or a Dict[str, bool], "
+                         f"got type {type(compute_insights).__name__}."
+                     )
+                 if isinstance(compute_insights, dict):
+                     if not all(isinstance(k, str) for k in compute_insights.keys()):
+                         invalid_keys = {k: type(k).__name__ for k in compute_insights.keys() if not isinstance(k, str)}
+                         raise TypeError(
+                             err_message +
+                             f"All keys in `compute_insights` must be strings, got invalid key types: {invalid_keys}."
+                         )
+                     for k, v in compute_insights.items():
+                         if not isinstance(v, bool):
+                             raise TypeError(
+                                 err_message +
+                                 f"Invalid type for compute_insights['{k}']: expected bool, got type {type(v).__name__}."
+                             )
+             if connects_to is not None:
+                 valid_types = (str, list, tuple, set)
+                 if not isinstance(connects_to, valid_types):
+                     raise TypeError(
+                         err_message +
+                         f"`connects_to` must be one of {valid_types}, got type {type(connects_to).__name__}."
+                     )
+                 if isinstance(connects_to, (list, tuple, set)):
+                     invalid_elems = [f"{type(e).__name__}" for e in connects_to if not isinstance(e, str)]
+                     if invalid_elems:
+                         raise TypeError(
+                             err_message +
+                             f"All elements in `connects_to` must be strings, "
+                             f"but found element types: {invalid_elems}."
+                         )
+
+
+         _validate_decorators_signature()
+
          for metric_handler in leap_binder.setup_container.metrics:
              if metric_handler.metric_handler_data.name == name:
                  raise Exception(f'Metric with name {name} already exists. '
                                  f'Please choose another')

          def _validate_input_args(*args, **kwargs) -> None:
+             assert len(args) > 0, (
+                 f"{user_function.__name__}() validation failed: "
+                 f"Expected at least one positional|key-word argument of type np.ndarray, "
+                 f"but received none. "
+                 f"Correct usage example: tensorleap_custom_metric(input_array: np.ndarray, ...)"
+             )
              for i, arg in enumerate(args):
                  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
-                     f'tensorleap_custom_metric validation failed: '
+                     f'{user_function.__name__}() validation failed: '
                      f'Argument #{i} should be a numpy array. Got {type(arg)}.')
                  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
                      assert arg.shape[0] == leap_binder.batch_size_to_validate, \
-                         (f'tensorleap_custom_metric validation failed: Argument #{i} '
+                         (f'{user_function.__name__}() validation failed: Argument #{i} '
                           f'first dim should be as the batch size. Got {arg.shape[0]} '
                           f'instead of {leap_binder.batch_size_to_validate}')

              for _arg_name, arg in kwargs.items():
                  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
-                     f'tensorleap_custom_metric validation failed: '
+                     f'{user_function.__name__}() validation failed: '
                      f'Argument {_arg_name} should be a numpy array. Got {type(arg)}.')
                  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
                      assert arg.shape[0] == leap_binder.batch_size_to_validate, \
-                         (f'tensorleap_custom_metric validation failed: Argument {_arg_name} '
+                         (f'{user_function.__name__}() validation failed: Argument {_arg_name} '
                           f'first dim should be as the batch size. Got {arg.shape[0]} '
                           f'instead of {leap_binder.batch_size_to_validate}')

          def _validate_result(result) -> None:
-             supported_types_message = (f'tensorleap_custom_metric validation failed: '
-                                        f'Metric has returned unsupported type. Supported types are List[float], '
-                                        f'List[List[ConfusionMatrixElement]], NDArray[np.float32]. ')
+             validate_output_structure(result, func_name=user_function.__name__,
+                                       expected_type_name="List[float | int | None | List[ConfusionMatrixElement] ] | NDArray[np.float32] or dictionary with one of these types as its value types")
+             supported_types_message = (f'{user_function.__name__}() validation failed: '
+                                        f'{user_function.__name__}() has returned unsupported type.\nSupported types are List[float|int|None], '
+                                        f'List[List[ConfusionMatrixElement]], NDArray[np.float32] or dictionary with one of these types as its value types. ')

-             def _validate_single_metric(single_metric_result):
+             def _validate_single_metric(single_metric_result,key=None):
                  if isinstance(single_metric_result, list):
                      if isinstance(single_metric_result[0], list):
-                         assert isinstance(single_metric_result[0][0], ConfusionMatrixElement), \
-                             f'{supported_types_message}Got List[List[{type(single_metric_result[0][0])}]].'
+                         assert all(isinstance(cm, ConfusionMatrixElement) for cm in single_metric_result[0]), (
+                             f"{supported_types_message} "
+                             f"Got {'a dict where the value of ' + str(key) + ' is of type ' if key is not None else ''}"
+                             f"List[List[{', '.join(type(cm).__name__ for cm in single_metric_result[0])}]]."
+                         )
+
                      else:
-                         assert isinstance(single_metric_result[0], (
-                             float, int,
-                             type(None))), f'{supported_types_message}Got List[{type(single_metric_result[0])}].'
+                         assert all(isinstance(v, (float,int,type(None),np.float32)) for v in single_metric_result), (
+                             f"{supported_types_message}\n"
+                             f"Got {'a dict where the value of ' + str(key) + ' is of type ' if key is not None else ''}"
+                             f"List[{', '.join(type(v).__name__ for v in single_metric_result)}]."
+                         )
                  else:
                      assert isinstance(single_metric_result,
-                                       np.ndarray), f'{supported_types_message}Got {type(single_metric_result)}.'
-                     assert len(single_metric_result.shape) == 1, (f'tensorleap_custom_metric validation failed: '
+                                       np.ndarray), f'{supported_types_message}\nGot {type(single_metric_result)}.'
+                     assert len(single_metric_result.shape) == 1, (f'{user_function.__name__}() validation failed: '
                                                                    f'The return shape should be 1D. Got {len(single_metric_result.shape)}D.')

                  if leap_binder.batch_size_to_validate:
                      assert len(single_metric_result) == leap_binder.batch_size_to_validate, \
-                         f'tensorleap_custom_metrix validation failed: The return len should be as the batch size.'
+                         f'{user_function.__name__}() validation failed: The return len {f"of {key} value " if key is not None else ""}should be as the batch size.'

              if isinstance(result, dict):
                  for key, value in result.items():
+                     _validate_single_metric(value,key)
+
                      assert isinstance(key, str), \
-                         (f'tensorleap_custom_metric validation failed: '
+                         (f'{user_function.__name__}() validation failed: '
                           f'Keys in the return dict should be of type str. Got {type(key)}.')
                      _validate_single_metric(value)

                  if isinstance(direction, dict):
                      for direction_key in direction:
                          assert direction_key in result, \
-                             (f'tensorleap_custom_metric validation failed: '
+                             (f'{user_function.__name__}() validation failed: '
                               f'Keys in the direction mapping should be part of result keys. Got key {direction_key}.')

                  if compute_insights is not None:
                      assert isinstance(compute_insights, dict), \
-                         (f'tensorleap_custom_metric validation failed: '
+                         (f'{user_function.__name__}() validation failed: '
                           f'compute_insights should be dict if using the dict results. Got {type(compute_insights)}.')

                      for ci_key in compute_insights:
                          assert ci_key in result, \
-                             (f'tensorleap_custom_metric validation failed: '
+                             (f'{user_function.__name__}() validation failed: '
                               f'Keys in the compute_insights mapping should be part of result keys. Got key {ci_key}.')

              else:
@@ -346,7 +577,7 @@ def tensorleap_custom_metric(name: str,

                  if compute_insights is not None:
                      assert isinstance(compute_insights, bool), \
-                         (f'tensorleap_custom_metric validation failed: '
+                         (f'{user_function.__name__}() validation failed: '
                           f'compute_insights should be boolean. Got {type(compute_insights)}.')

          @functools.wraps(user_function)
@@ -378,6 +609,8 @@ def tensorleap_custom_metric(name: str,
              result = inner_without_validate(*args, **kwargs)

              _validate_result(result)
+             if not _call_from_tl_platform:
+                 update_env_params_func("tensorleap_custom_metric","v")
              return result

          def mapping_inner(*args, **kwargs):
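Metric results may now be a dict whose values are batch-length lists or 1-D arrays, with per-key directions. A hedged sketch of a conforming metric (the metric name, argument list and import paths are illustrative):

    import numpy as np
    from code_loader.contract.enums import MetricDirection
    from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_custom_metric

    @tensorleap_custom_metric('error_stats',
                              direction={'abs_error': MetricDirection.Downward,
                                         'hit_rate': MetricDirection.Upward})
    def error_stats(pred: np.ndarray, gt: np.ndarray):
        # each value is a 1-D, batch-length array, as _validate_single_metric expects
        abs_error = np.abs(pred - gt).mean(axis=-1).astype(np.float32)
        hit_rate = (np.argmax(pred, axis=-1) == np.argmax(gt, axis=-1)).astype(np.float32)
        return {'abs_error': abs_error, 'hit_rate': hit_rate}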
@@ -417,28 +650,38 @@ def tensorleap_custom_visualizer(name: str, visualizer_type: LeapDataType,
      name_to_unique_name = defaultdict(set)

      def decorating_function(user_function: VisualizerCallableInterface):
+         assert isinstance(visualizer_type,LeapDataType),(f"{user_function.__name__} validation failed: "
+                                                          f"visualizer_type should be of type {LeapDataType.__name__} but got {type(visualizer_type)}"
+                                                          )
+
          for viz_handler in leap_binder.setup_container.visualizers:
              if viz_handler.visualizer_handler_data.name == name:
                  raise Exception(f'Visualizer with name {name} already exists. '
                                  f'Please choose another')

          def _validate_input_args(*args, **kwargs):
+             assert len(args) > 0, (
+                 f"{user_function.__name__}() validation failed: "
+                 f"Expected at least one positional|key-word argument of type np.ndarray, "
+                 f"but received none. "
+                 f"Correct usage example: {user_function.__name__}(input_array: np.ndarray, ...)"
+             )
              for i, arg in enumerate(args):
                  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
-                     f'tensorleap_custom_visualizer validation failed: '
+                     f'{user_function.__name__}() validation failed: '
                      f'Argument #{i} should be a numpy array. Got {type(arg)}.')
                  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
                      assert arg.shape[0] != leap_binder.batch_size_to_validate, \
-                         (f'tensorleap_custom_visualizer validation failed: '
+                         (f'{user_function.__name__}() validation failed: '
                           f'Argument #{i} should be without batch dimension. ')

              for _arg_name, arg in kwargs.items():
                  assert isinstance(arg, (np.ndarray, SamplePreprocessResponse)), (
-                     f'tensorleap_custom_visualizer validation failed: '
+                     f'{user_function.__name__}() validation failed: '
                      f'Argument {_arg_name} should be a numpy array. Got {type(arg)}.')
                  if leap_binder.batch_size_to_validate and isinstance(arg, np.ndarray):
                      assert arg.shape[0] != leap_binder.batch_size_to_validate, \
-                         (f'tensorleap_custom_visualizer validation failed: Argument {_arg_name} '
+                         (f'{user_function.__name__}() validation failed: Argument {_arg_name} '
                           f'should be without batch dimension. ')

          def _validate_result(result):
@@ -452,8 +695,11 @@ def tensorleap_custom_visualizer(name: str, visualizer_type: LeapDataType,
                  LeapDataType.ImageWithBBox: LeapImageWithBBox,
                  LeapDataType.ImageWithHeatmap: LeapImageWithHeatmap
              }
+             validate_output_structure(result, func_name=user_function.__name__,
+                                       expected_type_name=result_type_map[visualizer_type])
+
              assert isinstance(result, result_type_map[visualizer_type]), \
-                 (f'tensorleap_custom_visualizer validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'The return type should be {result_type_map[visualizer_type]}. Got {type(result)}.')

          @functools.wraps(user_function)
@@ -485,6 +731,8 @@ def tensorleap_custom_visualizer(name: str, visualizer_type: LeapDataType,
              result = inner_without_validate(*args, **kwargs)

              _validate_result(result)
+             if not _call_from_tl_platform:
+                 update_env_params_func("tensorleap_custom_visualizer","v")
              return result

          def mapping_inner(*args, **kwargs):
@@ -526,30 +774,26 @@ def tensorleap_metadata(
                          f'Please choose another')

          def _validate_input_args(sample_id: Union[int, str], preprocess_response: PreprocessResponse):
-             assert isinstance(sample_id, (int, str)), \
-                 (f'tensorleap_metadata validation failed: '
-                  f'Argument sample_id should be either int or str. Got {type(sample_id)}.')
-             assert isinstance(preprocess_response, PreprocessResponse), \
-                 (f'tensorleap_metadata validation failed: '
-                  f'Argument preprocess_response should be a PreprocessResponse. Got {type(preprocess_response)}.')
              assert type(sample_id) == preprocess_response.sample_id_type, \
-                 (f'tensorleap_metadata validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'Argument sample_id should be as the same type as defined in the preprocess response '
                   f'{preprocess_response.sample_id_type}. Got {type(sample_id)}.')

          def _validate_result(result):
              supported_result_types = (type(None), int, str, bool, float, dict, np.floating,
                                        np.bool_, np.unsignedinteger, np.signedinteger, np.integer)
+             validate_output_structure(result, func_name=user_function.__name__,
+                                       expected_type_name=supported_result_types)
              assert isinstance(result, supported_result_types), \
-                 (f'tensorleap_metadata validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'Unsupported return type. Got {type(result)}. should be any of {str(supported_result_types)}')
              if isinstance(result, dict):
                  for key, value in result.items():
                      assert isinstance(key, str), \
-                         (f'tensorleap_metadata validation failed: '
+                         (f'{user_function.__name__}() validation failed: '
                           f'Keys in the return dict should be of type str. Got {type(key)}.')
                      assert isinstance(value, supported_result_types), \
-                         (f'tensorleap_metadata validation failed: '
+                         (f'{user_function.__name__}() validation failed: '
                           f'Values in the return dict should be of type {str(supported_result_types)}. Got {type(value)}.')

          def inner_without_validate(sample_id, preprocess_response):
@@ -566,15 +810,19 @@ def tensorleap_metadata(

          leap_binder.set_metadata(inner_without_validate, name, metadata_type)

-         def inner(sample_id, preprocess_response):
+         def inner(*args,**kwargs):
              if os.environ.get(mapping_runtime_mode_env_var_mame):
                  return None
-
+             validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
+                                     func_name=user_function.__name__, expected_names=["idx", "preprocess"],**kwargs)
+             sample_id, preprocess_response = args if len(args)!=0 else kwargs.values()
              _validate_input_args(sample_id, preprocess_response)

              result = inner_without_validate(sample_id, preprocess_response)

              _validate_result(result)
+             if not _call_from_tl_platform:
+                 update_env_params_func("tensorleap_metadata","v")
              return result

          return inner
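Because the wrapper now routes through validate_args_structure with expected names "idx" and "preprocess", a metadata function can be invoked positionally or by keyword during the integration test. A hedged sketch (the decorator's name argument and the field access are illustrative):

    from code_loader.contract.datasetclasses import PreprocessResponse
    from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_metadata

    @tensorleap_metadata('sample_stats')
    def sample_stats(idx: str, preprocess: PreprocessResponse):
        # keys must be str and values one of the supported scalar types
        return {'sample_id': idx, 'split_size': len(preprocess.sample_ids)}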
@@ -637,19 +885,23 @@ def tensorleap_preprocess():

          def _validate_input_args(*args, **kwargs):
              assert len(args) == 0 and len(kwargs) == 0, \
-                 (f'tensorleap_preprocess validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'The function should not take any arguments. Got {args} and {kwargs}.')

          def _validate_result(result):
-             assert isinstance(result, list), \
-                 (f'tensorleap_preprocess validation failed: '
-                  f'The return type should be a list. Got {type(result)}.')
+             assert isinstance(result, list), (
+                 f"{user_function.__name__}() validation failed: expected return type list[{PreprocessResponse.__name__}]"
+                 f"(e.g., [PreprocessResponse1, PreprocessResponse2, ...]), but returned type is {type(result).__name__}."
+                 if not isinstance(result, tuple)
+                 else f"{user_function.__name__}() validation failed: expected to return a single list[{PreprocessResponse.__name__}] object, "
+                      f"but returned {len(result)} objects instead."
+             )
              for i, response in enumerate(result):
                  assert isinstance(response, PreprocessResponse), \
-                     (f'tensorleap_preprocess validation failed: '
+                     (f'{user_function.__name__}() validation failed: '
                       f'Element #{i} in the return list should be a PreprocessResponse. Got {type(response)}.')
              assert len(set(result)) == len(result), \
-                 (f'tensorleap_preprocess validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'The return list should not contain duplicate PreprocessResponse objects.')

          def inner(*args, **kwargs):
@@ -657,18 +909,10 @@ def tensorleap_preprocess():
                  return [None, None, None, None]

              _validate_input_args(*args, **kwargs)
-
              result = user_function()
              _validate_result(result)
-
-             # Emit integration test event once per test
-             try:
-                 emit_integration_event_once(AnalyticsEvent.PREPROCESS_INTEGRATION_TEST, {
-                     'preprocess_responses_count': len(result)
-                 })
-             except Exception as e:
-                 logger.debug(f"Failed to emit preprocess integration test event: {e}")
-
+             if not _call_from_tl_platform:
+                 update_env_params_func("tensorleap_preprocess", "v")
              return result

          return inner
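The preprocess function itself still takes no arguments and must return a list (not a tuple) of unique PreprocessResponse objects, for example the two responses sketched for datasetclasses above:

    from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_preprocess

    @tensorleap_preprocess()
    def preprocess_func():
        # a tuple or duplicated responses would now produce the more descriptive assertion above
        return [train_response, val_response]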
@@ -867,29 +1111,23 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):
              raise Exception(f"Channel dim for input {name} is expected to be either -1 or positive")

          def _validate_input_args(sample_id: Union[int, str], preprocess_response: PreprocessResponse):
-             assert isinstance(sample_id, (int, str)), \
-                 (f'tensorleap_input_encoder validation failed: '
-                  f'Argument sample_id should be either int or str. Got {type(sample_id)}.')
-             assert isinstance(preprocess_response, PreprocessResponse), \
-                 (f'tensorleap_input_encoder validation failed: '
-                  f'Argument preprocess_response should be a PreprocessResponse. Got {type(preprocess_response)}.')
              assert type(sample_id) == preprocess_response.sample_id_type, \
-                 (f'tensorleap_input_encoder validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'Argument sample_id should be as the same type as defined in the preprocess response '
                   f'{preprocess_response.sample_id_type}. Got {type(sample_id)}.')

          def _validate_result(result):
+             validate_output_structure(result, func_name=user_function.__name__, expected_type_name = "np.ndarray")
              assert isinstance(result, np.ndarray), \
-                 (f'tensorleap_input_encoder validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'Unsupported return type. Should be a numpy array. Got {type(result)}.')
              assert result.dtype == np.float32, \
-                 (f'tensorleap_input_encoder validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'The return type should be a numpy array of type float32. Got {result.dtype}.')
-             assert channel_dim - 1 <= len(result.shape), (f'tensorleap_input_encoder validation failed: '
+             assert channel_dim - 1 <= len(result.shape), (f'{user_function.__name__}() validation failed: '
                                                            f'The channel_dim ({channel_dim}) should be <= to the rank of the resulting input rank ({len(result.shape)}).')

          def inner_without_validate(sample_id, preprocess_response):
-
              global _called_from_inside_tl_decorator
              _called_from_inside_tl_decorator += 1

@@ -903,7 +1141,10 @@ def tensorleap_input_encoder(name: str, channel_dim=-1, model_input_index=None):
          leap_binder.set_input(inner_without_validate, name, channel_dim=channel_dim)


-         def inner(sample_id, preprocess_response):
+         def inner(*args, **kwargs):
+             validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
+                                     func_name=user_function.__name__, expected_names=["idx", "preprocess"], **kwargs)
+             sample_id, preprocess_response = args if len(args)!=0 else kwargs.values()
              _validate_input_args(sample_id, preprocess_response)

              result = inner_without_validate(sample_id, preprocess_response)
@@ -911,27 +1152,20 @@

              _validate_result(result)

              if _called_from_inside_tl_decorator == 0 and _called_from_inside_tl_integration_test_decorator:
+                 batch_warning(result,user_function.__name__)
                  result = np.expand_dims(result, axis=0)
-                 # Emit integration test event once per test
-                 try:
-                     emit_integration_event_once(AnalyticsEvent.INPUT_ENCODER_INTEGRATION_TEST, {
-                         'encoder_name': name,
-                         'channel_dim': channel_dim,
-                         'model_input_index': model_input_index
-                     })
-                 except Exception as e:
-                     logger.debug(f"Failed to emit input_encoder integration test event: {e}")
+             if not _call_from_tl_platform:
+                 update_env_params_func("tensorleap_input_encoder", "v")

              return result


-
          node_mapping_type = NodeMappingType.Input
          if model_input_index is not None:
              node_mapping_type = NodeMappingType(f'Input{str(model_input_index)}')
          inner.node_mapping = NodeMapping(name, node_mapping_type)

-         def mapping_inner(sample_id, preprocess_response):
+         def mapping_inner(*args, **kwargs):
              class TempMapping:
                  pass

@@ -943,11 +1177,11 @@

          mapping_inner.node_mapping = NodeMapping(name, node_mapping_type)

-         def final_inner(sample_id, preprocess_response):
+         def final_inner(*args, **kwargs):
              if os.environ.get(mapping_runtime_mode_env_var_mame):
-                 return mapping_inner(sample_id, preprocess_response)
+                 return mapping_inner(*args, **kwargs)
              else:
-                 return inner(sample_id, preprocess_response)
+                 return inner(*args, **kwargs)

          final_inner.node_mapping = NodeMapping(name, node_mapping_type)

@@ -964,23 +1198,18 @@ def tensorleap_gt_encoder(name: str):
                          f'Please choose another')

          def _validate_input_args(sample_id: Union[int, str], preprocess_response: PreprocessResponse):
-             assert isinstance(sample_id, (int, str)), \
-                 (f'tensorleap_gt_encoder validation failed: '
-                  f'Argument sample_id should be either int or str. Got {type(sample_id)}.')
-             assert isinstance(preprocess_response, PreprocessResponse), \
-                 (f'tensorleap_gt_encoder validation failed: '
-                  f'Argument preprocess_response should be a PreprocessResponse. Got {type(preprocess_response)}.')
              assert type(sample_id) == preprocess_response.sample_id_type, \
-                 (f'tensorleap_gt_encoder validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'Argument sample_id should be as the same type as defined in the preprocess response '
                   f'{preprocess_response.sample_id_type}. Got {type(sample_id)}.')

          def _validate_result(result):
+             validate_output_structure(result, func_name=user_function.__name__, expected_type_name = "np.ndarray",gt_flag=True)
              assert isinstance(result, np.ndarray), \
-                 (f'tensorleap_gt_encoder validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'Unsupported return type. Should be a numpy array. Got {type(result)}.')
              assert result.dtype == np.float32, \
-                 (f'tensorleap_gt_encoder validation failed: '
+                 (f'{user_function.__name__}() validation failed: '
                   f'The return type should be a numpy array of type float32. Got {result.dtype}.')

          def inner_without_validate(sample_id, preprocess_response):
@@ -997,7 +1226,10 @@ def tensorleap_gt_encoder(name: str):
          leap_binder.set_ground_truth(inner_without_validate, name)


-         def inner(sample_id, preprocess_response):
+         def inner(*args, **kwargs):
+             validate_args_structure(*args, types_order=[Union[int, str], PreprocessResponse],
+                                     func_name=user_function.__name__, expected_names=["idx", "preprocess"], **kwargs)
+             sample_id, preprocess_response = args
              _validate_input_args(sample_id, preprocess_response)

              result = inner_without_validate(sample_id, preprocess_response)
@@ -1005,20 +1237,15 @@
              _validate_result(result)

              if _called_from_inside_tl_decorator == 0 and _called_from_inside_tl_integration_test_decorator:
+                 batch_warning(result, user_function.__name__)
                  result = np.expand_dims(result, axis=0)
-                 # Emit integration test event once per test
-                 try:
-                     emit_integration_event_once(AnalyticsEvent.GT_ENCODER_INTEGRATION_TEST, {
-                         'encoder_name': name
-                     })
-                 except Exception as e:
-                     logger.debug(f"Failed to emit gt_encoder integration test event: {e}")
-
+             if not _call_from_tl_platform:
+                 update_env_params_func("tensorleap_gt_encoder", "v")
              return result

          inner.node_mapping = NodeMapping(name, NodeMappingType.GroundTruth)

-         def mapping_inner(sample_id, preprocess_response):
+         def mapping_inner(*args, **kwargs):
              class TempMapping:
                  pass

@@ -1029,11 +1256,11 @@ def tensorleap_gt_encoder(name: str):

          mapping_inner.node_mapping = NodeMapping(name, NodeMappingType.GroundTruth)

-         def final_inner(sample_id, preprocess_response):
+         def final_inner(*args, **kwargs):
              if os.environ.get(mapping_runtime_mode_env_var_mame):
-                 return mapping_inner(sample_id, preprocess_response)
+                 return mapping_inner(*args, **kwargs)
              else:
-                 return inner(sample_id, preprocess_response)
+                 return inner(*args, **kwargs)

          final_inner.node_mapping = NodeMapping(name, NodeMappingType.GroundTruth)

@@ -1054,28 +1281,37 @@ def tensorleap_custom_loss(name: str, connects_to=None):
          valid_types = (np.ndarray, SamplePreprocessResponse)

          def _validate_input_args(*args, **kwargs):
+             assert len(args) > 0 and len(kwargs)==0, (
+                 f"{user_function.__name__}() validation failed: "
+                 f"Expected at least one positional|key-word argument of the allowed types (np.ndarray|SamplePreprocessResponse|list(np.ndarray|SamplePreprocessResponse)). "
+                 f"but received none. "
+                 f"Correct usage example: {user_function.__name__}(input_array: np.ndarray, ...)"
+             )
              for i, arg in enumerate(args):
                  if isinstance(arg, list):
                      for y, elem in enumerate(arg):
-                         assert isinstance(elem, valid_types), (f'tensorleap_custom_loss validation failed: '
+                         assert isinstance(elem, valid_types), (f'{user_function.__name__}() validation failed: '
                                                                 f'Element #{y} of list should be a numpy array. Got {type(elem)}.')
                  else:
-                     assert isinstance(arg, valid_types), (f'tensorleap_custom_loss validation failed: '
+                     assert isinstance(arg, valid_types), (f'{user_function.__name__}() validation failed: '
                                                            f'Argument #{i} should be a numpy array. Got {type(arg)}.')
              for _arg_name, arg in kwargs.items():
                  if isinstance(arg, list):
                      for y, elem in enumerate(arg):
-                         assert isinstance(elem, valid_types), (f'tensorleap_custom_loss validation failed: '
+                         assert isinstance(elem, valid_types), (f'{user_function.__name__}() validation failed: '
                                                                 f'Element #{y} of list should be a numpy array. Got {type(elem)}.')
                  else:
-                     assert isinstance(arg, valid_types), (f'tensorleap_custom_loss validation failed: '
+                     assert isinstance(arg, valid_types), (f'{user_function.__name__}() validation failed: '
                                                            f'Argument #{_arg_name} should be a numpy array. Got {type(arg)}.')

          def _validate_result(result):
+             validate_output_structure(result, func_name=user_function.__name__,
+                                       expected_type_name="np.ndarray")
              assert isinstance(result, np.ndarray), \
-                 (f'tensorleap_custom_loss validation failed: '
+                 (f'{user_function.__name__} validation failed: '
                   f'The return type should be a numpy array. Got {type(result)}.')
-
+             assert result.ndim<2 ,(f'{user_function.__name__} validation failed: '
+                                    f'The return type should be a 1Dim numpy array but got {result.ndim}Dim.')

          @functools.wraps(user_function)
          def inner_without_validate(*args, **kwargs):
@@ -1106,6 +1342,9 @@ def tensorleap_custom_loss(name: str, connects_to=None):
              result = inner_without_validate(*args, **kwargs)

              _validate_result(result)
+             if not _call_from_tl_platform:
+                 update_env_params_func("tensorleap_custom_loss", "v")
+
              return result

          def mapping_inner(*args, **kwargs):
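The extra ndim check means a custom loss is expected to come back as a 1-D, per-sample vector rather than a batched matrix. A hedged sketch of a conforming loss (the name and reduction are illustrative):

    import numpy as np
    from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_custom_loss

    @tensorleap_custom_loss('mse')
    def mse_loss(pred: np.ndarray, gt: np.ndarray):
        # reduce all non-batch axes so the result is 1-D; a 2-D array now fails _validate_result
        return np.mean((pred - gt) ** 2, axis=tuple(range(1, pred.ndim))).astype(np.float32)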
@@ -1162,3 +1401,93 @@ def tensorleap_custom_layer(name: str):
          return custom_layer

      return decorating_function
+
+
+ def tensorleap_status_table():
+     '''
+     Usage example:
+     ###################
+     leap_integration.py
+     ###################
+     from code_loader.inner_leap_binder.leapbinder_decorators import tensorleap_status_table
+     ...
+     ...
+     ...
+     if __name__ == '__main__':
+         tensorleap_status_table()
+         ...
+     '''
+     import atexit
+     import sys
+     import traceback
+     CHECK = "✅"
+     CROSS = "❌"
+
+     table = [
+         {"name": "tensorleap_preprocess", "Added to integration": CROSS},
+         {"name": "tensorleap_integration_test", "Added to integration": CROSS},
+         {"name": "tensorleap_input_encoder", "Added to integration": CROSS},
+         {"name": "tensorleap_gt_encoder", "Added to integration": CROSS},
+         {"name": "tensorleap_load_model", "Added to integration": CROSS},
+         {"name": "tensorleap_custom_loss", "Added to integration": CROSS},
+         {"name": "tensorleap_custom_metric (optional)", "Added to integration": CROSS},
+         {"name": "tensorleap_metadata (optional)", "Added to integration": CROSS},
+         {"name": "tensorleap_custom_visualizer (optional)", "Added to integration": CROSS},
+
+     ]
+
+     _finalizer_called = {"done": False}
+
+     def _remove_suffix(s: str, suffix: str) -> str:
+         # This is needed because str.removesuffix was only introduced in Python 3.9
+         if suffix and s.endswith(suffix):
+             return s[:-len(suffix)]
+         return s
+
+     def _print_table():
+         ready_mess = "\nAll parts have been successfully set. If no errors occurred, you can now push the project to the Tensorleap system."
+         not_ready_mess = "\nSome mandatory components have not yet been added to the Integration test. Recommended next interface to add is: "
+         mandatory_ready_mess = "\nAll mandatory parts have been successfully set. If no errors occurred, you can now push the project to the Tensorleap system or continue to the next optional recommended interface, adding: "
+
+         name_width = max(len(row["name"]) for row in table)
+         status_width = max(len(row["Added to integration"]) for row in table)
+         header = f"{'Decorator Name'.ljust(name_width)} | {'Added to integration'.ljust(status_width)}"
+         sep = "-" * len(header)
+         print("\n" + header)
+         print(sep)
+         ready=True
+         for row in table:
+             print(f"{row['name'].ljust(name_width)} | {row['Added to integration'].ljust(status_width)}")
+             if row['Added to integration']==CROSS and ready:
+                 ready=False
+                 next_step=row['name']
+
+
+         print(ready_mess) if ready else print(mandatory_ready_mess+next_step) if "optional" in next_step else print(not_ready_mess+next_step)
+     def update_env_params(name: str, status: str = "✓"):
+         for row in table:
+             if _remove_suffix(row["name"]," (optional)") == name:
+                 row["Added to integration"] = CHECK if status=="v" else CROSS
+                 break
+     def run_on_exit():
+         if _finalizer_called["done"]:
+             return
+         _finalizer_called["done"] = True
+         _print_table()
+     def handle_exception(exc_type, exc_value, exc_traceback):
+         traceback.print_exception(exc_type, exc_value, exc_traceback)
+         run_on_exit()
+     atexit.register(run_on_exit)
+     sys.excepthook = handle_exception
+     return update_env_params
+
+
+ if not _call_from_tl_platform:
+     update_env_params_func = tensorleap_status_table()
+
+
+
+
+
+
+
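When the script exits (or crashes), the registered atexit/excepthook handlers print the status table. Roughly, the output looks like the following; column widths and rows depend on which decorators actually ran (illustrative rendering only):

    Decorator Name                           | Added to integration
    --------------------------------------------------------------
    tensorleap_preprocess                    | ✅
    tensorleap_integration_test              | ✅
    tensorleap_input_encoder                 | ✅
    tensorleap_gt_encoder                    | ❌
    ...

    Some mandatory components have not yet been added to the Integration test. Recommended next interface to add is: tensorleap_gt_encoder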
code_loader/mixpanel_tracker.py

@@ -5,53 +5,12 @@ import os
  import sys
  import getpass
  import uuid
- import logging
- from enum import Enum
- from typing import Optional, Dict, Any, Set, Union, TypedDict
+ from typing import Optional, Dict, Any
  import mixpanel  # type: ignore[import]

- logger = logging.getLogger(__name__)
-
  TRACKING_VERSION = '1'


- class AnalyticsEvent(str, Enum):
-     """Enumeration of all tracked analytics events."""
-     CODE_LOADER_LOADED = "code_loader_loaded"
-     LOAD_MODEL_INTEGRATION_TEST = "load_model_integration_test"
-     PREPROCESS_INTEGRATION_TEST = "preprocess_integration_test"
-     INPUT_ENCODER_INTEGRATION_TEST = "input_encoder_integration_test"
-     GT_ENCODER_INTEGRATION_TEST = "gt_encoder_integration_test"
-
-
- class CodeLoaderLoadedProps(TypedDict, total=False):
-     """Properties for code_loader_loaded event."""
-     event_type: str
-     code_path: str
-     code_entry_name: str
-
-
- class LoadModelEventProps(TypedDict, total=False):
-     """Properties for load_model_integration_test event."""
-     prediction_types_count: int
-
-
- class PreprocessEventProps(TypedDict, total=False):
-     """Properties for preprocess_integration_test event."""
-     preprocess_responses_count: int
-
-
- class InputEncoderEventProps(TypedDict, total=False):
-     """Properties for input_encoder_integration_test event."""
-     encoder_name: str
-     channel_dim: int
-
-
- class GtEncoderEventProps(TypedDict, total=False):
-     """Properties for gt_encoder_integration_test event."""
-     encoder_name: str
-
-
  class MixpanelTracker:
      """Handles Mixpanel event tracking for code-loader."""

@@ -69,8 +28,7 @@ class MixpanelTracker:
          if self._user_id is None:
              try:
                  self._user_id = getpass.getuser()
-             except Exception as e:
-                 logger.debug(f"Failed to get username via getpass: {e}")
+             except Exception:
                  # Fallback to environment variables or default
                  self._user_id = os.environ.get('USER', os.environ.get('USERNAME', 'unknown'))
          return self._user_id or 'unknown'
@@ -85,8 +43,8 @@ class MixpanelTracker:
                  user_id = f.read().strip()
                  if user_id:
                      return user_id
-         except Exception as e:
-             logger.debug(f"Failed to read TensorLeap user ID: {e}")
+         except Exception:
+             pass
          return None

      def _get_or_create_device_id(self) -> str:
@@ -115,8 +73,7 @@ class MixpanelTracker:
                      f.write(device_id)

                  return device_id
-         except Exception as e:
-             logger.debug(f"Failed to read/write device ID file: {e}")
+         except Exception:
              # Fallback to generating a new UUID if file operations fail
              return str(uuid.uuid4())

@@ -133,11 +90,10 @@ class MixpanelTracker:

          return self._get_or_create_device_id()

-     def _track_event(self, event_name: Union[str, AnalyticsEvent], event_properties: Optional[Dict[str, Any]] = None) -> None:
-         """Internal method to track any event with device identification.
+     def track_code_loader_loaded(self, event_properties: Optional[Dict[str, Any]] = None) -> None:
+         """Track code loader loaded event with device identification.

          Args:
-             event_name: The name of the event to track (string or AnalyticsEvent enum)
              event_properties: Optional additional properties to include in the event
          """
          # Skip tracking if IS_TENSORLEAP_PLATFORM environment variable is set to 'true'
@@ -166,26 +122,9 @@ class MixpanelTracker:
                  if event_properties:
                      properties.update(event_properties)

-                 self.mp.track(distinct_id, str(event_name), properties)
+                 self.mp.track(distinct_id, 'code_loader_loaded', properties)
              except Exception as e:
-                 logger.debug(f"Failed to track event '{event_name}': {e}")
-
-     def track_code_loader_loaded(self, event_properties: Optional[Dict[str, Any]] = None) -> None:
-         """Track code loader loaded event with device identification.
-
-         Args:
-             event_properties: Optional additional properties to include in the event
-         """
-         self._track_event(AnalyticsEvent.CODE_LOADER_LOADED, event_properties)
-
-     def track_integration_test_event(self, event_name: Union[str, AnalyticsEvent], event_properties: Optional[Dict[str, Any]] = None) -> None:
-         """Track an integration test event with device identification.
-
-         Args:
-             event_name: The name of the event to track (string or AnalyticsEvent enum)
-             event_properties: Optional additional properties to include in the event
-         """
-         self._track_event(event_name, event_properties)
+                 pass


  # Global tracker instance
@@ -201,30 +140,3 @@ def get_tracker() -> MixpanelTracker:

  def track_code_loader_loaded(event_properties: Optional[Dict[str, Any]] = None) -> None:
      get_tracker().track_code_loader_loaded(event_properties)
-
-
- def track_integration_test_event(event_name: Union[str, AnalyticsEvent], event_properties: Optional[Dict[str, Any]] = None) -> None:
-     get_tracker().track_integration_test_event(event_name, event_properties)
-
-
- # Module-level set to track which integration test events have been emitted
- _integration_events_emitted: Set[str] = set()
-
-
- def emit_integration_event_once(event_name: Union[str, AnalyticsEvent], props: Dict[str, Any]) -> None:
-     """Emit an integration test event only once per test run."""
-     event_name_str = str(event_name)
-     if event_name_str in _integration_events_emitted:
-         return
-
-     try:
-         track_integration_test_event(event_name, props)
-         _integration_events_emitted.add(event_name_str)
-     except Exception as e:
-         logger.debug(f"Failed to emit integration event once '{event_name}': {e}")
-
-
- def clear_integration_events() -> None:
-     """Clear the integration events set for a new test run."""
-     global _integration_events_emitted
-     _integration_events_emitted.clear()
code_loader/plot_functions/plot_functions.py

@@ -324,7 +324,7 @@ def plot_image_mask(leap_data: LeapImageMask, title: str) -> None:

          # fill the instance mask with a translucent color
          overlayed_image[instance_mask] = (
-                 overlayed_image[instance_mask] * (1 - 0.5) + np.array(colors[i][:image.shape[-1]], dtype=image.dtype) * 0.5)
+                 overlayed_image[instance_mask] * (1 - 0.5) + np.array(colors[i][:image.shape[-1]], dtype=np.uint8) * 0.5)

          # Display the result using matplotlib
          fig, ax = plt.subplots(1)
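The overlay colour is now built as an explicit uint8 array instead of inheriting image.dtype. A small sketch of the blend arithmetic involved (values are illustrative; NumPy promotes the uint8 operands to float64 for the weighted sum before the result is written back into the image):

    import numpy as np

    color = np.array([255, 0, 0], dtype=np.uint8)
    patch = np.full((2, 3), 200, dtype=np.uint8)
    blended = patch * (1 - 0.5) + color * 0.5   # float64 values such as [227.5, 100.0, 100.0]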
METADATA

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: code-loader
- Version: 1.0.141
+ Version: 1.0.143
  Summary:
  Home-page: https://github.com/tensorleap/code-loader
  License: MIT
RECORD

@@ -1,7 +1,7 @@
  LICENSE,sha256=qIwWjdspQeSMTtnFZBC8MuT-95L02FPvzRUdWFxrwJY,1067
  code_loader/__init__.py,sha256=outxRQ0M-zMfV0QGVJmAed5qWfRmyD0TV6-goEGAzBw,406
  code_loader/contract/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- code_loader/contract/datasetclasses.py,sha256=u0gfDDy02skhFG3ejJOxqxCnykhAcBPGJfv8Bi4s9eQ,8966
+ code_loader/contract/datasetclasses.py,sha256=hkHMU1upWzVWkjIBoU3r14XPDleicif4Oia2xfuHgvQ,9395
  code_loader/contract/enums.py,sha256=GEFkvUMXnCNt-GOoz7NJ9ecQZ2PPDettJNOsxsiM0wk,1622
  code_loader/contract/exceptions.py,sha256=jWqu5i7t-0IG0jGRsKF4DjJdrsdpJjIYpUkN1F4RiyQ,51
  code_loader/contract/mapping.py,sha256=sWJhpng-IkOzQnWQdMT5w2ZZ3X1Z_OOzSwCLXIS7oxE,1446
@@ -21,17 +21,17 @@ code_loader/experiment_api/utils.py,sha256=XZHtxge12TS4H4-8PjV3sKuhp8Ud6ojAiIzTZ
  code_loader/experiment_api/workingspace_config_utils.py,sha256=DLzXQCg4dgTV_YgaSbeTVzq-2ja_SQw4zi7LXwKL9cY,990
  code_loader/inner_leap_binder/__init__.py,sha256=koOlJyMNYzGbEsoIbXathSmQ-L38N_pEXH_HvL7beXU,99
  code_loader/inner_leap_binder/leapbinder.py,sha256=Q3D9yVM-GNEJfYRFvMV__BoZbcWOgnWKhrZXAv6Tu7o,33232
- code_loader/inner_leap_binder/leapbinder_decorators.py,sha256=cQwk6sHC6il4eqhxv6rnM-abP7VTL2glYnXNeU26zq4,53943
+ code_loader/inner_leap_binder/leapbinder_decorators.py,sha256=hHs_ZUJFfxd6sFUAdg4WUOA9a-75NhBaolS_u4LcAMY,70871
  code_loader/leaploader.py,sha256=6D6xZzMI6qSNIb3tuKLB3BbK5H8QS1_r7iQjIXO3OkM,29795
  code_loader/leaploaderbase.py,sha256=LIFcC6xo6V_iiGN3BjibXETu_l84EWM_WIOKAvkfTiM,4458
- code_loader/mixpanel_tracker.py,sha256=eKvymkw7X2Ht6iw-a0V9VQm6OnB9kW7hYy35YtwRAvU,8457
+ code_loader/mixpanel_tracker.py,sha256=l9z_szKKQ7apEbdNZpGH1TKAiT_TsBHb9AQnePaWTyo,4942
  code_loader/plot_functions/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- code_loader/plot_functions/plot_functions.py,sha256=OGFLfbL31N2wuwcXIxxQ14f0Kuuvv1BZkAuFi2c0ma4,14560
+ code_loader/plot_functions/plot_functions.py,sha256=VyVWxd7R3lALIo2z8oZlYybbN0Ip6G0OiKNTNZ77xHk,14557
  code_loader/plot_functions/visualize.py,sha256=gsBAYYkwMh7jIpJeDMPS8G4CW-pxwx6LznoQIvi4vpo,657
  code_loader/utils.py,sha256=gXENTYpjdidq2dx0gVbXlErPeHoNs-4TYAZbLRe0y2c,2712
  code_loader/visualizers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  code_loader/visualizers/default_visualizers.py,sha256=onRnLE_TXfgLN4o52hQIOOhUcFexGlqJ3xSpQDVLuZM,2604
- code_loader-1.0.141.dist-info/LICENSE,sha256=qIwWjdspQeSMTtnFZBC8MuT-95L02FPvzRUdWFxrwJY,1067
- code_loader-1.0.141.dist-info/METADATA,sha256=lsbN9TJFGKpD9hcJSu9-7DLnTRcCqSF8yJl24bi7Ku8,1090
- code_loader-1.0.141.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
- code_loader-1.0.141.dist-info/RECORD,,
+ code_loader-1.0.143.dist-info/LICENSE,sha256=qIwWjdspQeSMTtnFZBC8MuT-95L02FPvzRUdWFxrwJY,1067
+ code_loader-1.0.143.dist-info/METADATA,sha256=e5jK7vsHg96yteM76OwAa7MJYXGkh_C_T1NOFuDun6I,1090
+ code_loader-1.0.143.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
+ code_loader-1.0.143.dist-info/RECORD,,
WHEEL

@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 1.9.0
+ Generator: poetry-core 1.9.1
  Root-Is-Purelib: true
  Tag: py3-none-any