code-loader 1.0.24__tar.gz → 1.0.26__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {code_loader-1.0.24 → code_loader-1.0.26}/PKG-INFO +1 -1
- code_loader-1.0.26/code_loader/code_inegration_processes_manager.py +83 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/leaploader.py +45 -12
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/utils.py +6 -3
- {code_loader-1.0.24 → code_loader-1.0.26}/pyproject.toml +1 -1
- {code_loader-1.0.24 → code_loader-1.0.26}/LICENSE +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/README.md +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/__init__.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/contract/__init__.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/contract/datasetclasses.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/contract/enums.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/contract/exceptions.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/contract/responsedataclasses.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/contract/visualizer_classes.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/inner_leap_binder/__init__.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/inner_leap_binder/leapbinder.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/visualizers/__init__.py +0 -0
- {code_loader-1.0.24 → code_loader-1.0.26}/code_loader/visualizers/default_visualizers.py +0 -0
code_loader-1.0.26/code_loader/code_inegration_processes_manager.py (new file)

@@ -0,0 +1,83 @@
+# mypy: ignore-errors
+import traceback
+from dataclasses import dataclass
+
+from typing import List, Tuple, Optional
+
+from multiprocessing import Process, Queue
+
+from code_loader.leap_loader_parallelized_base import LeapLoaderParallelizedBase
+from code_loader.leaploader import LeapLoader
+from code_loader.contract.enums import DataStateEnum
+from code_loader.metric_calculator_parallelized import MetricCalculatorParallelized
+from code_loader.samples_generator_parallelized import SamplesGeneratorParallelized
+
+
+@dataclass
+class SampleSerializableError:
+    state: DataStateEnum
+    index: int
+    leap_script_trace: str
+    exception_as_str: str
+
+
+class CodeIntegrationProcessesManager:
+    def __init__(self, code_path: str, code_entry_name: str, n_workers: Optional[int] = 2,
+                 max_samples_in_queue: int = 128) -> None:
+        self.metric_calculator_parallelized = MetricCalculatorParallelized(code_path, code_entry_name)
+        self.samples_generator_parallelized = SamplesGeneratorParallelized(code_path, code_entry_name)
+
+    def _create_and_start_process(self) -> Process:
+        process = self.multiprocessing_context.Process(
+            target=CodeIntegrationProcessesManager._process_func,
+            args=(self.code_path, self.code_entry_name, self._inputs_waiting_to_be_process,
+                  self._ready_processed_results))
+        process.daemon = True
+        process.start()
+        return process
+
+    def _run_and_warm_first_process(self):
+        process = self._create_and_start_process()
+        self.processes = [process]
+
+        # needed in order to make sure the preprocess func runs once in nonparallel
+        self._start_process_inputs([(DataStateEnum.training, 0)])
+        self._get_next_ready_processed_result()
+
+    def _operation_decider(self):
+        if self.metric_calculator_parallelized._ready_processed_results.empty() and not \
+                self.metric_calculator_parallelized._inputs_waiting_to_be_process.empty():
+            return 'metric'
+
+        if self.samples_generator_parallelized._ready_processed_results.empty() and not \
+                self.samples_generator_parallelized._inputs_waiting_to_be_process.empty():
+            return 'dataset'
+
+
+
+
+    @staticmethod
+    def _process_func(code_path: str, code_entry_name: str,
+                      samples_to_process: Queue, ready_samples: Queue,
+                      metrics_to_process: Queue, ready_metrics: Queue) -> None:
+        import os
+        os.environ["CUDA_VISIBLE_DEVICES"] = "-1"
+
+        leap_loader = LeapLoader(code_path, code_entry_name)
+        while True:
+
+            # decide on sample or metric to process
+            state, idx = samples_to_process.get(block=True)
+            leap_loader._preprocess_result()
+            try:
+                sample = leap_loader.get_sample(state, idx)
+            except Exception as e:
+                leap_script_trace = traceback.format_exc().split('File "<string>"')[-1]
+                ready_samples.put(SampleSerializableError(state, idx, leap_script_trace, str(e)))
+                continue
+
+            ready_samples.put(sample)
+
+    def generate_samples(self, sample_identities: List[Tuple[DataStateEnum, int]]):
+        return self.start_process_inputs(sample_identities)
+
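For orientation, the new code_inegration_processes_manager module runs the user's dataset script in worker processes: each daemon worker pulls (state, index) items off a multiprocessing queue and pushes back either the generated sample or a picklable SampleSerializableError. The snippet below is a minimal, standalone sketch of that queue-based worker pattern only; the names in it (WorkError, worker) are hypothetical and are not part of the code_loader API.

import traceback
from dataclasses import dataclass
from multiprocessing import Process, Queue


@dataclass
class WorkError:
    index: int
    trace: str


def worker(tasks: Queue, results: Queue) -> None:
    # Loop forever; the daemon flag lets the parent exit without joining.
    while True:
        idx = tasks.get(block=True)
        try:
            value = 1.0 / (idx - 2)  # raises ZeroDivisionError when idx == 2
            results.put((idx, value))
        except Exception:
            # Exceptions do not cross process boundaries cleanly, so ship a
            # plain, picklable record holding the formatted traceback instead.
            results.put(WorkError(idx, traceback.format_exc()))


if __name__ == "__main__":
    tasks: Queue = Queue()
    results: Queue = Queue()
    process = Process(target=worker, args=(tasks, results), daemon=True)
    process.start()
    for i in range(4):
        tasks.put(i)
    for _ in range(4):
        print(results.get(block=True))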
{code_loader-1.0.24 → code_loader-1.0.26}/code_loader/leaploader.py

@@ -5,7 +5,7 @@ import sys
 from contextlib import redirect_stdout
 from functools import lru_cache
 from pathlib import Path
-from typing import Dict, List, Iterable, Union
+from typing import Dict, List, Iterable, Union, Any
 
 import numpy as np
 import numpy.typing as npt

@@ -19,7 +19,7 @@ from code_loader.contract.responsedataclasses import DatasetIntegParseResult, Da
     DatasetPreprocess, DatasetSetup, DatasetInputInstance, DatasetOutputInstance, DatasetMetadataInstance, \
     VisualizerInstance, PredictionTypeInstance, ModelSetup, CustomLayerInstance, MetricInstance, CustomLossInstance
 from code_loader.inner_leap_binder import global_leap_binder
-from code_loader.utils import
+from code_loader.utils import get_root_exception_file_and_line_number
 
 
 class LeapLoader:

@@ -127,12 +127,12 @@ class LeapLoader:
             is_valid = all([payload.is_passed for payload in test_payloads])
             setup_response = self.get_dataset_setup_response(handlers_test_payloads)
         except DatasetScriptException as e:
-            line_number =
-            general_error = f"Something went wrong
+            line_number, file_name = get_root_exception_file_and_line_number()
+            general_error = f"Something went wrong. {repr(e.__cause__)} in file {file_name}, line_number: {line_number}"
             is_valid = False
         except Exception as e:
-            line_number =
-            general_error = f"Something went wrong
+            line_number, file_name = get_root_exception_file_and_line_number()
+            general_error = f"Something went wrong. {repr(e.__cause__)} in file {file_name}, line_number: {line_number}"
             is_valid = False
 
         print_log = stdout_steam.getvalue()

@@ -150,8 +150,8 @@ class LeapLoader:
             preprocess_result = self._preprocess_result()
             global_leap_binder.check_preprocess(preprocess_result)
         except Exception as e:
-            line_number =
-            error_string = f"{repr(e)}
+            line_number, file_name = get_root_exception_file_and_line_number()
+            error_string = f"{repr(e)} in file {file_name}, line_number: {line_number}"
             test_result.display[TestingSectionEnum.Errors.name] = error_string
             test_result.is_passed = False
         return test_result

@@ -170,8 +170,8 @@ class LeapLoader:
                 test_result = global_leap_binder.check_handler(
                     preprocess_response, test_result, dataset_base_handler)
             except Exception as e:
-                line_number =
-                test_result[0].display[state_name] = f"{repr(e)}
+                line_number, file_name = get_root_exception_file_and_line_number()
+                test_result[0].display[state_name] = f"{repr(e)} in file {file_name}, line_number: {line_number}"
                 test_result[0].is_passed = False
 
             result_payloads.extend(test_result)

@@ -301,7 +301,40 @@ class LeapLoader:
     def _get_gt(self, state: DataStateEnum, idx: int) -> Dict[str, npt.NDArray[np.float32]]:
         return self._get_dataset_handlers(global_leap_binder.setup_container.ground_truths, state, idx)
 
+    @lru_cache()
+    def _metadata_name_to_type(self) -> Dict[str, DatasetMetadataType]:
+        global_leap_binder.check_preprocess(self._preprocess_result())
+        handlers_test_payloads = self._check_handlers()
+        metadata_setup = self.get_dataset_setup_response(handlers_test_payloads).metadata
+        metadata_name_to_type = {
+            metadata_instance.name: metadata_instance.type
+            for metadata_instance in metadata_setup
+        }
+        return metadata_name_to_type
+
     def _get_metadata(self, state: DataStateEnum, idx: int) -> Dict[str, Union[str, int, bool, float]]:
+        def _convert_metadata_to_correct_type(metadata_name: str, value: Any) -> Any:
+            metadata_name_to_type = self._metadata_name_to_type()
+            metadata_type_to_python_type = {
+                DatasetMetadataType.float: float,
+                DatasetMetadataType.string: str,
+                DatasetMetadataType.boolean: bool,
+                DatasetMetadataType.int: int
+            }
+            metadata_type_to_default_value = {
+                DatasetMetadataType.float: -1,
+                DatasetMetadataType.string: "",
+                DatasetMetadataType.boolean: False,
+                DatasetMetadataType.int: -1
+            }
+
+            try:
+                converted_value = metadata_type_to_python_type[metadata_name_to_type[metadata_name]](value)
+            except ValueError:
+                converted_value = metadata_type_to_default_value[metadata_name_to_type[metadata_name]]
+
+            return converted_value
+
         result_agg = {}
         preprocess_result = self._preprocess_result()
         preprocess_state = preprocess_result[state]

@@ -310,9 +343,9 @@ class LeapLoader:
             if isinstance(handler_result, dict):
                 for single_metadata_name, single_metadata_result in handler_result.items():
                     handler_name = f'{handler.name}_{single_metadata_name}'
-                    result_agg[handler_name] = single_metadata_result
+                    result_agg[handler_name] = _convert_metadata_to_correct_type(handler_name, single_metadata_result)
             else:
                 handler_name = handler.name
-                result_agg[handler_name] = handler_result
+                result_agg[handler_name] = _convert_metadata_to_correct_type(handler_name, handler_result)
 
         return result_agg
{code_loader-1.0.24 → code_loader-1.0.26}/code_loader/utils.py

@@ -1,6 +1,7 @@
 import sys
+from pathlib import Path
 from types import TracebackType
-from typing import List, Union
+from typing import List, Union, Tuple
 
 import numpy as np
 import numpy.typing as npt

@@ -24,7 +25,7 @@ def get_root_traceback(exc_tb: TracebackType) -> TracebackType:
     return return_traceback
 
 
-def
+def get_root_exception_file_and_line_number() -> Tuple[int, str]:
     root_exception = sys.exc_info()[1]
     assert root_exception is not None
     if root_exception.__context__ is not None:

@@ -32,10 +33,12 @@ def get_root_exception_line_number() -> int:
     traceback = root_exception.__traceback__
 
     root_exception_line_number = -1
+    root_exception_file_name = ''
     if traceback is not None:
         root_traceback = get_root_traceback(traceback)
         root_exception_line_number = root_traceback.tb_lineno
-
+        root_exception_file_name = Path(root_traceback.tb_frame.f_code.co_filename).name
+    return root_exception_line_number, root_exception_file_name
 
 
 def get_shape(result: Union[npt.NDArray[np.float32], str, float, int, bool]) -> List[int]:
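The utils.py change extends the existing line-number helper so it also reports the file name of the frame where the root exception occurred. Below is a simplified, self-contained sketch of the same idea; it only walks the active traceback (not chained exceptions), and the function name is a hypothetical stand-in rather than the code_loader API.

import sys
from pathlib import Path
from typing import Tuple


def innermost_frame_location() -> Tuple[int, str]:
    # Inspect the exception currently being handled.
    exc = sys.exc_info()[1]
    assert exc is not None
    tb = exc.__traceback__
    line_number, file_name = -1, ""
    while tb is not None:
        # Walk down to the deepest frame, where the error was actually raised.
        line_number = tb.tb_lineno
        file_name = Path(tb.tb_frame.f_code.co_filename).name
        tb = tb.tb_next
    return line_number, file_name


try:
    int("not a number")
except ValueError:
    print(innermost_frame_location())  # e.g. (21, 'example.py')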
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|
File without changes
|