power-grid-model 1.12.58__py3-none-win_amd64.whl → 1.12.59__py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of power-grid-model might be problematic. See the package registry's advisory page for more details.

Files changed (59):
  1. power_grid_model/__init__.py +54 -54
  2. power_grid_model/_core/__init__.py +3 -3
  3. power_grid_model/_core/buffer_handling.py +493 -493
  4. power_grid_model/_core/data_handling.py +141 -141
  5. power_grid_model/_core/data_types.py +132 -132
  6. power_grid_model/_core/dataset_definitions.py +109 -109
  7. power_grid_model/_core/enum.py +226 -226
  8. power_grid_model/_core/error_handling.py +206 -206
  9. power_grid_model/_core/errors.py +130 -130
  10. power_grid_model/_core/index_integer.py +17 -17
  11. power_grid_model/_core/options.py +71 -71
  12. power_grid_model/_core/power_grid_core.py +563 -563
  13. power_grid_model/_core/power_grid_dataset.py +535 -535
  14. power_grid_model/_core/power_grid_meta.py +243 -243
  15. power_grid_model/_core/power_grid_model.py +686 -686
  16. power_grid_model/_core/power_grid_model_c/__init__.py +3 -3
  17. power_grid_model/_core/power_grid_model_c/bin/power_grid_model_c.dll +0 -0
  18. power_grid_model/_core/power_grid_model_c/get_pgm_dll_path.py +63 -63
  19. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/basics.h +255 -255
  20. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/buffer.h +108 -108
  21. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/dataset.h +316 -316
  22. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/dataset_definitions.h +1052 -1052
  23. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/handle.h +99 -99
  24. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/meta_data.h +189 -189
  25. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/model.h +125 -125
  26. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/options.h +142 -142
  27. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/serialization.h +118 -118
  28. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c.h +36 -36
  29. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/basics.hpp +65 -65
  30. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/buffer.hpp +61 -61
  31. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/dataset.hpp +220 -220
  32. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/handle.hpp +108 -108
  33. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/meta_data.hpp +84 -84
  34. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/model.hpp +63 -63
  35. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/options.hpp +52 -52
  36. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/serialization.hpp +124 -124
  37. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/utils.hpp +81 -81
  38. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp.hpp +19 -19
  39. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelConfigVersion.cmake +3 -3
  40. power_grid_model/_core/serialization.py +317 -317
  41. power_grid_model/_core/typing.py +20 -20
  42. power_grid_model/_core/utils.py +798 -798
  43. power_grid_model/data_types.py +321 -321
  44. power_grid_model/enum.py +27 -27
  45. power_grid_model/errors.py +37 -37
  46. power_grid_model/typing.py +43 -43
  47. power_grid_model/utils.py +473 -473
  48. power_grid_model/validation/__init__.py +25 -25
  49. power_grid_model/validation/_rules.py +1171 -1171
  50. power_grid_model/validation/_validation.py +1172 -1172
  51. power_grid_model/validation/assertions.py +93 -93
  52. power_grid_model/validation/errors.py +602 -602
  53. power_grid_model/validation/utils.py +313 -313
  54. {power_grid_model-1.12.58.dist-info → power_grid_model-1.12.59.dist-info}/METADATA +1 -1
  55. power_grid_model-1.12.59.dist-info/RECORD +65 -0
  56. power_grid_model-1.12.58.dist-info/RECORD +0 -65
  57. {power_grid_model-1.12.58.dist-info → power_grid_model-1.12.59.dist-info}/WHEEL +0 -0
  58. {power_grid_model-1.12.58.dist-info → power_grid_model-1.12.59.dist-info}/entry_points.txt +0 -0
  59. {power_grid_model-1.12.58.dist-info → power_grid_model-1.12.59.dist-info}/licenses/LICENSE +0 -0
@@ -1,141 +1,141 @@
1
- # SPDX-FileCopyrightText: Contributors to the Power Grid Model project <powergridmodel@lfenergy.org>
2
- #
3
- # SPDX-License-Identifier: MPL-2.0
4
-
5
- """
6
- Data handling
7
- """
8
-
9
- import numpy as np
10
-
11
- from power_grid_model._core.data_types import Dataset, SingleDataset
12
- from power_grid_model._core.dataset_definitions import ComponentType, DatasetType
13
- from power_grid_model._core.enum import CalculationType, ComponentAttributeFilterOptions
14
- from power_grid_model._core.errors import PowerGridUnreachableHitError
15
- from power_grid_model._core.power_grid_dataset import CConstDataset, CMutableDataset
16
- from power_grid_model._core.power_grid_meta import initialize_array, power_grid_meta_data
17
- from power_grid_model._core.typing import ComponentAttributeMapping
18
- from power_grid_model._core.utils import process_data_filter
19
-
20
-
21
- def get_output_type(*, calculation_type: CalculationType, symmetric: bool) -> DatasetType:
22
- """
23
- Get the output type based on the provided arguments.
24
-
25
- Args:
26
- calculation_type:
27
- request the output type for a specific calculation type (power_flow, state_estimation, ...)
28
- symmetric:
29
- True: three-phase symmetric calculation, even for asymmetric loads/generations
30
- False: three-phase asymmetric calculation
31
-
32
- Returns:
33
- the output type that fits the format requested by the output type
34
- """
35
- if calculation_type in (CalculationType.power_flow, CalculationType.state_estimation):
36
- return DatasetType.sym_output if symmetric else DatasetType.asym_output
37
-
38
- if calculation_type == CalculationType.short_circuit:
39
- return DatasetType.sc_output
40
-
41
- raise NotImplementedError
42
-
43
-
44
- def prepare_input_view(input_data: SingleDataset) -> CConstDataset:
45
- """
46
- Create a view of the input data in a format compatible with the PGM core libary.
47
-
48
- Args:
49
- input_data:
50
- the input data to create the view from
51
-
52
- Returns:
53
- instance of CConstDataset ready to be fed into C API
54
- """
55
- return CConstDataset(input_data, dataset_type=DatasetType.input)
56
-
57
-
58
- def prepare_update_view(update_data: Dataset) -> CConstDataset:
59
- """
60
- Create a view of the update data, or an empty view if not provided, in a format compatible with the PGM core libary.
61
-
62
- Args:
63
- update_data:
64
- the update data to create the view from. Defaults to None
65
-
66
- Returns:
67
- instance of CConstDataset ready to be fed into C API
68
- """
69
- return CConstDataset(update_data, dataset_type=DatasetType.update)
70
-
71
-
72
- def prepare_output_view(output_data: Dataset, output_type: DatasetType) -> CMutableDataset:
73
- """
74
- create a view of the output data in a format compatible with the PGM core libary.
75
-
76
- Args:
77
- output_data:
78
- the output data to create the view from
79
- output_type:
80
- the output type of the output_data
81
-
82
- Returns:
83
- instance of CMutableDataset ready to be fed into C API
84
- """
85
- return CMutableDataset(output_data, dataset_type=output_type)
86
-
87
-
88
- def create_output_data(
89
- output_component_types: ComponentAttributeMapping,
90
- output_type: DatasetType,
91
- all_component_count: dict[ComponentType, int],
92
- is_batch: bool,
93
- batch_size: int,
94
- ) -> Dataset:
95
- """
96
- Create the output dataset based on component and batch size from the model; and output attributes requested by user.
97
-
98
- Args:
99
- output_component_types:
100
- the output components the user seeks to extract
101
- output_type:
102
- the type of output that the user will see (as per the calculation options)
103
- all_component_count:
104
- the amount of components in the grid (as per the input data)
105
- is_batch:
106
- if the dataset is batch
107
- batch_size:
108
- the batch size
109
-
110
- Returns:
111
- Dataset: output dataset
112
- """
113
- processed_output_types = process_data_filter(output_type, output_component_types, list(all_component_count.keys()))
114
-
115
- all_component_count = {k: v for k, v in all_component_count.items() if k in processed_output_types}
116
-
117
- # create result dataset
118
- result_dict: Dataset = {}
119
-
120
- for name, count in all_component_count.items():
121
- # shape
122
- if is_batch:
123
- shape: tuple[int] | tuple[int, int] = (batch_size, count)
124
- else:
125
- shape = (count,)
126
-
127
- requested_component = processed_output_types[name]
128
- dtype = power_grid_meta_data[output_type][name].dtype
129
- if dtype.names is None:
130
- raise PowerGridUnreachableHitError
131
- if requested_component is None:
132
- result_dict[name] = initialize_array(output_type, name, shape=shape, empty=True)
133
- elif requested_component in [
134
- ComponentAttributeFilterOptions.everything,
135
- ComponentAttributeFilterOptions.relevant,
136
- ]:
137
- result_dict[name] = {attr: np.empty(shape, dtype=dtype[attr]) for attr in dtype.names}
138
- elif isinstance(requested_component, list | set):
139
- result_dict[name] = {attr: np.empty(shape, dtype=dtype[attr]) for attr in requested_component}
140
-
141
- return result_dict
1
+ # SPDX-FileCopyrightText: Contributors to the Power Grid Model project <powergridmodel@lfenergy.org>
2
+ #
3
+ # SPDX-License-Identifier: MPL-2.0
4
+
5
+ """
6
+ Data handling
7
+ """
8
+
9
+ import numpy as np
10
+
11
+ from power_grid_model._core.data_types import Dataset, SingleDataset
12
+ from power_grid_model._core.dataset_definitions import ComponentType, DatasetType
13
+ from power_grid_model._core.enum import CalculationType, ComponentAttributeFilterOptions
14
+ from power_grid_model._core.errors import PowerGridUnreachableHitError
15
+ from power_grid_model._core.power_grid_dataset import CConstDataset, CMutableDataset
16
+ from power_grid_model._core.power_grid_meta import initialize_array, power_grid_meta_data
17
+ from power_grid_model._core.typing import ComponentAttributeMapping
18
+ from power_grid_model._core.utils import process_data_filter
19
+
20
+
21
def get_output_type(*, calculation_type: CalculationType, symmetric: bool) -> DatasetType:
    """
    Map a calculation type and symmetry flag onto the matching output dataset type.

    Args:
        calculation_type:
            the calculation type to request the output type for
            (power_flow, state_estimation, short_circuit, ...)
        symmetric:
            True: three-phase symmetric calculation, even for asymmetric loads/generations
            False: three-phase asymmetric calculation

    Returns:
        the dataset type that matches the requested calculation

    Raises:
        NotImplementedError: if no output type is defined for the calculation type
    """
    # Short-circuit has a single dedicated output format, regardless of symmetry.
    if calculation_type == CalculationType.short_circuit:
        return DatasetType.sc_output

    # Power flow and state estimation share the sym/asym output formats.
    if calculation_type in (CalculationType.power_flow, CalculationType.state_estimation):
        if symmetric:
            return DatasetType.sym_output
        return DatasetType.asym_output

    raise NotImplementedError
42
+
43
+
44
def prepare_input_view(input_data: SingleDataset) -> CConstDataset:
    """
    Create a view of the input data in a format compatible with the PGM core library.

    Args:
        input_data:
            the input data to create the view from

    Returns:
        instance of CConstDataset ready to be fed into C API
    """
    return CConstDataset(input_data, dataset_type=DatasetType.input)
56
+
57
+
58
def prepare_update_view(update_data: Dataset) -> CConstDataset:
    """
    Create a view of the update data in a format compatible with the PGM core library.

    Args:
        update_data:
            the update data to create the view from

    Returns:
        instance of CConstDataset ready to be fed into C API
    """
    return CConstDataset(update_data, dataset_type=DatasetType.update)
70
+
71
+
72
def prepare_output_view(output_data: Dataset, output_type: DatasetType) -> CMutableDataset:
    """
    Create a view of the output data in a format compatible with the PGM core library.

    Args:
        output_data:
            the output data to create the view from
        output_type:
            the output type of the output_data

    Returns:
        instance of CMutableDataset ready to be fed into C API
    """
    return CMutableDataset(output_data, dataset_type=output_type)
86
+
87
+
88
def create_output_data(
    output_component_types: ComponentAttributeMapping,
    output_type: DatasetType,
    all_component_count: dict[ComponentType, int],
    is_batch: bool,
    batch_size: int,
) -> Dataset:
    """
    Create the output dataset based on component and batch size from the model; and output attributes requested by user.

    Args:
        output_component_types:
            the output components the user seeks to extract
        output_type:
            the type of output that the user will see (as per the calculation options)
        all_component_count:
            the amount of components in the grid (as per the input data)
        is_batch:
            if the dataset is batch
        batch_size:
            the batch size

    Returns:
        Dataset: output dataset
    """
    attribute_filter = process_data_filter(output_type, output_component_types, list(all_component_count.keys()))

    result: Dataset = {}

    for component, count in all_component_count.items():
        # skip components that were filtered out of the requested output
        if component not in attribute_filter:
            continue

        # batch output gets a leading scenario axis; single output is flat
        shape: tuple[int] | tuple[int, int] = (batch_size, count) if is_batch else (count,)

        requested_attributes = attribute_filter[component]
        dtype = power_grid_meta_data[output_type][component].dtype
        # output dtypes are structured; a plain dtype here indicates an internal error
        if dtype.names is None:
            raise PowerGridUnreachableHitError

        if requested_attributes is None:
            # row-based output: one structured array per component
            result[component] = initialize_array(output_type, component, shape=shape, empty=True)
        elif requested_attributes in (
            ComponentAttributeFilterOptions.everything,
            ComponentAttributeFilterOptions.relevant,
        ):
            # columnar output: one array per attribute, covering all attributes
            result[component] = {attr: np.empty(shape, dtype=dtype[attr]) for attr in dtype.names}
        elif isinstance(requested_attributes, list | set):
            # columnar output restricted to the explicitly requested attributes
            result[component] = {attr: np.empty(shape, dtype=dtype[attr]) for attr in requested_attributes}

    return result
@@ -1,132 +1,132 @@
1
- # SPDX-FileCopyrightText: Contributors to the Power Grid Model project <powergridmodel@lfenergy.org>
2
- #
3
- # SPDX-License-Identifier: MPL-2.0
4
-
5
- """
6
- Data types involving PGM datasets.
7
-
8
- Data types for library-internal use. In an attempt to clarify type hints, some types
9
- have been defined and explained in this file.
10
- """
11
-
12
- from typing import TypeAlias, TypedDict, TypeVar
13
-
14
- import numpy as np
15
-
16
- from power_grid_model._core.dataset_definitions import ComponentTypeVar
17
-
18
- SingleArray: TypeAlias = np.ndarray
19
-
20
- AttributeType: TypeAlias = str
21
-
22
- SingleColumn: TypeAlias = np.ndarray
23
-
24
- DenseBatchArray: TypeAlias = np.ndarray
25
-
26
- SingleColumnarData = dict[AttributeType, SingleColumn]
27
-
28
- _SingleComponentData = TypeVar("_SingleComponentData", SingleArray, SingleColumnarData) # deduction helper
29
- SingleComponentData = SingleArray | SingleColumnarData
30
-
31
-
32
- SingleDataset = dict[ComponentTypeVar, _SingleComponentData]
33
-
34
- BatchList = list[SingleDataset]
35
-
36
- BatchColumn: TypeAlias = np.ndarray
37
-
38
- DenseBatchColumnarData = dict[AttributeType, BatchColumn]
39
-
40
- IndexPointer: TypeAlias = np.ndarray
41
-
42
-
43
- class SparseBatchColumnarData(TypedDict):
44
- """
45
- Sparse batch columnar data is a dictionary containing the keys `indptr` and `data`.
46
-
47
- - data: a :class:`SingleColumnarData`. The exact supported attribute columns depend on the component type.
48
- - indptr: an :class:`IndexPointer` representing the start and end indices for each batch scenario.
49
-
50
- - Examples:
51
-
52
- - structure: {"indptr": :class:`IndexPointer`, "data": :class:`SingleColumnarData`}
53
- - concrete example: {"indptr": [0, 2, 2, 3], "data": {"id": [0, 1, 0], "status": [1, 1, 0]}}
54
-
55
- - the scenario 0 sets the status of components with ids 0 and 1 to 1
56
- (and keeps defaults for other components)
57
- - scenario 1 keeps the default values for all components
58
- - scenario 2 sets the status of component with id 0 to 0 (and keeps defaults for other components)
59
- """
60
-
61
- indptr: IndexPointer
62
- data: SingleColumnarData
63
-
64
-
65
- class SparseBatchArray(TypedDict):
66
- """
67
- A sparse batch array is a dictionary containing the keys `indptr` and `data`.
68
-
69
- - data: a :class:`SingleArray`. The exact dtype depends on the type of component.
70
- - indptr: an :class:`IndexPointer` representing the start and end indices for each batch scenario.
71
-
72
- - Examples:
73
-
74
- - structure: {"indptr": :class:`IndexPointer`, "data": :class:`SingleArray`}
75
- - concrete example: {"indptr": [0, 2, 2, 3], "data": [(0, 1, 1), (1, 1, 1), (0, 0, 0)]}
76
-
77
- - the scenario 0 sets the statuses of components with ids 0 and 1 to 1
78
- (and keeps defaults for other components)
79
- - scenario 1 keeps the default values for all components
80
- - scenario 2 sets the statuses of component with id 0 to 0 (and keeps defaults for other components)
81
- """
82
-
83
- indptr: IndexPointer
84
- data: SingleArray
85
-
86
-
87
- SparseBatchData = SparseBatchArray | SparseBatchColumnarData
88
-
89
- SparseDataComponentType: TypeAlias = str
90
-
91
- BatchColumnarData = DenseBatchColumnarData | SparseBatchColumnarData
92
-
93
- ColumnarData = SingleColumnarData | BatchColumnarData
94
- BatchArray = DenseBatchArray | SparseBatchArray
95
-
96
-
97
- BatchComponentData = BatchArray | BatchColumnarData
98
-
99
- _BatchComponentData = TypeVar("_BatchComponentData", BatchArray, BatchColumnarData) # deduction helper
100
-
101
-
102
- BatchDataset = dict[ComponentTypeVar, _BatchComponentData]
103
-
104
-
105
- DataArray = SingleArray | BatchArray
106
-
107
-
108
- _ComponentData = TypeVar("_ComponentData", SingleComponentData, BatchComponentData) # deduction helper
109
- ComponentData = DataArray | ColumnarData
110
-
111
- Dataset = dict[ComponentTypeVar, _ComponentData]
112
-
113
-
114
- DenseBatchData = DenseBatchArray | DenseBatchColumnarData
115
-
116
- NominalValue = int
117
-
118
- RealValue = float
119
-
120
- AsymValue = tuple[RealValue, RealValue, RealValue]
121
-
122
- AttributeValue = RealValue | NominalValue | AsymValue
123
-
124
- Component = dict[AttributeType, AttributeValue | str]
125
-
126
- ComponentList = list[Component]
127
-
128
- SinglePythonDataset = dict[ComponentTypeVar, ComponentList]
129
-
130
- BatchPythonDataset = list[SinglePythonDataset]
131
-
132
- PythonDataset = SinglePythonDataset | BatchPythonDataset
1
+ # SPDX-FileCopyrightText: Contributors to the Power Grid Model project <powergridmodel@lfenergy.org>
2
+ #
3
+ # SPDX-License-Identifier: MPL-2.0
4
+
5
+ """
6
+ Data types involving PGM datasets.
7
+
8
+ Data types for library-internal use. In an attempt to clarify type hints, some types
9
+ have been defined and explained in this file.
10
+ """
11
+
12
+ from typing import TypeAlias, TypedDict, TypeVar
13
+
14
+ import numpy as np
15
+
16
+ from power_grid_model._core.dataset_definitions import ComponentTypeVar
17
+
18
# Row-based data for one component in a single scenario.
# NOTE(review): presumably a structured numpy array whose fields are the
# component's attributes (elsewhere in this package dtype.names is iterated) — confirm.
SingleArray: TypeAlias = np.ndarray

# Name of a single component attribute (field), e.g. "id".
AttributeType: TypeAlias = str

# One attribute's values for all elements of a component, single scenario.
SingleColumn: TypeAlias = np.ndarray

# Row-based batch data; the leading axis is the batch (scenario) axis.
DenseBatchArray: TypeAlias = np.ndarray

# Columnar single-scenario data: attribute name -> column array.
SingleColumnarData = dict[AttributeType, SingleColumn]

_SingleComponentData = TypeVar("_SingleComponentData", SingleArray, SingleColumnarData)  # deduction helper
# Either representation of one component's single-scenario data.
SingleComponentData = SingleArray | SingleColumnarData


# A single-scenario dataset: component type -> component data.
SingleDataset = dict[ComponentTypeVar, _SingleComponentData]

# A batch expressed as a list of single-scenario datasets.
BatchList = list[SingleDataset]

# One attribute's values across a batch of scenarios.
BatchColumn: TypeAlias = np.ndarray

# Columnar dense-batch data: attribute name -> batch column.
DenseBatchColumnarData = dict[AttributeType, BatchColumn]

# Index pointer array delimiting per-scenario slices in sparse batch data
# (see the SparseBatch* TypedDicts below for the layout).
IndexPointer: TypeAlias = np.ndarray
41
+
42
+
43
class SparseBatchColumnarData(TypedDict):
    """
    Sparse batch columnar data is a dictionary containing the keys `indptr` and `data`.

    - data: a :class:`SingleColumnarData`. The exact supported attribute columns depend on the component type.
    - indptr: an :class:`IndexPointer` representing the start and end indices for each batch scenario.

    - Examples:

        - structure: {"indptr": :class:`IndexPointer`, "data": :class:`SingleColumnarData`}
        - concrete example: {"indptr": [0, 2, 2, 3], "data": {"id": [0, 1, 0], "status": [1, 1, 0]}}

            - the scenario 0 sets the status of components with ids 0 and 1 to 1
              (and keeps defaults for other components)
            - scenario 1 keeps the default values for all components
            - scenario 2 sets the status of component with id 0 to 0 (and keeps defaults for other components)
    """

    # per the example above, has one more entry than the number of scenarios
    indptr: IndexPointer
    # flattened columnar data; scenario k occupies rows indptr[k]:indptr[k+1]
    data: SingleColumnarData
63
+
64
+
65
class SparseBatchArray(TypedDict):
    """
    A sparse batch array is a dictionary containing the keys `indptr` and `data`.

    - data: a :class:`SingleArray`. The exact dtype depends on the type of component.
    - indptr: an :class:`IndexPointer` representing the start and end indices for each batch scenario.

    - Examples:

        - structure: {"indptr": :class:`IndexPointer`, "data": :class:`SingleArray`}
        - concrete example: {"indptr": [0, 2, 2, 3], "data": [(0, 1, 1), (1, 1, 1), (0, 0, 0)]}

            - the scenario 0 sets the statuses of components with ids 0 and 1 to 1
              (and keeps defaults for other components)
            - scenario 1 keeps the default values for all components
            - scenario 2 sets the statuses of component with id 0 to 0 (and keeps defaults for other components)
    """

    # per the example above, has one more entry than the number of scenarios
    indptr: IndexPointer
    # flattened row-based data; scenario k occupies rows indptr[k]:indptr[k+1]
    data: SingleArray
85
+
86
+
87
# Either sparse batch representation (row-based or columnar).
SparseBatchData = SparseBatchArray | SparseBatchColumnarData

# Component-type key as used in sparse (serialized) data.
SparseDataComponentType: TypeAlias = str

# Columnar batch data, dense or sparse.
BatchColumnarData = DenseBatchColumnarData | SparseBatchColumnarData

# Columnar data, single-scenario or batch.
ColumnarData = SingleColumnarData | BatchColumnarData
# Row-based batch data, dense or sparse.
BatchArray = DenseBatchArray | SparseBatchArray


# Any batch representation of one component's data.
BatchComponentData = BatchArray | BatchColumnarData

_BatchComponentData = TypeVar("_BatchComponentData", BatchArray, BatchColumnarData)  # deduction helper


# A batch dataset: component type -> batch component data.
BatchDataset = dict[ComponentTypeVar, _BatchComponentData]


# Row-based data, single-scenario or batch.
DataArray = SingleArray | BatchArray


_ComponentData = TypeVar("_ComponentData", SingleComponentData, BatchComponentData)  # deduction helper
# Any representation of one component's data.
ComponentData = DataArray | ColumnarData

# The most general dataset: component type -> any component data representation.
Dataset = dict[ComponentTypeVar, _ComponentData]


# Dense batch data, row-based or columnar.
DenseBatchData = DenseBatchArray | DenseBatchColumnarData

# Scalar attribute value types as they appear in Python (non-numpy) datasets.
NominalValue = int

RealValue = float

# Three-phase (a, b, c) value for asymmetric attributes.
AsymValue = tuple[RealValue, RealValue, RealValue]

AttributeValue = RealValue | NominalValue | AsymValue

# One component instance as a plain dict: attribute name -> value.
Component = dict[AttributeType, AttributeValue | str]

ComponentList = list[Component]

# Pure-Python (JSON-style) dataset representations.
SinglePythonDataset = dict[ComponentTypeVar, ComponentList]

BatchPythonDataset = list[SinglePythonDataset]

PythonDataset = SinglePythonDataset | BatchPythonDataset