power-grid-model 1.12.39__py3-none-macosx_13_0_x86_64.whl → 1.12.104__py3-none-macosx_13_0_x86_64.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (29)
  1. power_grid_model/_core/data_handling.py +65 -11
  2. power_grid_model/_core/data_types.py +13 -2
  3. power_grid_model/_core/error_handling.py +7 -7
  4. power_grid_model/_core/options.py +12 -12
  5. power_grid_model/_core/power_grid_core.py +20 -6
  6. power_grid_model/_core/power_grid_dataset.py +35 -25
  7. power_grid_model/_core/power_grid_meta.py +37 -18
  8. power_grid_model/_core/power_grid_model.py +381 -42
  9. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/dataset.h +16 -0
  10. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/dataset_definitions.h +8 -0
  11. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/model.h +5 -0
  12. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/dataset.hpp +4 -0
  13. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelConfig.cmake +3 -3
  14. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelConfigVersion.cmake +3 -3
  15. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelTargets-release.cmake +3 -3
  16. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelTargets.cmake +1 -1
  17. power_grid_model/_core/power_grid_model_c/lib/libpower_grid_model_c.1.12.104.dylib +0 -0
  18. power_grid_model/_core/power_grid_model_c/lib/libpower_grid_model_c.dylib +0 -0
  19. power_grid_model/_core/serialization.py +11 -9
  20. power_grid_model/_core/typing.py +2 -2
  21. power_grid_model/_core/utils.py +6 -6
  22. power_grid_model/utils.py +57 -1
  23. power_grid_model/validation/errors.py +1 -1
  24. {power_grid_model-1.12.39.dist-info → power_grid_model-1.12.104.dist-info}/METADATA +3 -21
  25. {power_grid_model-1.12.39.dist-info → power_grid_model-1.12.104.dist-info}/RECORD +28 -28
  26. power_grid_model/_core/power_grid_model_c/lib/libpower_grid_model_c.1.12.39.dylib +0 -0
  27. {power_grid_model-1.12.39.dist-info → power_grid_model-1.12.104.dist-info}/WHEEL +0 -0
  28. {power_grid_model-1.12.39.dist-info → power_grid_model-1.12.104.dist-info}/entry_points.txt +0 -0
  29. {power_grid_model-1.12.39.dist-info → power_grid_model-1.12.104.dist-info}/licenses/LICENSE +0 -0
power_grid_model/_core/data_handling.py
@@ -6,16 +6,26 @@
  Data handling
  """

+ from typing import Literal, overload
+
  import numpy as np

- from power_grid_model._core.data_types import Dataset, SingleDataset
- from power_grid_model._core.dataset_definitions import ComponentType, DatasetType
+ from power_grid_model._core.data_types import (
+     BatchDataset,
+     Dataset,
+     DenseBatchArray,
+     SingleArray,
+     SingleColumnarData,
+     SingleDataset,
+ )
+ from power_grid_model._core.dataset_definitions import ComponentType, ComponentTypeVar, DatasetType
  from power_grid_model._core.enum import CalculationType, ComponentAttributeFilterOptions
  from power_grid_model._core.errors import PowerGridUnreachableHitError
  from power_grid_model._core.power_grid_dataset import CConstDataset, CMutableDataset
  from power_grid_model._core.power_grid_meta import initialize_array, power_grid_meta_data
- from power_grid_model._core.typing import ComponentAttributeMapping
+ from power_grid_model._core.typing import ComponentAttributeMapping, ComponentAttributeMappingDict
  from power_grid_model._core.utils import process_data_filter
+ from power_grid_model.data_types import DenseBatchColumnarData


  def get_output_type(*, calculation_type: CalculationType, symmetric: bool) -> DatasetType:
@@ -85,6 +95,54 @@ def prepare_output_view(output_data: Dataset, output_type: DatasetType) -> CMutableDataset:
      return CMutableDataset(output_data, dataset_type=output_type)


+ @overload
+ def create_output_data(
+     output_component_types: None | set[ComponentTypeVar] | list[ComponentTypeVar],
+     output_type: DatasetType,
+     all_component_count: dict[ComponentType, int],
+     is_batch: Literal[False],
+     batch_size: int,
+ ) -> dict[ComponentType, SingleArray]: ...
+ @overload
+ def create_output_data(
+     output_component_types: None | set[ComponentTypeVar] | list[ComponentTypeVar],
+     output_type: DatasetType,
+     all_component_count: dict[ComponentType, int],
+     is_batch: Literal[True],
+     batch_size: int,
+ ) -> dict[ComponentType, DenseBatchArray]: ...
+ @overload
+ def create_output_data(
+     output_component_types: ComponentAttributeFilterOptions,
+     output_type: DatasetType,
+     all_component_count: dict[ComponentType, int],
+     is_batch: Literal[False],
+     batch_size: int,
+ ) -> dict[ComponentType, SingleColumnarData]: ...
+ @overload
+ def create_output_data(
+     output_component_types: ComponentAttributeFilterOptions,
+     output_type: DatasetType,
+     all_component_count: dict[ComponentType, int],
+     is_batch: Literal[True],
+     batch_size: int,
+ ) -> dict[ComponentType, DenseBatchColumnarData]: ...
+ @overload
+ def create_output_data(
+     output_component_types: ComponentAttributeMappingDict,
+     output_type: DatasetType,
+     all_component_count: dict[ComponentType, int],
+     is_batch: Literal[False],
+     batch_size: int,
+ ) -> SingleDataset: ...
+ @overload
+ def create_output_data(
+     output_component_types: ComponentAttributeMappingDict,
+     output_type: DatasetType,
+     all_component_count: dict[ComponentType, int],
+     is_batch: Literal[True],
+     batch_size: int,
+ ) -> BatchDataset: ...
  def create_output_data(
      output_component_types: ComponentAttributeMapping,
      output_type: DatasetType,
@@ -96,7 +154,7 @@ def create_output_data(
      Create the output dataset based on component and batch size from the model; and output attributes requested by user.

      Args:
-         output_component_types:
+         output_component_types (ComponentAttributeMapping):
              the output components the user seeks to extract
          output_type:
              the type of output that the user will see (as per the calculation options)
@@ -118,11 +176,7 @@ def create_output_data(
      result_dict: Dataset = {}

      for name, count in all_component_count.items():
-         # shape
-         if is_batch:
-             shape: tuple[int] | tuple[int, int] = (batch_size, count)
-         else:
-             shape = (count,)
+         shape: tuple[int, int] | int = (batch_size, count) if is_batch else count

          requested_component = processed_output_types[name]
          dtype = power_grid_meta_data[output_type][name].dtype
@@ -134,8 +188,8 @@ def create_output_data(
              ComponentAttributeFilterOptions.everything,
              ComponentAttributeFilterOptions.relevant,
          ]:
-             result_dict[name] = {attr: np.empty(shape, dtype=dtype[attr]) for attr in dtype.names}
+             result_dict[name] = {attr: np.empty(shape=shape, dtype=dtype[attr]) for attr in dtype.names}
          elif isinstance(requested_component, list | set):
-             result_dict[name] = {attr: np.empty(shape, dtype=dtype[attr]) for attr in requested_component}
+             result_dict[name] = {attr: np.empty(shape=shape, dtype=dtype[attr]) for attr in requested_component}

      return result_dict
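
Note: the six `@overload` declarations above exist so that static type checkers can narrow the return type of `create_output_data` from the combination of the component filter and the `is_batch` literal. A minimal sketch of the narrowing (illustrative only: `create_output_data` is an internal helper, and the component counts here are made up):

```python
# Illustrative sketch of the overload narrowing; counts are hypothetical.
from power_grid_model._core.data_handling import create_output_data
from power_grid_model._core.dataset_definitions import ComponentType, DatasetType
from power_grid_model._core.enum import ComponentAttributeFilterOptions

counts = {ComponentType.node: 3}

# None, or a set/list of component types, selects row-based structured arrays:
rows = create_output_data(
    output_component_types={ComponentType.node},
    output_type=DatasetType.sym_output,
    all_component_count=counts,
    is_batch=False,
    batch_size=1,
)  # narrowed to dict[ComponentType, SingleArray]

# A ComponentAttributeFilterOptions value selects columnar output instead:
cols = create_output_data(
    output_component_types=ComponentAttributeFilterOptions.relevant,
    output_type=DatasetType.sym_output,
    all_component_count=counts,
    is_batch=True,
    batch_size=10,
)  # narrowed to dict[ComponentType, DenseBatchColumnarData]
```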
power_grid_model/_core/data_types.py
@@ -13,7 +13,7 @@ from typing import TypeAlias, TypedDict, TypeVar

  import numpy as np

- from power_grid_model._core.dataset_definitions import ComponentTypeVar
+ from power_grid_model._core.dataset_definitions import ComponentType, ComponentTypeVar

  SingleArray: TypeAlias = np.ndarray

@@ -28,7 +28,8 @@ SingleColumnarData = dict[AttributeType, SingleColumn]
  _SingleComponentData = TypeVar("_SingleComponentData", SingleArray, SingleColumnarData)  # deduction helper
  SingleComponentData = SingleArray | SingleColumnarData

-
+ SingleRowBasedDataset = dict[ComponentTypeVar, SingleArray]
+ SingleColumnarDataset = dict[ComponentTypeVar, SingleColumnarData]
  SingleDataset = dict[ComponentTypeVar, _SingleComponentData]

  BatchList = list[SingleDataset]
@@ -113,6 +114,16 @@ Dataset = dict[ComponentTypeVar, _ComponentData]

  DenseBatchData = DenseBatchArray | DenseBatchColumnarData

+ # overloads that only match on latest PGM type
+ SingleRowBasedOutputDataset = dict[ComponentType, SingleArray]
+ SingleColumnarOutputDataset = dict[ComponentType, SingleColumnarData]
+ SingleOutputDataset = dict[ComponentType, SingleComponentData]
+ DenseBatchRowBasedOutputDataset = dict[ComponentType, DenseBatchArray]
+ DenseBatchColumnarOutputDataset = dict[ComponentType, DenseBatchColumnarData]
+ DenseBatchOutputDataset = dict[ComponentType, DenseBatchData]
+ OutputDataset = dict[ComponentType, ComponentData]
+
+
  NominalValue = int

  RealValue = float
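
Note: the new `*OutputDataset` aliases are keyed by the concrete `ComponentType` enum instead of the `ComponentTypeVar` type variable, which is what lets the `create_output_data` overloads resolve to an exact type. A small hypothetical illustration, assuming the 1.12.104 wheel is installed:

```python
# Hypothetical: a SingleRowBasedOutputDataset holds one structured array per component.
from power_grid_model import ComponentType, initialize_array
from power_grid_model._core.data_types import SingleRowBasedOutputDataset

single_output: SingleRowBasedOutputDataset = {
    ComponentType.node: initialize_array("sym_output", "node", 5),
}
```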
power_grid_model/_core/error_handling.py
@@ -40,7 +40,7 @@ from power_grid_model._core.errors import (
      TapSearchStrategyIncompatibleError,
  )
  from power_grid_model._core.index_integer import IdxNp
- from power_grid_model._core.power_grid_core import power_grid_core as pgc
+ from power_grid_model._core.power_grid_core import get_power_grid_core as get_pgc

  VALIDATOR_MSG = "\nTry validate_input_data() or validate_batch_data() to validate your data.\n"
  # error codes
@@ -133,19 +133,19 @@ def find_error(batch_size: int = 1, decode_error: bool = True) -> RuntimeError | None:
      Returns: error object, can be none

      """
-     error_code: int = pgc.error_code()
+     error_code: int = get_pgc().error_code()
      if error_code == PGM_NO_ERROR:
          return None
      if error_code == PGM_REGULAR_ERROR:
-         error_message = pgc.error_message()
+         error_message = get_pgc().error_message()
          error_message += VALIDATOR_MSG
          return _interpret_error(error_message, decode_error=decode_error)
      if error_code == PGM_BATCH_ERROR:
          error_message = "There are errors in the batch calculation." + VALIDATOR_MSG
          error = PowerGridBatchError(error_message)
-         n_fails = pgc.n_failed_scenarios()
-         failed_idxptr = pgc.failed_scenarios()
-         failed_msgptr = pgc.batch_errors()
+         n_fails = get_pgc().n_failed_scenarios()
+         failed_idxptr = get_pgc().failed_scenarios()
+         failed_msgptr = get_pgc().batch_errors()
          error.failed_scenarios = np.ctypeslib.as_array(failed_idxptr, shape=(n_fails,)).copy()
          error.error_messages = [failed_msgptr[i].decode() for i in range(n_fails)]  # type: ignore
          error.errors = [_interpret_error(message, decode_error=decode_error) for message in error.error_messages]
@@ -155,7 +155,7 @@ def find_error(batch_size: int = 1, decode_error: bool = True) -> RuntimeError | None:
          error.succeeded_scenarios = all_scenarios[mask]
          return error
      if error_code == PGM_SERIALIZATION_ERROR:
-         return PowerGridSerializationError(pgc.error_message())
+         return PowerGridSerializationError(get_pgc().error_message())
      return RuntimeError("Unknown error!")

power_grid_model/_core/options.py
@@ -9,7 +9,7 @@ Option class
  from collections.abc import Callable
  from typing import Any

- from power_grid_model._core.power_grid_core import OptionsPtr, power_grid_core as pgc
+ from power_grid_model._core.power_grid_core import OptionsPtr, get_power_grid_core as get_pgc


  class OptionSetter:
@@ -36,15 +36,15 @@ class Options:

      _opt: OptionsPtr
      # option setter
-     calculation_type = OptionSetter(pgc.set_calculation_type)
-     calculation_method = OptionSetter(pgc.set_calculation_method)
-     symmetric = OptionSetter(pgc.set_symmetric)
-     error_tolerance = OptionSetter(pgc.set_err_tol)
-     max_iterations = OptionSetter(pgc.set_max_iter)
-     threading = OptionSetter(pgc.set_threading)
-     tap_changing_strategy = OptionSetter(pgc.set_tap_changing_strategy)
-     short_circuit_voltage_scaling = OptionSetter(pgc.set_short_circuit_voltage_scaling)
-     experimental_features = OptionSetter(pgc.set_experimental_features)
+     calculation_type = OptionSetter(get_pgc().set_calculation_type)
+     calculation_method = OptionSetter(get_pgc().set_calculation_method)
+     symmetric = OptionSetter(get_pgc().set_symmetric)
+     error_tolerance = OptionSetter(get_pgc().set_err_tol)
+     max_iterations = OptionSetter(get_pgc().set_max_iter)
+     threading = OptionSetter(get_pgc().set_threading)
+     tap_changing_strategy = OptionSetter(get_pgc().set_tap_changing_strategy)
+     short_circuit_voltage_scaling = OptionSetter(get_pgc().set_short_circuit_voltage_scaling)
+     experimental_features = OptionSetter(get_pgc().set_experimental_features)

      @property
      def opt(self) -> OptionsPtr:
@@ -57,11 +57,11 @@ class Options:

      def __new__(cls, *args, **kwargs):
          instance = super().__new__(cls, *args, **kwargs)
-         instance._opt = pgc.create_options()
+         instance._opt = get_pgc().create_options()
          return instance

      def __del__(self):
-         pgc.destroy_options(self._opt)
+         get_pgc().destroy_options(self._opt)

      # not copyable
      def __copy__(self):
power_grid_model/_core/power_grid_core.py
@@ -6,6 +6,7 @@
  Loader for the dynamic library
  """

+ import threading
  from collections.abc import Callable
  from ctypes import CDLL, POINTER, c_char, c_char_p, c_double, c_size_t, c_void_p
  from inspect import signature
@@ -14,6 +15,10 @@ from itertools import chain
  from power_grid_model._core.index_integer import IdC, IdxC
  from power_grid_model._core.power_grid_model_c.get_pgm_dll_path import get_pgm_dll_path

+ # threading local
+ _thread_local_data = threading.local()
+
+
  # integer index
  IdxPtr = POINTER(IdxC)
  """Pointer to index."""
@@ -195,14 +200,12 @@ class PowerGridCore:
      """

      _handle: HandlePtr
-     _instance: "PowerGridCore | None" = None

      # singleton of power grid core
      def __new__(cls, *args, **kwargs):
-         if cls._instance is None:
-             cls._instance = super().__new__(cls, *args, **kwargs)
-             cls._instance._handle = _CDLL.PGM_create_handle()
-         return cls._instance
+         instance = super().__new__(cls, *args, **kwargs)
+         instance._handle = _CDLL.PGM_create_handle()
+         return instance

      def __del__(self):
          _CDLL.PGM_destroy_handle(self._handle)
@@ -481,6 +484,12 @@ class PowerGridCore:
      def dataset_const_get_info(self, dataset: ConstDatasetPtr) -> DatasetInfoPtr:  # type: ignore[empty-body]
          pass  # pragma: no cover

+     @make_c_binding
+     def dataset_const_set_next_cartesian_product_dimension(
+         self, dataset: ConstDatasetPtr, next_dataset: ConstDatasetPtr
+     ) -> None:  # type: ignore[empty-body]
+         pass  # pragma: no cover
+
      @make_c_binding
      def dataset_writable_get_info(self, dataset: WritableDatasetPtr) -> DatasetInfoPtr:  # type: ignore[empty-body]
          pass  # pragma: no cover
@@ -560,4 +569,9 @@


  # make one instance
- power_grid_core = PowerGridCore()
+ def get_power_grid_core() -> PowerGridCore:
+     try:
+         return _thread_local_data.power_grid_core
+     except AttributeError:
+         _thread_local_data.power_grid_core = PowerGridCore()
+         return _thread_local_data.power_grid_core
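
Note: the module-level `power_grid_core` singleton is replaced by a lazily created per-thread instance, so each thread owns its own `PowerGridCore` and therefore its own C handle and error state; concurrent calculations can no longer clobber each other's error reporting. A small sketch of the per-thread behaviour (an illustration, not a test shipped with the package):

```python
# Sketch: each thread receives its own PowerGridCore from the new accessor.
import threading

from power_grid_model._core.power_grid_core import get_power_grid_core

cores = {}  # thread ident -> core instance (kept alive so identities stay distinct)

def grab() -> None:
    cores[threading.get_ident()] = get_power_grid_core()

threads = [threading.Thread(target=grab) for _ in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()

# Two threads, two distinct core instances (hence two distinct C handles).
assert len({id(core) for core in cores.values()}) == 2
```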
power_grid_model/_core/power_grid_dataset.py
@@ -34,10 +34,10 @@ from power_grid_model._core.power_grid_core import (
      DatasetInfoPtr,
      MutableDatasetPtr,
      WritableDatasetPtr,
-     power_grid_core as pgc,
+     get_power_grid_core as get_pgc,
  )
  from power_grid_model._core.power_grid_meta import ComponentMetaData, DatasetMetaData, power_grid_meta_data
- from power_grid_model._core.typing import ComponentAttributeMapping, _ComponentAttributeMappingDict
+ from power_grid_model._core.typing import ComponentAttributeMapping, ComponentAttributeMappingDict
  from power_grid_model._core.utils import (
      get_dataset_type,
      is_columnar,
@@ -62,7 +62,7 @@ class CDatasetInfo:
          Returns:
              The name of the dataset type
          """
-         return pgc.dataset_info_name(self._info)
+         return get_pgc().dataset_info_name(self._info)

      def dataset_type(self):
          """
@@ -80,7 +80,7 @@ class CDatasetInfo:
          Returns:
              Whether the dataset is a batch dataset
          """
-         return bool(pgc.dataset_info_is_batch(self._info))
+         return bool(get_pgc().dataset_info_is_batch(self._info))

      def batch_size(self) -> int:
          """
@@ -89,7 +89,7 @@ class CDatasetInfo:
          Returns:
              The size of the dataset
          """
-         return pgc.dataset_info_batch_size(self._info)
+         return get_pgc().dataset_info_batch_size(self._info)

      def n_components(self) -> int:
          """
@@ -98,7 +98,7 @@ class CDatasetInfo:
          Returns:
              The amount of components in the dataset
          """
-         return pgc.dataset_info_n_components(self._info)
+         return get_pgc().dataset_info_n_components(self._info)

      def components(self) -> list[ComponentType]:
          """
@@ -108,7 +108,7 @@ class CDatasetInfo:
              A list of the component names in the dataset
          """
          return [
-             _str_to_component_type(pgc.dataset_info_component_name(self._info, idx))
+             _str_to_component_type(get_pgc().dataset_info_component_name(self._info, idx))
              for idx in range(self.n_components())
          ]

@@ -121,7 +121,7 @@ class CDatasetInfo:
              or -1 if the scenario is not uniform (different amount per scenario)
          """
          return {
-             component_name: pgc.dataset_info_elements_per_scenario(self._info, idx)
+             component_name: get_pgc().dataset_info_elements_per_scenario(self._info, idx)
              for idx, component_name in enumerate(self.components())
          }

@@ -135,7 +135,7 @@ class CDatasetInfo:
              the product of the batch size and the amount of elements per scenario for that component.
          """
          return {
-             component_name: pgc.dataset_info_total_elements(self._info, idx)
+             component_name: get_pgc().dataset_info_total_elements(self._info, idx)
              for idx, component_name in enumerate(self.components())
          }

@@ -150,13 +150,13 @@ class CDatasetInfo:
          result_dict: dict[ComponentType, None | list[AttributeType]] = {}
          components = self.components()
          for component_idx, component_name in enumerate(components):
-             has_indications = pgc.dataset_info_has_attribute_indications(self._info, component_idx)
+             has_indications = get_pgc().dataset_info_has_attribute_indications(self._info, component_idx)
              if has_indications == 0:
                  result_dict[component_name] = None
              else:
-                 n_indications = pgc.dataset_info_n_attribute_indications(self._info, component_idx)
+                 n_indications = get_pgc().dataset_info_n_attribute_indications(self._info, component_idx)
                  result_dict[component_name] = [
-                     pgc.dataset_info_attribute_name(self._info, component_idx, attribute_idx)
+                     get_pgc().dataset_info_attribute_name(self._info, component_idx, attribute_idx)
                      for attribute_idx in range(n_indications)
                  ]
          return result_dict
@@ -195,7 +195,7 @@ class CMutableDataset:
          instance._is_batch = False
          instance._batch_size = 1

-         instance._mutable_dataset = pgc.create_dataset_mutable(
+         instance._mutable_dataset = get_pgc().create_dataset_mutable(
              instance._dataset_type.value, instance._is_batch, instance._batch_size
          )
          assert_no_error()
@@ -221,7 +221,7 @@ class CMutableDataset:
          Returns:
              The dataset info for this dataset.
          """
-         return CDatasetInfo(pgc.dataset_mutable_get_info(self._mutable_dataset))
+         return CDatasetInfo(get_pgc().dataset_mutable_get_info(self._mutable_dataset))

      def get_buffer_views(self) -> list[CBuffer]:
          """
@@ -272,7 +272,7 @@ class CMutableDataset:
          self._register_buffer(component, c_buffer)

      def _register_buffer(self, component: ComponentType, buffer: CBuffer):
-         pgc.dataset_mutable_add_buffer(
+         get_pgc().dataset_mutable_add_buffer(
              dataset=self._mutable_dataset,
              component=component.value,
              elements_per_scenario=buffer.n_elements_per_scenario,
@@ -285,7 +285,7 @@ class CMutableDataset:
              self._register_attribute_buffer(component, attr, attr_data)

      def _register_attribute_buffer(self, component, attr, attr_data):
-         pgc.dataset_mutable_add_attribute_buffer(
+         get_pgc().dataset_mutable_add_attribute_buffer(
              dataset=self._mutable_dataset,
              component=component.value,
              attribute=attr,
@@ -303,7 +303,7 @@ class CMutableDataset:
              raise ValueError(f"Dataset must have a consistent batch size across all components. {VALIDATOR_MSG}")

      def __del__(self):
-         pgc.destroy_dataset_mutable(self._mutable_dataset)
+         get_pgc().destroy_dataset_mutable(self._mutable_dataset)


  class CConstDataset:
@@ -326,7 +326,7 @@ class CConstDataset:

          # create from mutable dataset
          mutable_dataset = CMutableDataset(data=data, dataset_type=dataset_type)
-         instance._const_dataset = pgc.create_dataset_const_from_mutable(mutable_dataset.get_dataset_ptr())
+         instance._const_dataset = get_pgc().create_dataset_const_from_mutable(mutable_dataset.get_dataset_ptr())
          assert_no_error()
          instance._buffer_views = mutable_dataset.get_buffer_views()

@@ -348,10 +348,20 @@ class CConstDataset:
          Returns:
              The dataset info for this dataset.
          """
-         return CDatasetInfo(pgc.dataset_const_get_info(self._const_dataset))
+         return CDatasetInfo(get_pgc().dataset_const_get_info(self._const_dataset))
+
+     def set_next_cartesian_product_dimension(self, next_dataset: "CConstDataset") -> None:
+         """
+         Set the next dataset in the linked list.
+
+         Args:
+             next_dataset: The next dataset to set.
+         """
+         get_pgc().dataset_const_set_next_cartesian_product_dimension(self._const_dataset, next_dataset._const_dataset)
+         assert_no_error()

      def __del__(self):
-         pgc.destroy_dataset_const(self._const_dataset)
+         get_pgc().destroy_dataset_const(self._const_dataset)


  class CWritableDataset:
@@ -400,7 +410,7 @@ class CWritableDataset:
          Returns:
              The dataset info for this dataset.
          """
-         return CDatasetInfo(pgc.dataset_writable_get_info(self._writable_dataset))
+         return CDatasetInfo(get_pgc().dataset_writable_get_info(self._writable_dataset))

      def get_data(self) -> Dataset:
          """
@@ -426,11 +436,11 @@ class CWritableDataset:
          """
          return self._data[component]

-     def get_data_filter(self) -> _ComponentAttributeMappingDict:
+     def get_data_filter(self) -> ComponentAttributeMappingDict:
          """Gets the data filter requested

          Returns:
-             _ComponentAttributeMappingDict: data filter
+             ComponentAttributeMappingDict: data filter
          """
          return self._data_filter

@@ -445,7 +455,7 @@ class CWritableDataset:
              self._register_buffer(component, get_buffer_view(self._data[component], schema))

      def _register_buffer(self, component: ComponentType, buffer: CBuffer):
-         pgc.dataset_writable_set_buffer(
+         get_pgc().dataset_writable_set_buffer(
              dataset=self._writable_dataset,
              component=component,
              indptr=buffer.indptr,
@@ -461,7 +471,7 @@ class CWritableDataset:
          attribute: AttributeType,
          buffer: CAttributeBuffer,
      ):
-         pgc.dataset_writable_set_attribute_buffer(
+         get_pgc().dataset_writable_set_attribute_buffer(
              dataset=self._writable_dataset,
              component=component,
              attribute=attribute,
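
Note: `CConstDataset.set_next_cartesian_product_dimension` wraps the matching new C binding and chains const datasets into a linked list; judging by the name and the accompanying `dataset.h` additions, the core then treats the chained batch datasets as dimensions of a cartesian product. A hedged sketch (`tap_updates` and `load_updates` are assumed, pre-built batch update datasets):

```python
# Hedged sketch: chain two update datasets as cartesian-product dimensions.
from power_grid_model._core.dataset_definitions import DatasetType
from power_grid_model._core.power_grid_dataset import CConstDataset

dim_outer = CConstDataset(tap_updates, dataset_type=DatasetType.update)
dim_inner = CConstDataset(load_updates, dataset_type=DatasetType.update)

# Link the inner dimension behind the outer one in the linked list.
dim_outer.set_next_cartesian_product_dimension(dim_inner)
```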
power_grid_model/_core/power_grid_meta.py
@@ -8,7 +8,7 @@ Load meta data from C core and define numpy structured array

  from dataclasses import dataclass
  from enum import IntEnum
- from typing import Any
+ from typing import Any, overload

  import numpy as np

@@ -21,7 +21,12 @@ from power_grid_model._core.dataset_definitions import (
      _str_to_component_type,
      _str_to_datatype,
  )
- from power_grid_model._core.power_grid_core import AttributePtr, ComponentPtr, DatasetPtr, power_grid_core as pgc
+ from power_grid_model._core.power_grid_core import (
+     AttributePtr,
+     ComponentPtr,
+     DatasetPtr,
+     get_power_grid_core as get_pgc,
+ )


  # constant enum for ctype
@@ -35,7 +40,7 @@ class PGMCType(IntEnum):


  _CTYPE_NUMPY_MAP = {PGMCType.double: "f8", PGMCType.int32: "i4", PGMCType.int8: "i1", PGMCType.double3: "(3,)f8"}
- _ENDIANNESS = "<" if pgc.is_little_endian() == 1 else ">"
+ _ENDIANNESS = "<" if get_pgc().is_little_endian() == 1 else ">"
  _NAN_VALUE_MAP = {
      f"{_ENDIANNESS}f8": np.nan,
      f"{_ENDIANNESS}(3,)f8": np.nan,
@@ -82,10 +87,10 @@ def _generate_meta_data() -> PowerGridMetaData:

      """
      py_meta_data = {}
-     n_datasets = pgc.meta_n_datasets()
+     n_datasets = get_pgc().meta_n_datasets()
      for i in range(n_datasets):
-         dataset = pgc.meta_get_dataset_by_idx(i)
-         py_meta_data[_str_to_datatype(pgc.meta_dataset_name(dataset))] = _generate_meta_dataset(dataset)
+         dataset = get_pgc().meta_get_dataset_by_idx(i)
+         py_meta_data[_str_to_datatype(get_pgc().meta_dataset_name(dataset))] = _generate_meta_dataset(dataset)
      return py_meta_data


@@ -99,10 +104,10 @@ def _generate_meta_dataset(dataset: DatasetPtr) -> DatasetMetaData:

      """
      py_meta_dataset = {}
-     n_components = pgc.meta_n_components(dataset)
+     n_components = get_pgc().meta_n_components(dataset)
      for i in range(n_components):
-         component = pgc.meta_get_component_by_idx(dataset, i)
-         py_meta_dataset[_str_to_component_type(pgc.meta_component_name(component))] = _generate_meta_component(
+         component = get_pgc().meta_get_component_by_idx(dataset, i)
+         py_meta_dataset[_str_to_component_type(get_pgc().meta_component_name(component))] = _generate_meta_component(
              component
          )
      return py_meta_dataset
@@ -121,8 +126,8 @@ def _generate_meta_component(component: ComponentPtr) -> ComponentMetaData:
      dtype_dict = _generate_meta_attributes(component)
      dtype = np.dtype({k: v for k, v in dtype_dict.items() if k != "nans"})  # type: ignore
      nans = dict(zip(dtype_dict["names"], dtype_dict["nans"]))
-     if dtype.alignment != pgc.meta_component_alignment(component):
-         raise TypeError(f'Aligment mismatch for component type: "{pgc.meta_component_name(component)}" !')
+     if dtype.alignment != get_pgc().meta_component_alignment(component):
+         raise TypeError(f'Aligment mismatch for component type: "{get_pgc().meta_component_name(component)}" !')
      # get single nan scalar
      nan_scalar = np.empty(1, dtype=dtype)
      for key, value in nans.items():
@@ -143,12 +148,12 @@ def _generate_meta_attributes(component: ComponentPtr) -> dict:
      formats = []
      offsets = []
      nans = []
-     n_attrs = pgc.meta_n_attributes(component)
+     n_attrs = get_pgc().meta_n_attributes(component)
      for i in range(n_attrs):
-         attribute: AttributePtr = pgc.meta_get_attribute_by_idx(component, i)
-         attr_name: str = pgc.meta_attribute_name(attribute)
-         attr_ctype: int = pgc.meta_attribute_ctype(attribute)
-         attr_offset: int = pgc.meta_attribute_offset(attribute)
+         attribute: AttributePtr = get_pgc().meta_get_attribute_by_idx(component, i)
+         attr_name: str = get_pgc().meta_attribute_name(attribute)
+         attr_ctype: int = get_pgc().meta_attribute_ctype(attribute)
+         attr_offset: int = get_pgc().meta_attribute_offset(attribute)
          attr_np_type = f"{_ENDIANNESS}{_CTYPE_NUMPY_MAP[PGMCType(attr_ctype)]}"
          attr_nan = _NAN_VALUE_MAP[attr_np_type]
          names.append(attr_name)
@@ -159,7 +164,7 @@ def _generate_meta_attributes(component: ComponentPtr) -> dict:
          "names": names,
          "formats": formats,
          "offsets": offsets,
-         "itemsize": pgc.meta_component_size(component),
+         "itemsize": get_pgc().meta_component_size(component),
          "aligned": True,
          "nans": nans,
      }
@@ -172,10 +177,24 @@ The data types for all dataset types and components used by the Power Grid Model
  """


+ @overload
+ def initialize_array(
+     data_type: DatasetTypeLike,
+     component_type: ComponentTypeLike,
+     shape: int | tuple[int],
+     empty: bool = False,
+ ) -> SingleArray: ...
+ @overload
+ def initialize_array(
+     data_type: DatasetTypeLike,
+     component_type: ComponentTypeLike,
+     shape: tuple[int, int],
+     empty: bool = False,
+ ) -> DenseBatchArray: ...
  def initialize_array(
      data_type: DatasetTypeLike,
      component_type: ComponentTypeLike,
-     shape: tuple | int,
+     shape: int | tuple[int] | tuple[int, int],
      empty: bool = False,
  ) -> SingleArray | DenseBatchArray:
      """