power-grid-model 1.10.17__py3-none-win_amd64.whl → 1.12.119__py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of power-grid-model might be problematic. Click here for more details.

Files changed (67) hide show
  1. power_grid_model/__init__.py +54 -29
  2. power_grid_model/_core/__init__.py +3 -3
  3. power_grid_model/_core/buffer_handling.py +507 -478
  4. power_grid_model/_core/data_handling.py +195 -141
  5. power_grid_model/_core/data_types.py +142 -0
  6. power_grid_model/_core/dataset_definitions.py +109 -109
  7. power_grid_model/_core/enum.py +226 -0
  8. power_grid_model/_core/error_handling.py +215 -198
  9. power_grid_model/_core/errors.py +134 -0
  10. power_grid_model/_core/index_integer.py +17 -17
  11. power_grid_model/_core/options.py +71 -69
  12. power_grid_model/_core/power_grid_core.py +577 -562
  13. power_grid_model/_core/power_grid_dataset.py +545 -490
  14. power_grid_model/_core/power_grid_meta.py +262 -244
  15. power_grid_model/_core/power_grid_model.py +1025 -687
  16. power_grid_model/_core/power_grid_model_c/__init__.py +3 -0
  17. power_grid_model/_core/power_grid_model_c/bin/power_grid_model_c.dll +0 -0
  18. power_grid_model/_core/power_grid_model_c/get_pgm_dll_path.py +63 -0
  19. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/basics.h +251 -0
  20. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/buffer.h +108 -0
  21. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/dataset.h +332 -0
  22. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/dataset_definitions.h +1060 -0
  23. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/handle.h +111 -0
  24. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/meta_data.h +189 -0
  25. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/model.h +130 -0
  26. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/options.h +142 -0
  27. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/serialization.h +118 -0
  28. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c.h +36 -0
  29. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/basics.hpp +65 -0
  30. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/buffer.hpp +61 -0
  31. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/dataset.hpp +224 -0
  32. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/handle.hpp +108 -0
  33. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/meta_data.hpp +84 -0
  34. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/model.hpp +63 -0
  35. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/options.hpp +52 -0
  36. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/serialization.hpp +124 -0
  37. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/utils.hpp +81 -0
  38. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp.hpp +19 -0
  39. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelConfig.cmake +37 -0
  40. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelConfigVersion.cmake +65 -0
  41. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelTargets-release.cmake +19 -0
  42. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelTargets.cmake +144 -0
  43. power_grid_model/_core/power_grid_model_c/lib/power_grid_model_c.lib +0 -0
  44. power_grid_model/_core/power_grid_model_c/share/LICENSE +292 -0
  45. power_grid_model/_core/power_grid_model_c/share/README.md +15 -0
  46. power_grid_model/_core/serialization.py +319 -317
  47. power_grid_model/_core/typing.py +20 -0
  48. power_grid_model/{_utils.py → _core/utils.py} +798 -783
  49. power_grid_model/data_types.py +321 -319
  50. power_grid_model/enum.py +27 -214
  51. power_grid_model/errors.py +37 -119
  52. power_grid_model/typing.py +43 -48
  53. power_grid_model/utils.py +529 -400
  54. power_grid_model/validation/__init__.py +25 -10
  55. power_grid_model/validation/{rules.py → _rules.py} +1167 -962
  56. power_grid_model/validation/{validation.py → _validation.py} +1172 -1015
  57. power_grid_model/validation/assertions.py +93 -92
  58. power_grid_model/validation/errors.py +602 -524
  59. power_grid_model/validation/utils.py +313 -318
  60. {power_grid_model-1.10.17.dist-info → power_grid_model-1.12.119.dist-info}/METADATA +162 -165
  61. power_grid_model-1.12.119.dist-info/RECORD +65 -0
  62. {power_grid_model-1.10.17.dist-info → power_grid_model-1.12.119.dist-info}/WHEEL +1 -1
  63. power_grid_model-1.12.119.dist-info/entry_points.txt +3 -0
  64. power_grid_model/_core/_power_grid_core.dll +0 -0
  65. power_grid_model-1.10.17.dist-info/RECORD +0 -32
  66. power_grid_model-1.10.17.dist-info/top_level.txt +0 -1
  67. {power_grid_model-1.10.17.dist-info → power_grid_model-1.12.119.dist-info/licenses}/LICENSE +0 -0
power_grid_model/utils.py CHANGED
@@ -1,400 +1,529 @@
1
- # SPDX-FileCopyrightText: Contributors to the Power Grid Model project <powergridmodel@lfenergy.org>
2
- #
3
- # SPDX-License-Identifier: MPL-2.0
4
-
5
- """
6
- This module contains functions that may be useful when working with the power-grid-model library.
7
- """
8
-
9
- import json
10
- import math
11
- import tempfile
12
- import warnings
13
- from pathlib import Path
14
- from typing import cast as cast_type
15
-
16
- import numpy as np
17
-
18
- from power_grid_model import CalculationMethod, PowerGridModel
19
- from power_grid_model._core.dataset_definitions import DatasetType, _map_to_component_types
20
- from power_grid_model._core.serialization import ( # pylint: disable=unused-import
21
- json_deserialize,
22
- json_serialize,
23
- msgpack_deserialize,
24
- msgpack_serialize,
25
- )
26
- from power_grid_model._utils import (
27
- _extract_data_from_component_data,
28
- _extract_indptr,
29
- get_and_verify_batch_sizes as _get_and_verify_batch_sizes,
30
- get_batch_size as _get_batch_size,
31
- get_dataset_type,
32
- is_columnar,
33
- is_sparse,
34
- )
35
- from power_grid_model.data_types import (
36
- BatchComponentData,
37
- BatchDataset,
38
- Dataset,
39
- DenseBatchArray,
40
- IndexPointer,
41
- SingleComponentData,
42
- SingleDataset,
43
- )
44
- from power_grid_model.errors import PowerGridError, PowerGridSerializationError
45
- from power_grid_model.typing import ComponentAttributeMapping
46
-
47
- _DEPRECATED_FUNCTION_MSG = "This function is deprecated."
48
- _DEPRECATED_JSON_DESERIALIZATION_MSG = f"{_DEPRECATED_FUNCTION_MSG} Please use json_deserialize_to_file instead."
49
- _DEPRECATED_JSON_SERIALIZATION_MSG = f"{_DEPRECATED_FUNCTION_MSG} Please use json_serialize_from_file instead."
50
-
51
-
52
- def get_dataset_scenario(dataset: BatchDataset, scenario: int) -> SingleDataset:
53
- """
54
- Obtain the single dataset at a given scenario, independently of the internal batch data structure.
55
-
56
- Args:
57
- dataset: the batch dataset
58
- scenario: the scenario index
59
-
60
- Raises:
61
- IndexError: if the scenario is out of range for any of the components.
62
-
63
- Returns:
64
- The dataset for a specific scenario
65
- """
66
-
67
- def _get_dense_scenario(arr: np.ndarray) -> np.ndarray:
68
- return arr[scenario]
69
-
70
- def _get_sparse_scenario(arr: np.ndarray, indptr: IndexPointer) -> np.ndarray:
71
- return arr[indptr[scenario] : indptr[scenario + 1]]
72
-
73
- def _get_component_scenario(component_scenarios: BatchComponentData) -> SingleComponentData:
74
- data = _extract_data_from_component_data(component_scenarios)
75
-
76
- if is_sparse(component_scenarios):
77
- indptr = _extract_indptr(component_scenarios)
78
- if is_columnar(component_scenarios):
79
- return {
80
- attribute: _get_sparse_scenario(attribute_data, indptr)
81
- for attribute, attribute_data in data.items()
82
- }
83
- return _get_sparse_scenario(data, indptr)
84
-
85
- if is_columnar(component_scenarios):
86
- return {attribute: _get_dense_scenario(attribute_data) for attribute, attribute_data in data.items()}
87
- return _get_dense_scenario(cast_type(DenseBatchArray, component_scenarios))
88
-
89
- return {component: _get_component_scenario(component_data) for component, component_data in dataset.items()}
90
-
91
-
92
- def get_dataset_batch_size(dataset: BatchDataset) -> int:
93
- """
94
- Get the number of scenarios in the batch dataset.
95
-
96
- Args:
97
- dataset: the batch dataset
98
-
99
- Raises:
100
- ValueError: if the batch dataset is inconsistent.
101
-
102
- Returns:
103
- The size of the batch dataset. Making use of existing _utils function.
104
- """
105
- return _get_and_verify_batch_sizes(dataset)
106
-
107
-
108
- def get_component_batch_size(data_array: BatchComponentData) -> int:
109
- """
110
- Determine the number of batch scenarios and verify the data structure
111
-
112
- Args:
113
- data_array: batch data for power-grid-model
114
-
115
- Returns:
116
- The number of batch scenarios in data_array
117
- """
118
- return _get_batch_size(data_array)
119
-
120
-
121
- def json_deserialize_from_file(
122
- file_path: Path,
123
- data_filter: ComponentAttributeMapping = None,
124
- ) -> Dataset:
125
- """
126
- Load and deserialize a JSON file to a new dataset.
127
-
128
- Args:
129
- file_path: the path to the file to load and deserialize.
130
-
131
- Raises:
132
- ValueError: if the data is inconsistent with the rest of the dataset or a component is unknown.
133
- PowerGridError: if there was an internal error.
134
-
135
- Returns:
136
- The deserialized dataset in Power grid model input format.
137
- """
138
- with open(file_path, encoding="utf-8") as file_pointer:
139
- return json_deserialize(file_pointer.read(), data_filter=data_filter)
140
-
141
-
142
- def json_serialize_to_file(
143
- file_path: Path,
144
- data: Dataset,
145
- dataset_type: DatasetType | None = None,
146
- use_compact_list: bool = False,
147
- indent: int | None = 2,
148
- ):
149
- """
150
- Export JSON data in most recent format.
151
-
152
- Args:
153
- file_path: the path to the file to load and deserialize.
154
- data: a single or batch dataset for power-grid-model.
155
- use_compact_list: write components on a single line.
156
- indent: indent of the file. Defaults to 2.
157
-
158
- Returns:
159
- Save to file.
160
- """
161
- data = _map_to_component_types(data)
162
- result = json_serialize(
163
- data=data, dataset_type=dataset_type, use_compact_list=use_compact_list, indent=-1 if indent is None else indent
164
- )
165
-
166
- with open(file_path, mode="w", encoding="utf-8") as file_pointer:
167
- file_pointer.write(result)
168
-
169
-
170
- def msgpack_deserialize_from_file(
171
- file_path: Path,
172
- data_filter: ComponentAttributeMapping = None,
173
- ) -> Dataset:
174
- """
175
- Load and deserialize a msgpack file to a new dataset.
176
-
177
- Args:
178
- file_path: the path to the file to load and deserialize.
179
-
180
- Raises:
181
- ValueError: if the data is inconsistent with the rest of the dataset or a component is unknown.
182
- PowerGridError: if there was an internal error.
183
-
184
- Returns:
185
- The deserialized dataset in Power grid model input format.
186
- """
187
- with open(file_path, mode="rb") as file_pointer:
188
- return msgpack_deserialize(file_pointer.read(), data_filter=data_filter)
189
-
190
-
191
- def msgpack_serialize_to_file(
192
- file_path: Path, data: Dataset, dataset_type: DatasetType | None = None, use_compact_list: bool = False
193
- ):
194
- """
195
- Export msgpack data in most recent format.
196
-
197
- Args:
198
- file_path: the path to the file to load and deserialize.
199
- data: a single or batch dataset for power-grid-model.
200
- use_compact_list: write components on a single line.
201
- indent: indent of the file, default 2.
202
-
203
- Returns:
204
- Save to file.
205
- """
206
- data = _map_to_component_types(data)
207
- result = msgpack_serialize(data=data, dataset_type=dataset_type, use_compact_list=use_compact_list)
208
-
209
- with open(file_path, mode="wb") as file_pointer:
210
- file_pointer.write(result)
211
-
212
-
213
- def import_json_data(json_file: Path, data_type: str, *args, **kwargs) -> Dataset:
214
- """
215
- [deprecated] Import json data.
216
-
217
- **WARNING:** This function is deprecated. Please use json_deserialize_from_file instead.
218
-
219
- Args:
220
- json_file: path to the json file.
221
- data_type: type of data: input, update, sym_output, or asym_output.
222
- [deprecated]: All extra positional and keyword arguments are ignored.
223
-
224
- Returns:
225
- A single or batch dataset for power-grid-model.
226
- """
227
- warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning)
228
- if args:
229
- warnings.warn("Provided positional arguments at index 2 and following are deprecated.", DeprecationWarning)
230
- if kwargs:
231
- warnings.warn(f"Provided keyword arguments {list(kwargs.keys())} are deprecated.", DeprecationWarning)
232
-
233
- return _compatibility_deprecated_import_json_data(json_file=json_file, data_type=data_type) # type: ignore
234
-
235
-
236
- def export_json_data(
237
- json_file: Path, data: Dataset, indent: int | None = 2, compact: bool = False, use_deprecated_format: bool = True
238
- ):
239
- """
240
- [deprecated] Export json data in a deprecated serialization format.
241
-
242
- **WARNING:** This function is deprecated. Please use json_serialize_to_file instead.
243
-
244
- For backwards compatibility, this function outputs the deprecated serialization format by default.
245
- This feature may be removed in the future.
246
-
247
- Args:
248
- json_file: path to json file.
249
- data: a single or batch dataset for power-grid-model.
250
- indent: indent of the file, default 2.
251
- compact: write components on a single line.
252
- use_deprecated_format: use the old style format. Defaults to True for backwards compatibility.
253
-
254
- Returns:
255
- Save to file.
256
- """
257
- warnings.warn(_DEPRECATED_JSON_SERIALIZATION_MSG, DeprecationWarning)
258
- if use_deprecated_format:
259
- warnings.warn(
260
- "Argument use_deprecated_format is a temporary backwards-compatibility measure. "
261
- "Please upgrade to use_deprecated_format=False or json_serialize_to_file as soon as possible.",
262
- DeprecationWarning,
263
- stacklevel=2,
264
- )
265
- _compatibility_deprecated_export_json_data(json_file=json_file, data=data)
266
- else:
267
- json_serialize_to_file(file_path=json_file, data=data, use_compact_list=compact, indent=indent)
268
-
269
-
270
- def _compatibility_deprecated_export_json_data(
271
- json_file: Path, data: Dataset, indent: int | None = 2, compact: bool = False
272
- ):
273
- serialized_data = json_serialize(data=data, use_compact_list=compact, indent=-1 if indent is None else indent)
274
- old_format_serialized_data = json.dumps(json.loads(serialized_data)["data"])
275
- with open(json_file, mode="w", encoding="utf-8") as file_pointer:
276
- file_pointer.write(old_format_serialized_data)
277
-
278
-
279
- def import_input_data(json_file: Path) -> SingleDataset:
280
- """
281
- [deprecated] Import input json data.
282
-
283
- **WARNING:** This function is deprecated. Please use json_deserialize_from_file instead.
284
-
285
- For backwards and forward compatibility, this function supportes both the latest and the old serialization format.
286
-
287
- Args:
288
- json_file: path to the json file.
289
-
290
- Returns:
291
- A single dataset for power-grid-model.
292
- """
293
- warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning)
294
-
295
- data = _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DatasetType.input)
296
- assert isinstance(data, dict)
297
- assert all(isinstance(component, np.ndarray) and component.ndim == 1 for component in data.values())
298
- return cast_type(SingleDataset, data)
299
-
300
-
301
- def import_update_data(json_file: Path) -> BatchDataset:
302
- """
303
- [deprecated] Import update json data.
304
-
305
- **WARNING:** This function is deprecated. Please use json_deserialize_from_file instead.
306
-
307
- For backwards and forward compatibility, this function supportes both the latest and the old serialization format.
308
-
309
- Args:
310
- json_file: path to the json file.
311
-
312
- Returns:
313
- A batch dataset for power-grid-model.
314
- """
315
- warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning)
316
-
317
- return cast_type(
318
- BatchDataset,
319
- _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DatasetType.update),
320
- )
321
-
322
-
323
- def _compatibility_deprecated_import_json_data(json_file: Path, data_type: DatasetType):
324
- with open(json_file, mode="r", encoding="utf-8") as file_pointer:
325
- data = json.load(file_pointer)
326
-
327
- if "version" not in data: # convert old format to version 1.0
328
- data = {
329
- "attributes": {},
330
- "data": data,
331
- "is_batch": isinstance(data, list),
332
- "type": data_type,
333
- "version": "1.0",
334
- }
335
-
336
- result = json_deserialize(json.dumps(data))
337
- if get_dataset_type(result) != data_type:
338
- raise PowerGridSerializationError("An internal error occured during deserialization")
339
-
340
- return result
341
-
342
-
343
- def self_test():
344
- """
345
- Perform a self-test of the Power Grid Model functionality.
346
-
347
- Tests whether the installation was successful and there are no build errors,
348
- segmentation violations, undefined symbols, etc.
349
-
350
- This function is designed to validate the basic functionality of data serialization,
351
- model instantiation, power flow calculation, and serialization of results using the
352
- Power Grid Model library.
353
-
354
- Raises:
355
- PowerGridError: if there was an internal error.
356
- """
357
- with tempfile.TemporaryDirectory() as temp_dir:
358
- # Create a simple JSON input data file in the temporary directory
359
- input_data = {
360
- "version": "1.0",
361
- "type": "input",
362
- "is_batch": False,
363
- "attributes": {},
364
- "data": {
365
- "node": [{"id": 1, "u_rated": 10000}],
366
- "source": [{"id": 2, "node": 1, "u_ref": 1, "sk": 1e20}],
367
- "sym_load": [{"id": 3, "node": 1, "status": 1, "type": 0, "p_specified": 0, "q_specified": 0}],
368
- },
369
- }
370
-
371
- input_file_path = Path(temp_dir) / "input_data.json"
372
- input_file_path.write_text(json.dumps(input_data))
373
-
374
- try:
375
- # Load the created JSON input data file (deserialize)
376
- deserialized_data = json_deserialize_from_file(input_file_path)
377
-
378
- # Create a PowerGridModel instance from the loaded input data
379
- model = PowerGridModel(deserialized_data)
380
-
381
- # Run a simple power flow calculation on the created model (linear calculation)
382
- output_data = model.calculate_power_flow(calculation_method=CalculationMethod.linear)
383
-
384
- # Write the calculation result to a file in the temporary directory
385
- output_file_path = Path(temp_dir) / "output_data.json"
386
-
387
- json_serialize_to_file(output_file_path, output_data)
388
-
389
- # Verify that the written output is correct
390
- with open(output_file_path, "r", encoding="utf-8") as output_file:
391
- output_data = json.load(output_file)
392
-
393
- assert output_data is not None
394
- assert math.isclose(
395
- output_data["data"]["node"][0]["u"], input_data["data"]["node"][0]["u_rated"], abs_tol=1e-9
396
- )
397
-
398
- print("Self test finished.")
399
- except Exception as e:
400
- raise PowerGridError from e
1
+ # SPDX-FileCopyrightText: Contributors to the Power Grid Model project <powergridmodel@lfenergy.org>
2
+ #
3
+ # SPDX-License-Identifier: MPL-2.0
4
+
5
+ """
6
+ This module contains functions that may be useful when working with the power-grid-model library.
7
+ """
8
+
9
+ import io
10
+ import json
11
+ import math
12
+ import tempfile
13
+ import warnings
14
+ from pathlib import Path
15
+ from typing import IO, Any, cast as cast_type
16
+
17
+ import numpy as np
18
+
19
+ from power_grid_model import CalculationMethod, PowerGridModel
20
+ from power_grid_model._core.dataset_definitions import ComponentType, DatasetType, _map_to_component_types
21
+ from power_grid_model._core.serialization import (
22
+ json_deserialize,
23
+ json_serialize,
24
+ msgpack_deserialize,
25
+ msgpack_serialize,
26
+ )
27
+ from power_grid_model._core.utils import (
28
+ _extract_data_from_component_data,
29
+ _extract_indptr,
30
+ get_and_verify_batch_sizes as _get_and_verify_batch_sizes,
31
+ get_batch_size as _get_batch_size,
32
+ get_dataset_type,
33
+ is_columnar,
34
+ is_sparse,
35
+ )
36
+ from power_grid_model.data_types import (
37
+ BatchComponentData,
38
+ BatchDataset,
39
+ Dataset,
40
+ DenseBatchArray,
41
+ IndexPointer,
42
+ SingleComponentData,
43
+ SingleDataset,
44
+ )
45
+ from power_grid_model.errors import PowerGridError, PowerGridSerializationError
46
+ from power_grid_model.typing import ComponentAttributeMapping
47
+
48
+ _DEPRECATED_FUNCTION_MSG = "This function is deprecated."
49
+ _DEPRECATED_JSON_DESERIALIZATION_MSG = f"{_DEPRECATED_FUNCTION_MSG} Please use json_deserialize_to_file instead."
50
+ _DEPRECATED_JSON_SERIALIZATION_MSG = f"{_DEPRECATED_FUNCTION_MSG} Please use json_serialize_from_file instead."
51
+
52
+ LICENSE_TEXT = (
53
+ "SPDX-FileCopyrightText: Contributors to the Power Grid Model project <powergridmodel@lfenergy.org>\n\n"
54
+ "SPDX-License-Identifier: MPL-2.0"
55
+ "\n"
56
+ )
57
+
58
+
59
+ def get_dataset_scenario(dataset: BatchDataset, scenario: int) -> SingleDataset:
60
+ """
61
+ Obtain the single dataset at a given scenario, independently of the internal batch data structure.
62
+
63
+ Args:
64
+ dataset: the batch dataset
65
+ scenario: the scenario index
66
+
67
+ Raises:
68
+ IndexError: if the scenario is out of range for any of the components.
69
+
70
+ Returns:
71
+ The dataset for a specific scenario
72
+ """
73
+
74
+ def _get_dense_scenario(arr: np.ndarray) -> np.ndarray:
75
+ return arr[scenario]
76
+
77
+ def _get_sparse_scenario(arr: np.ndarray, indptr: IndexPointer) -> np.ndarray:
78
+ return arr[indptr[scenario] : indptr[scenario + 1]]
79
+
80
+ def _get_component_scenario(component_scenarios: BatchComponentData) -> SingleComponentData:
81
+ data = _extract_data_from_component_data(component_scenarios)
82
+
83
+ if is_sparse(component_scenarios):
84
+ indptr = _extract_indptr(component_scenarios)
85
+ if is_columnar(component_scenarios):
86
+ return {
87
+ attribute: _get_sparse_scenario(attribute_data, indptr)
88
+ for attribute, attribute_data in data.items()
89
+ }
90
+ return _get_sparse_scenario(data, indptr)
91
+
92
+ if is_columnar(component_scenarios):
93
+ return {attribute: _get_dense_scenario(attribute_data) for attribute, attribute_data in data.items()}
94
+ return _get_dense_scenario(cast_type(DenseBatchArray, component_scenarios))
95
+
96
+ return {component: _get_component_scenario(component_data) for component, component_data in dataset.items()}
97
+
98
+
99
+ def get_dataset_batch_size(dataset: BatchDataset) -> int:
100
+ """
101
+ Get the number of scenarios in the batch dataset.
102
+
103
+ Args:
104
+ dataset: the batch dataset
105
+
106
+ Raises:
107
+ ValueError: if the batch dataset is inconsistent.
108
+
109
+ Returns:
110
+ The size of the batch dataset. Making use of existing _utils function.
111
+ """
112
+ return _get_and_verify_batch_sizes(dataset)
113
+
114
+
115
+ def get_component_batch_size(data_array: BatchComponentData) -> int:
116
+ """
117
+ Determine the number of batch scenarios and verify the data structure
118
+
119
+ Args:
120
+ data_array: batch data for power-grid-model
121
+
122
+ Returns:
123
+ The number of batch scenarios in data_array
124
+ """
125
+ return _get_batch_size(data_array)
126
+
127
+
128
+ def json_deserialize_from_file(
129
+ file_path: Path,
130
+ data_filter: ComponentAttributeMapping = None,
131
+ ) -> Dataset:
132
+ """
133
+ Load and deserialize a JSON file to a new dataset.
134
+
135
+ Args:
136
+ file_path: the path to the file to load and deserialize.
137
+
138
+ Raises:
139
+ ValueError: if the data is inconsistent with the rest of the dataset or a component is unknown.
140
+ PowerGridError: if there was an internal error.
141
+
142
+ Returns:
143
+ The deserialized dataset in Power grid model input format.
144
+ """
145
+ with file_path.open(encoding="utf-8") as file_pointer:
146
+ return json_deserialize(file_pointer.read(), data_filter=data_filter)
147
+
148
+
149
+ def json_serialize_to_file(
150
+ file_path: Path,
151
+ data: Dataset,
152
+ dataset_type: DatasetType | None = None,
153
+ use_compact_list: bool = False,
154
+ indent: int | None = 2,
155
+ ):
156
+ """
157
+ Export JSON data in most recent format.
158
+
159
+ Args:
160
+ file_path: the path to the file to load and deserialize.
161
+ data: a single or batch dataset for power-grid-model.
162
+ use_compact_list: write components on a single line.
163
+ indent: indent of the file. Defaults to 2.
164
+
165
+ Returns:
166
+ Save to file.
167
+ """
168
+ data = _map_to_component_types(data)
169
+ result = json_serialize(
170
+ data=data, dataset_type=dataset_type, use_compact_list=use_compact_list, indent=-1 if indent is None else indent
171
+ )
172
+
173
+ with file_path.open(mode="w", encoding="utf-8") as file_pointer:
174
+ file_pointer.write(result)
175
+
176
+
177
+ def msgpack_deserialize_from_file(
178
+ file_path: Path,
179
+ data_filter: ComponentAttributeMapping = None,
180
+ ) -> Dataset:
181
+ """
182
+ Load and deserialize a msgpack file to a new dataset.
183
+
184
+ Args:
185
+ file_path: the path to the file to load and deserialize.
186
+
187
+ Raises:
188
+ ValueError: if the data is inconsistent with the rest of the dataset or a component is unknown.
189
+ PowerGridError: if there was an internal error.
190
+
191
+ Returns:
192
+ The deserialized dataset in Power grid model input format.
193
+ """
194
+ with file_path.open(mode="rb") as file_pointer:
195
+ return msgpack_deserialize(file_pointer.read(), data_filter=data_filter)
196
+
197
+
198
+ def msgpack_serialize_to_file(
199
+ file_path: Path, data: Dataset, dataset_type: DatasetType | None = None, use_compact_list: bool = False
200
+ ):
201
+ """
202
+ Export msgpack data in most recent format.
203
+
204
+ Args:
205
+ file_path: the path to the file to load and deserialize.
206
+ data: a single or batch dataset for power-grid-model.
207
+ use_compact_list: write components on a single line.
208
+ indent: indent of the file, default 2.
209
+
210
+ Returns:
211
+ Save to file.
212
+ """
213
+ data = _map_to_component_types(data)
214
+ result = msgpack_serialize(data=data, dataset_type=dataset_type, use_compact_list=use_compact_list)
215
+
216
+ with file_path.open(mode="wb") as file_pointer:
217
+ file_pointer.write(result)
218
+
219
+
220
+ def import_json_data(json_file: Path, data_type: str, *args, **kwargs) -> Dataset:
221
+ """
222
+ [deprecated] Import json data.
223
+
224
+ **WARNING:** This function is deprecated. Please use json_deserialize_from_file instead.
225
+
226
+ Args:
227
+ json_file: path to the json file.
228
+ data_type: type of data: input, update, sym_output, or asym_output.
229
+ [deprecated]: All extra positional and keyword arguments are ignored.
230
+
231
+ Returns:
232
+ A single or batch dataset for power-grid-model.
233
+ """
234
+ warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning)
235
+ if args:
236
+ warnings.warn("Provided positional arguments at index 2 and following are deprecated.", DeprecationWarning)
237
+ if kwargs:
238
+ warnings.warn(f"Provided keyword arguments {list(kwargs.keys())} are deprecated.", DeprecationWarning)
239
+
240
+ return _compatibility_deprecated_import_json_data(json_file=json_file, data_type=data_type) # type: ignore
241
+
242
+
243
+ def export_json_data(
244
+ json_file: Path, data: Dataset, indent: int | None = 2, compact: bool = False, use_deprecated_format: bool = True
245
+ ):
246
+ """
247
+ [deprecated] Export json data in a deprecated serialization format.
248
+
249
+ **WARNING:** This function is deprecated. Please use json_serialize_to_file instead.
250
+
251
+ For backwards compatibility, this function outputs the deprecated serialization format by default.
252
+ This feature may be removed in the future.
253
+
254
+ Args:
255
+ json_file: path to json file.
256
+ data: a single or batch dataset for power-grid-model.
257
+ indent: indent of the file, default 2.
258
+ compact: write components on a single line.
259
+ use_deprecated_format: use the old style format. Defaults to True for backwards compatibility.
260
+
261
+ Returns:
262
+ Save to file.
263
+ """
264
+ warnings.warn(_DEPRECATED_JSON_SERIALIZATION_MSG, DeprecationWarning)
265
+ if use_deprecated_format:
266
+ warnings.warn(
267
+ "Argument use_deprecated_format is a temporary backwards-compatibility measure. "
268
+ "Please upgrade to use_deprecated_format=False or json_serialize_to_file as soon as possible.",
269
+ DeprecationWarning,
270
+ stacklevel=2,
271
+ )
272
+ _compatibility_deprecated_export_json_data(json_file=json_file, data=data)
273
+ else:
274
+ json_serialize_to_file(file_path=json_file, data=data, use_compact_list=compact, indent=indent)
275
+
276
+
277
+ def _compatibility_deprecated_export_json_data(
278
+ json_file: Path, data: Dataset, indent: int | None = 2, compact: bool = False
279
+ ):
280
+ serialized_data = json_serialize(data=data, use_compact_list=compact, indent=-1 if indent is None else indent)
281
+ old_format_serialized_data = json.dumps(json.loads(serialized_data)["data"])
282
+ with json_file.open(mode="w", encoding="utf-8") as file_pointer:
283
+ file_pointer.write(old_format_serialized_data)
284
+
285
+
286
+ def import_input_data(json_file: Path) -> SingleDataset: # pragma: no cover
287
+ """
288
+ [deprecated] Import input json data.
289
+
290
+ **WARNING:** This function is deprecated. Please use json_deserialize_from_file instead.
291
+
292
+ For backwards and forward compatibility, this function supportes both the latest and the old serialization format.
293
+
294
+ Args:
295
+ json_file: path to the json file.
296
+
297
+ Returns:
298
+ A single dataset for power-grid-model.
299
+ """
300
+ warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning)
301
+
302
+ data = _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DatasetType.input)
303
+ if not isinstance(data, dict):
304
+ raise TypeError(f"Expected data to be dict, got {type(data)}")
305
+ if not all(isinstance(component, np.ndarray) and component.ndim == 1 for component in data.values()):
306
+ raise TypeError("All components must be 1D numpy arrays")
307
+ return cast_type(SingleDataset, data)
308
+
309
+
310
def import_update_data(json_file: Path) -> BatchDataset:
    """
    [deprecated] Import update json data.

    **WARNING:** This function is deprecated. Please use json_deserialize_from_file instead.

    For backwards and forward compatibility, this function supports both the latest and the old serialization format.

    Args:
        json_file: path to the json file.

    Returns:
        A batch dataset for power-grid-model.
    """
    warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning)

    deserialized = _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DatasetType.update)
    return cast_type(BatchDataset, deserialized)
330
+
331
+
332
def _compatibility_deprecated_import_json_data(json_file: Path, data_type: DatasetType):
    """
    Load power-grid-model JSON data, accepting both the current and the legacy (pre-1.0) layout.

    Legacy files contain only the raw component data (no "version" key); they are wrapped
    in a version 1.0 envelope before being handed to the deserializer.

    Args:
        json_file: path to the json file.
        data_type: the expected dataset type of the file contents.

    Raises:
        PowerGridSerializationError: if the deserialized dataset type does not match data_type.

    Returns:
        The deserialized dataset.
    """
    with Path(json_file).open(mode="r", encoding="utf-8") as file_pointer:
        data = json.load(file_pointer)

    if "version" not in data:  # convert old format to version 1.0
        data = {
            "attributes": {},
            "data": data,
            "is_batch": isinstance(data, list),
            "type": data_type,
            "version": "1.0",
        }

    result = json_deserialize(json.dumps(data))
    # Sanity check: the serialization core must report the type we asked for.
    # (Fixed typo in the error message: "occured" -> "occurred".)
    if get_dataset_type(result) != data_type:
        raise PowerGridSerializationError("An internal error occurred during deserialization")

    return result
350
+
351
+
352
def self_test():
    """
    Perform a self-test of the Power Grid Model functionality.

    Tests whether the installation was successful and there are no build errors,
    segmentation violations, undefined symbols, etc.

    This function is designed to validate the basic functionality of data serialization,
    model instantiation, power flow calculation, and serialization of results using the
    Power Grid Model library.

    Raises:
        PowerGridError: if there was an internal error.
    """
    with tempfile.TemporaryDirectory() as workdir:
        # Minimal grid: a single node with one strong source and one zero-power load,
        # written as a JSON input file in the temporary directory.
        input_data = {
            "version": "1.0",
            "type": "input",
            "is_batch": False,
            "attributes": {},
            "data": {
                ComponentType.node: [{"id": 1, "u_rated": 10000}],
                ComponentType.source: [{"id": 2, "node": 1, "u_ref": 1, "sk": 1e20}],
                ComponentType.sym_load: [
                    {"id": 3, "node": 1, "status": 1, "type": 0, "p_specified": 0, "q_specified": 0}
                ],
            },
        }

        input_file_path = Path(workdir) / "input_data.json"
        input_file_path.write_text(json.dumps(input_data))

        try:
            # Round-trip: deserialize the file we just wrote...
            deserialized_data = json_deserialize_from_file(input_file_path)

            # ...build a model from it...
            model = PowerGridModel(deserialized_data)

            # ...and run a simple (linear) power flow calculation.
            output_data = model.calculate_power_flow(calculation_method=CalculationMethod.linear)

            # Serialize the calculation result back to disk.
            output_file_path = Path(workdir) / "output_data.json"
            json_serialize_to_file(output_file_path, output_data)

            # Read the written output back and verify it.
            with Path(output_file_path).open("r", encoding="utf-8") as output_file:
                output_data = json.load(output_file)

            if output_data is None:  # pragma: no cover
                raise ValueError("Output data should not be None")
            # With a near-infinite source and zero load, the node voltage must equal u_rated.
            computed_u = output_data["data"][ComponentType.node][0]["u"]
            expected_u = input_data["data"][ComponentType.node][0]["u_rated"]
            if not math.isclose(computed_u, expected_u, abs_tol=1e-9):  # pragma: no cover
                raise ValueError("The difference between the input and output data is too big.")

            print("Self test finished.")
        except Exception as exc:
            raise PowerGridError from exc
416
+
417
+
418
def _make_test_case(  # noqa: PLR0913
    *,
    output_path: Path,
    input_data: SingleDataset,
    params: dict,
    output_data: Dataset,
    output_dataset_type: DatasetType,
    update_data: Dataset | None = None,
):
    """
    Create and save a validation test case dataset, including input, update (optional), output, and parameters.

    Args:
        output_path: Directory path where the test case files will be saved.
        input_data: Input dataset for the test case.
        params: Dictionary of parameters used for the test case. It may include calculation method, tolerances, etc.
            An example of parameters could be:
            params = {
                "calculation_method": "newton_raphson",
                "rtol": 1e-6,
                "atol": 1e-6,
            }
        output_data: Output dataset for the test case.
        output_dataset_type: The type of the output dataset (e.g., sym_output, asym_output, sc_output).
        update_data: Optional batch update dataset.

    Raises:
        ValueError: If the output_dataset_type is not recognized.

    Side Effects:
        Writes JSON files for input, update (if provided), output, and parameters,
        all relevant license files, to output_path.
    """
    output_file_stem = output_dataset_type.name if isinstance(output_dataset_type, DatasetType) else None
    if output_file_stem is None or output_dataset_type in (DatasetType.input, DatasetType.update):
        raise ValueError(
            f"Invalid output dataset type: {output_dataset_type}. Expected one of: sym_output, asym_output, sc_output."
        )

    output_path.mkdir(parents=True, exist_ok=True)

    def _dump(file_name: str, dataset: Dataset, dataset_type: DatasetType) -> None:
        # Serialize one dataset and place the matching REUSE license file next to it.
        json_serialize_to_file(file_path=output_path / file_name, data=dataset, dataset_type=dataset_type)
        (output_path / f"{file_name}.license").write_text(data=LICENSE_TEXT, encoding="utf-8")

    _dump("input.json", input_data, DatasetType.input)

    if update_data is not None:
        _dump("update_batch.json", update_data, DatasetType.update)
        # Batch update implies batch output; reflect that in the output file name.
        output_file_stem += "_batch"

    _dump(f"{output_file_stem}.json", output_data, output_dataset_type)

    params_json = json.dumps(params, indent=2)
    (output_path / "params.json").write_text(data=params_json, encoding="utf-8")
    (output_path / "params.json.license").write_text(data=LICENSE_TEXT, encoding="utf-8")
475
+
476
+
477
def msgpack_deserialize_from_stream(
    stream: IO[bytes],
    data_filter: ComponentAttributeMapping = None,
) -> Dataset:
    """
    Load and deserialize a msgpack file to a new dataset.

    Args:
        stream: the binary IO stream to the file to load and deserialize.
        data_filter: optional component/attribute filter passed through to msgpack_deserialize.

    Raises:
        TypeError: if the provided object is not a readable binary stream.
        io.UnsupportedOperation: if the stream is not readable.
        ValueError: if the data is inconsistent with the rest of the dataset or a component is unknown.
        PowerGridError: if there was an internal error.

    Returns:
        The deserialized dataset in Power grid model input format.
    """
    # BUG FIX: the original guard was `if stream is IO[Any]:`, an identity comparison
    # against the typing construct itself, which is always False — the check never fired.
    # Check for a stream-like object instead.
    if not hasattr(stream, "read"):
        raise TypeError("Expected a stream.")
    if isinstance(stream, io.TextIOBase):
        raise TypeError("Expected a binary stream.")
    if not stream.readable():
        raise io.UnsupportedOperation("Stream is not readable.")
    return msgpack_deserialize(stream.read(), data_filter=data_filter)
501
+
502
+
503
def msgpack_serialize_to_stream(
    stream: IO[bytes],
    data: Dataset,
    dataset_type: DatasetType | None = None,
    use_compact_list: bool = False,
):
    """
    Export msgpack data in most recent format.

    Args:
        stream: the binary IO stream to write the serialized data to.
        data: a single or batch dataset for power-grid-model.
        dataset_type: optional dataset type, passed through to msgpack_serialize.
        use_compact_list: write components on a single line.

    Raises:
        TypeError: if the provided object is not a writable binary stream.
        io.UnsupportedOperation: if the stream is not writable.
    """
    # BUG FIX: the original guard was `if stream is IO[Any]:`, an identity comparison
    # against the typing construct itself, which is always False — the check never fired.
    # Check for a stream-like object instead.
    if not hasattr(stream, "write"):
        raise TypeError("Expected a stream.")
    if isinstance(stream, io.TextIOBase):
        raise TypeError("Expected a binary stream.")
    if not stream.writable():
        raise io.UnsupportedOperation("Stream is not writable.")

    data = _map_to_component_types(data)
    result = msgpack_serialize(data=data, dataset_type=dataset_type, use_compact_list=use_compact_list)
    stream.write(result)