power-grid-model 1.12.58-py3-none-win_amd64.whl → 1.12.59-py3-none-win_amd64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (59)
  1. power_grid_model/__init__.py +54 -54
  2. power_grid_model/_core/__init__.py +3 -3
  3. power_grid_model/_core/buffer_handling.py +493 -493
  4. power_grid_model/_core/data_handling.py +141 -141
  5. power_grid_model/_core/data_types.py +132 -132
  6. power_grid_model/_core/dataset_definitions.py +109 -109
  7. power_grid_model/_core/enum.py +226 -226
  8. power_grid_model/_core/error_handling.py +206 -206
  9. power_grid_model/_core/errors.py +130 -130
  10. power_grid_model/_core/index_integer.py +17 -17
  11. power_grid_model/_core/options.py +71 -71
  12. power_grid_model/_core/power_grid_core.py +563 -563
  13. power_grid_model/_core/power_grid_dataset.py +535 -535
  14. power_grid_model/_core/power_grid_meta.py +243 -243
  15. power_grid_model/_core/power_grid_model.py +686 -686
  16. power_grid_model/_core/power_grid_model_c/__init__.py +3 -3
  17. power_grid_model/_core/power_grid_model_c/bin/power_grid_model_c.dll +0 -0
  18. power_grid_model/_core/power_grid_model_c/get_pgm_dll_path.py +63 -63
  19. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/basics.h +255 -255
  20. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/buffer.h +108 -108
  21. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/dataset.h +316 -316
  22. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/dataset_definitions.h +1052 -1052
  23. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/handle.h +99 -99
  24. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/meta_data.h +189 -189
  25. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/model.h +125 -125
  26. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/options.h +142 -142
  27. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c/serialization.h +118 -118
  28. power_grid_model/_core/power_grid_model_c/include/power_grid_model_c.h +36 -36
  29. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/basics.hpp +65 -65
  30. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/buffer.hpp +61 -61
  31. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/dataset.hpp +220 -220
  32. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/handle.hpp +108 -108
  33. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/meta_data.hpp +84 -84
  34. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/model.hpp +63 -63
  35. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/options.hpp +52 -52
  36. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/serialization.hpp +124 -124
  37. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp/utils.hpp +81 -81
  38. power_grid_model/_core/power_grid_model_c/include/power_grid_model_cpp.hpp +19 -19
  39. power_grid_model/_core/power_grid_model_c/lib/cmake/power_grid_model/power_grid_modelConfigVersion.cmake +3 -3
  40. power_grid_model/_core/serialization.py +317 -317
  41. power_grid_model/_core/typing.py +20 -20
  42. power_grid_model/_core/utils.py +798 -798
  43. power_grid_model/data_types.py +321 -321
  44. power_grid_model/enum.py +27 -27
  45. power_grid_model/errors.py +37 -37
  46. power_grid_model/typing.py +43 -43
  47. power_grid_model/utils.py +473 -473
  48. power_grid_model/validation/__init__.py +25 -25
  49. power_grid_model/validation/_rules.py +1171 -1171
  50. power_grid_model/validation/_validation.py +1172 -1172
  51. power_grid_model/validation/assertions.py +93 -93
  52. power_grid_model/validation/errors.py +602 -602
  53. power_grid_model/validation/utils.py +313 -313
  54. {power_grid_model-1.12.58.dist-info → power_grid_model-1.12.59.dist-info}/METADATA +1 -1
  55. power_grid_model-1.12.59.dist-info/RECORD +65 -0
  56. power_grid_model-1.12.58.dist-info/RECORD +0 -65
  57. {power_grid_model-1.12.58.dist-info → power_grid_model-1.12.59.dist-info}/WHEEL +0 -0
  58. {power_grid_model-1.12.58.dist-info → power_grid_model-1.12.59.dist-info}/entry_points.txt +0 -0
  59. {power_grid_model-1.12.58.dist-info → power_grid_model-1.12.59.dist-info}/licenses/LICENSE +0 -0
power_grid_model/utils.py CHANGED
@@ -1,473 +1,473 @@
(All 473 lines of this file were removed and re-added without textual changes; the unchanged module content is shown once below.)
# SPDX-FileCopyrightText: Contributors to the Power Grid Model project <powergridmodel@lfenergy.org>
#
# SPDX-License-Identifier: MPL-2.0

"""
This module contains functions that may be useful when working with the power-grid-model library.
"""

import json
import math
import tempfile
import warnings
from pathlib import Path
from typing import cast as cast_type

import numpy as np

from power_grid_model import CalculationMethod, PowerGridModel
from power_grid_model._core.dataset_definitions import ComponentType, DatasetType, _map_to_component_types
from power_grid_model._core.serialization import (
    json_deserialize,
    json_serialize,
    msgpack_deserialize,
    msgpack_serialize,
)
from power_grid_model._core.utils import (
    _extract_data_from_component_data,
    _extract_indptr,
    get_and_verify_batch_sizes as _get_and_verify_batch_sizes,
    get_batch_size as _get_batch_size,
    get_dataset_type,
    is_columnar,
    is_sparse,
)
from power_grid_model.data_types import (
    BatchComponentData,
    BatchDataset,
    Dataset,
    DenseBatchArray,
    IndexPointer,
    SingleComponentData,
    SingleDataset,
)
from power_grid_model.errors import PowerGridError, PowerGridSerializationError
from power_grid_model.typing import ComponentAttributeMapping

_DEPRECATED_FUNCTION_MSG = "This function is deprecated."
_DEPRECATED_JSON_DESERIALIZATION_MSG = f"{_DEPRECATED_FUNCTION_MSG} Please use json_deserialize_from_file instead."
_DEPRECATED_JSON_SERIALIZATION_MSG = f"{_DEPRECATED_FUNCTION_MSG} Please use json_serialize_to_file instead."

LICENSE_TEXT = (
    "SPDX-FileCopyrightText: Contributors to the Power Grid Model project <powergridmodel@lfenergy.org>\n\n"
    "SPDX-License-Identifier: MPL-2.0"
    "\n"
)


def get_dataset_scenario(dataset: BatchDataset, scenario: int) -> SingleDataset:
    """
    Obtain the single dataset at a given scenario, independently of the internal batch data structure.

    Args:
        dataset: the batch dataset
        scenario: the scenario index

    Raises:
        IndexError: if the scenario is out of range for any of the components.

    Returns:
        The dataset for a specific scenario
    """

    def _get_dense_scenario(arr: np.ndarray) -> np.ndarray:
        return arr[scenario]

    def _get_sparse_scenario(arr: np.ndarray, indptr: IndexPointer) -> np.ndarray:
        return arr[indptr[scenario] : indptr[scenario + 1]]

    def _get_component_scenario(component_scenarios: BatchComponentData) -> SingleComponentData:
        data = _extract_data_from_component_data(component_scenarios)

        if is_sparse(component_scenarios):
            indptr = _extract_indptr(component_scenarios)
            if is_columnar(component_scenarios):
                return {
                    attribute: _get_sparse_scenario(attribute_data, indptr)
                    for attribute, attribute_data in data.items()
                }
            return _get_sparse_scenario(data, indptr)

        if is_columnar(component_scenarios):
            return {attribute: _get_dense_scenario(attribute_data) for attribute, attribute_data in data.items()}
        return _get_dense_scenario(cast_type(DenseBatchArray, component_scenarios))

    return {component: _get_component_scenario(component_data) for component, component_data in dataset.items()}


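# --- Annotation (not part of the module): a minimal usage sketch for
# get_dataset_scenario. `update_batch` is a hypothetical batch update dataset,
# e.g. loaded via json_deserialize_from_file below; the function works the same
# for dense, sparse, row-based, and columnar batch layouts.
#
#     update_batch = json_deserialize_from_file(Path("update_batch.json"))
#     scenario_2 = get_dataset_scenario(update_batch, 2)  # a SingleDataset
#     sym_load_update = scenario_2[ComponentType.sym_load]
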
def get_dataset_batch_size(dataset: BatchDataset) -> int:
    """
    Get the number of scenarios in the batch dataset.

    Args:
        dataset: the batch dataset

    Raises:
        ValueError: if the batch dataset is inconsistent.

    Returns:
        The number of scenarios in the batch dataset.
    """
    return _get_and_verify_batch_sizes(dataset)


def get_component_batch_size(data_array: BatchComponentData) -> int:
    """
    Determine the number of batch scenarios and verify the data structure.

    Args:
        data_array: batch data for power-grid-model

    Returns:
        The number of batch scenarios in data_array
    """
    return _get_batch_size(data_array)


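# --- Annotation (not part of the module): sketch of the two batch-size helpers.
# `update_batch` is the same hypothetical batch update dataset as above; the
# component-level helper takes the batch data of a single component.
#
#     n_scenarios = get_dataset_batch_size(update_batch)
#     n_line_scenarios = get_component_batch_size(update_batch[ComponentType.line])
#     assert n_scenarios == n_line_scenarios
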
def json_deserialize_from_file(
    file_path: Path,
    data_filter: ComponentAttributeMapping = None,
) -> Dataset:
    """
    Load and deserialize a JSON file to a new dataset.

    Args:
        file_path: the path to the file to load and deserialize.
        data_filter: an optional selection of components and attributes to deserialize.

    Raises:
        ValueError: if the data is inconsistent with the rest of the dataset or a component is unknown.
        PowerGridError: if there was an internal error.

    Returns:
        The deserialized dataset in power-grid-model format.
    """
    with file_path.open(encoding="utf-8") as file_pointer:
        return json_deserialize(file_pointer.read(), data_filter=data_filter)


def json_serialize_to_file(
    file_path: Path,
    data: Dataset,
    dataset_type: DatasetType | None = None,
    use_compact_list: bool = False,
    indent: int | None = 2,
):
    """
    Export JSON data in the most recent format.

    Args:
        file_path: the path to the file to write to.
        data: a single or batch dataset for power-grid-model.
        dataset_type: the type of the dataset; if None, it is deduced from the data where possible.
        use_compact_list: write components on a single line.
        indent: indent of the file. Defaults to 2. None disables indentation.
    """
    data = _map_to_component_types(data)
    result = json_serialize(
        data=data, dataset_type=dataset_type, use_compact_list=use_compact_list, indent=-1 if indent is None else indent
    )

    with file_path.open(mode="w", encoding="utf-8") as file_pointer:
        file_pointer.write(result)


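# --- Annotation (not part of the module): a JSON file round trip under assumed
# file names. "input.json" is a hypothetical power-grid-model input file; the
# dataset type is stored in the file, so it need not be passed back in.
#
#     input_data = json_deserialize_from_file(Path("input.json"))
#     json_serialize_to_file(Path("copy.json"), input_data, use_compact_list=True)
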
def msgpack_deserialize_from_file(
    file_path: Path,
    data_filter: ComponentAttributeMapping = None,
) -> Dataset:
    """
    Load and deserialize a msgpack file to a new dataset.

    Args:
        file_path: the path to the file to load and deserialize.
        data_filter: an optional selection of components and attributes to deserialize.

    Raises:
        ValueError: if the data is inconsistent with the rest of the dataset or a component is unknown.
        PowerGridError: if there was an internal error.

    Returns:
        The deserialized dataset in power-grid-model format.
    """
    with file_path.open(mode="rb") as file_pointer:
        return msgpack_deserialize(file_pointer.read(), data_filter=data_filter)


def msgpack_serialize_to_file(
    file_path: Path, data: Dataset, dataset_type: DatasetType | None = None, use_compact_list: bool = False
):
    """
    Export msgpack data in the most recent format.

    Args:
        file_path: the path to the file to write to.
        data: a single or batch dataset for power-grid-model.
        dataset_type: the type of the dataset; if None, it is deduced from the data where possible.
        use_compact_list: write components on a single line.
    """
    data = _map_to_component_types(data)
    result = msgpack_serialize(data=data, dataset_type=dataset_type, use_compact_list=use_compact_list)

    with file_path.open(mode="wb") as file_pointer:
        file_pointer.write(result)


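# --- Annotation (not part of the module): the binary (msgpack) counterpart of
# the JSON round trip above; the ".pgm" file names are assumptions, not a
# convention mandated by the library.
#
#     data = msgpack_deserialize_from_file(Path("input.pgm"))
#     msgpack_serialize_to_file(Path("copy.pgm"), data)
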
def import_json_data(json_file: Path, data_type: str, *args, **kwargs) -> Dataset:
    """
    [deprecated] Import json data.

    **WARNING:** This function is deprecated. Please use json_deserialize_from_file instead.

    Args:
        json_file: path to the json file.
        data_type: type of data: input, update, sym_output, or asym_output.
        [deprecated]: All extra positional and keyword arguments are ignored.

    Returns:
        A single or batch dataset for power-grid-model.
    """
    warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning)
    if args:
        warnings.warn("Provided positional arguments at index 2 and following are deprecated.", DeprecationWarning)
    if kwargs:
        warnings.warn(f"Provided keyword arguments {list(kwargs.keys())} are deprecated.", DeprecationWarning)

    return _compatibility_deprecated_import_json_data(json_file=json_file, data_type=data_type)  # type: ignore


def export_json_data(
    json_file: Path, data: Dataset, indent: int | None = 2, compact: bool = False, use_deprecated_format: bool = True
):
    """
    [deprecated] Export json data in a deprecated serialization format.

    **WARNING:** This function is deprecated. Please use json_serialize_to_file instead.

    For backwards compatibility, this function outputs the deprecated serialization format by default.
    This feature may be removed in the future.

    Args:
        json_file: path to the json file.
        data: a single or batch dataset for power-grid-model.
        indent: indent of the file, default 2.
        compact: write components on a single line.
        use_deprecated_format: use the old style format. Defaults to True for backwards compatibility.
    """
    warnings.warn(_DEPRECATED_JSON_SERIALIZATION_MSG, DeprecationWarning)
    if use_deprecated_format:
        warnings.warn(
            "Argument use_deprecated_format is a temporary backwards-compatibility measure. "
            "Please upgrade to use_deprecated_format=False or json_serialize_to_file as soon as possible.",
            DeprecationWarning,
            stacklevel=2,
        )
        _compatibility_deprecated_export_json_data(json_file=json_file, data=data, indent=indent, compact=compact)
    else:
        json_serialize_to_file(file_path=json_file, data=data, use_compact_list=compact, indent=indent)


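# --- Annotation (not part of the module): migrating off the deprecated pair,
# with hypothetical file names. The replacements always read and write the
# current serialization format.
#
#     # before (emits DeprecationWarning):
#     data = import_json_data(Path("input.json"), "input")
#     export_json_data(Path("out.json"), data, use_deprecated_format=False)
#
#     # after:
#     data = json_deserialize_from_file(Path("input.json"))
#     json_serialize_to_file(Path("out.json"), data)
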
def _compatibility_deprecated_export_json_data(
    json_file: Path, data: Dataset, indent: int | None = 2, compact: bool = False
):
    serialized_data = json_serialize(data=data, use_compact_list=compact, indent=-1 if indent is None else indent)
    old_format_serialized_data = json.dumps(json.loads(serialized_data)["data"])
    with json_file.open(mode="w", encoding="utf-8") as file_pointer:
        file_pointer.write(old_format_serialized_data)


def import_input_data(json_file: Path) -> SingleDataset:  # pragma: no cover
    """
    [deprecated] Import input json data.

    **WARNING:** This function is deprecated. Please use json_deserialize_from_file instead.

    For backwards and forward compatibility, this function supports both the latest and the old serialization format.

    Args:
        json_file: path to the json file.

    Returns:
        A single dataset for power-grid-model.
    """
    warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning)

    data = _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DatasetType.input)
    if not isinstance(data, dict):
        raise TypeError(f"Expected data to be dict, got {type(data)}")
    if not all(isinstance(component, np.ndarray) and component.ndim == 1 for component in data.values()):
        raise TypeError("All components must be 1D numpy arrays")
    return cast_type(SingleDataset, data)


def import_update_data(json_file: Path) -> BatchDataset:
    """
    [deprecated] Import update json data.

    **WARNING:** This function is deprecated. Please use json_deserialize_from_file instead.

    For backwards and forward compatibility, this function supports both the latest and the old serialization format.

    Args:
        json_file: path to the json file.

    Returns:
        A batch dataset for power-grid-model.
    """
    warnings.warn(_DEPRECATED_JSON_DESERIALIZATION_MSG, DeprecationWarning)

    return cast_type(
        BatchDataset,
        _compatibility_deprecated_import_json_data(json_file=json_file, data_type=DatasetType.update),
    )


def _compatibility_deprecated_import_json_data(json_file: Path, data_type: DatasetType):
    with Path(json_file).open(mode="r", encoding="utf-8") as file_pointer:
        data = json.load(file_pointer)

    if "version" not in data:  # convert old format to version 1.0
        data = {
            "attributes": {},
            "data": data,
            "is_batch": isinstance(data, list),
            "type": data_type,
            "version": "1.0",
        }

    result = json_deserialize(json.dumps(data))
    if get_dataset_type(result) != data_type:
        raise PowerGridSerializationError("An internal error occurred during deserialization")

    return result


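# --- Annotation (not part of the module): shape of the conversion performed
# above. An old-format file stores the bare "data" payload; it is wrapped into
# the version 1.0 envelope before deserialization. Values are illustrative.
#
#     # old format (file content):    {"node": [{"id": 1, "u_rated": 10500.0}]}
#     # wrapped for json_deserialize: {"attributes": {}, "data": {...},
#     #                                "is_batch": false, "type": "input", "version": "1.0"}
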
def self_test():
    """
    Perform a self-test of the Power Grid Model functionality.

    Tests whether the installation was successful and there are no build errors,
    segmentation violations, undefined symbols, etc.

    This function is designed to validate the basic functionality of data serialization,
    model instantiation, power flow calculation, and serialization of results using the
    Power Grid Model library.

    Raises:
        PowerGridError: if there was an internal error.
    """
    with tempfile.TemporaryDirectory() as temp_dir:
        # Create a simple JSON input data file in the temporary directory
        input_data = {
            "version": "1.0",
            "type": "input",
            "is_batch": False,
            "attributes": {},
            "data": {
                ComponentType.node: [{"id": 1, "u_rated": 10000}],
                ComponentType.source: [{"id": 2, "node": 1, "u_ref": 1, "sk": 1e20}],
                ComponentType.sym_load: [
                    {"id": 3, "node": 1, "status": 1, "type": 0, "p_specified": 0, "q_specified": 0}
                ],
            },
        }

        input_file_path = Path(temp_dir) / "input_data.json"
        input_file_path.write_text(json.dumps(input_data))

        try:
            # Load the created JSON input data file (deserialize)
            deserialized_data = json_deserialize_from_file(input_file_path)

            # Create a PowerGridModel instance from the loaded input data
            model = PowerGridModel(deserialized_data)

            # Run a simple power flow calculation on the created model (linear calculation)
            output_data = model.calculate_power_flow(calculation_method=CalculationMethod.linear)

            # Write the calculation result to a file in the temporary directory
            output_file_path = Path(temp_dir) / "output_data.json"

            json_serialize_to_file(output_file_path, output_data)

            # Verify that the written output is correct
            with Path(output_file_path).open("r", encoding="utf-8") as output_file:
                output_data = json.load(output_file)

            if output_data is None:  # pragma: no cover
                raise ValueError("Output data should not be None")
            if not math.isclose(
                output_data["data"][ComponentType.node][0]["u"],
                input_data["data"][ComponentType.node][0]["u_rated"],
                abs_tol=1e-9,
            ):  # pragma: no cover
                raise ValueError("The difference between the input and output data is too big.")

            print("Self test finished.")
        except Exception as e:
            raise PowerGridError from e


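# --- Annotation (not part of the module): self_test takes no arguments and is
# handy as a post-install smoke test, e.g. from a shell:
#
#     python -c "from power_grid_model.utils import self_test; self_test()"
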
def _make_test_case(  # noqa: PLR0913
    *,
    output_path: Path,
    input_data: SingleDataset,
    params: dict,
    output_data: Dataset,
    output_dataset_type: DatasetType,
    update_data: Dataset | None = None,
):
    """
    Create and save a validation test case dataset, including input, update (optional), output, and parameters.

    Args:
        output_path: Directory path where the test case files will be saved.
        input_data: Input dataset for the test case.
        params: Dictionary of parameters used for the test case. It may include calculation method, tolerances, etc.
            An example of parameters could be:
            params = {
                "calculation_method": "newton_raphson",
                "rtol": 1e-6,
                "atol": 1e-6,
            }
        output_data: Output dataset for the test case.
        output_dataset_type: The type of the output dataset (e.g., sym_output, asym_output, sc_output).
        update_data: Optional batch update dataset.

    Raises:
        ValueError: If the output_dataset_type is not recognized.

    Side Effects:
        Writes JSON files for input, update (if provided), output, and parameters,
        plus the corresponding license files, to output_path.
    """
    output_file_stem = output_dataset_type.name if isinstance(output_dataset_type, DatasetType) else None
    if output_dataset_type in [DatasetType.input, DatasetType.update] or output_file_stem is None:
        raise ValueError(
            f"Invalid output dataset type: {output_dataset_type}. Expected one of: sym_output, asym_output, sc_output."
        )

    output_path.mkdir(parents=True, exist_ok=True)
    json_serialize_to_file(file_path=output_path / "input.json", data=input_data, dataset_type=DatasetType.input)
    (output_path / "input.json.license").write_text(data=LICENSE_TEXT, encoding="utf-8")

    if update_data is not None:
        json_serialize_to_file(
            file_path=output_path / "update_batch.json", data=update_data, dataset_type=DatasetType.update
        )
        (output_path / "update_batch.json.license").write_text(data=LICENSE_TEXT, encoding="utf-8")
        output_file_stem += "_batch"
    json_serialize_to_file(
        file_path=output_path / f"{output_file_stem}.json", data=output_data, dataset_type=output_dataset_type
    )
    (output_path / f"{output_file_stem}.json.license").write_text(data=LICENSE_TEXT, encoding="utf-8")

    params_json = json.dumps(params, indent=2)
    (output_path / "params.json").write_text(data=params_json, encoding="utf-8")
    (output_path / "params.json.license").write_text(data=LICENSE_TEXT, encoding="utf-8")
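# --- Annotation (not part of the module): directory layout produced by a
# hypothetical call with update_data given and
# output_dataset_type=DatasetType.sym_output:
#
#     output_path/
#         input.json               input.json.license
#         update_batch.json        update_batch.json.license
#         sym_output_batch.json    sym_output_batch.json.license
#         params.json              params.json.license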