foamlib 0.9.3.tar.gz → 0.9.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (65)
  1. {foamlib-0.9.3 → foamlib-0.9.4}/PKG-INFO +1 -1
  2. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/__init__.py +1 -1
  3. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_files/_files.py +23 -16
  4. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_files/_serialization.py +54 -22
  5. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_files/_types.py +27 -29
  6. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_example.py +2 -1
  7. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_dumps.py +17 -3
  8. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_files.py +2 -1
  9. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_advanced.py +2 -1
  10. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_intermediate.py +33 -33
  11. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_types.py +5 -5
  12. {foamlib-0.9.3 → foamlib-0.9.4}/.devcontainer.json +0 -0
  13. {foamlib-0.9.3 → foamlib-0.9.4}/.dockerignore +0 -0
  14. {foamlib-0.9.3 → foamlib-0.9.4}/.git-blame-ignore-revs +0 -0
  15. {foamlib-0.9.3 → foamlib-0.9.4}/.github/dependabot.yml +0 -0
  16. {foamlib-0.9.3 → foamlib-0.9.4}/.github/workflows/ci.yml +0 -0
  17. {foamlib-0.9.3 → foamlib-0.9.4}/.github/workflows/docker.yml +0 -0
  18. {foamlib-0.9.3 → foamlib-0.9.4}/.github/workflows/dockerhub-description.yml +0 -0
  19. {foamlib-0.9.3 → foamlib-0.9.4}/.github/workflows/pypi-publish.yml +0 -0
  20. {foamlib-0.9.3 → foamlib-0.9.4}/.gitignore +0 -0
  21. {foamlib-0.9.3 → foamlib-0.9.4}/.readthedocs.yaml +0 -0
  22. {foamlib-0.9.3 → foamlib-0.9.4}/CONTRIBUTING.md +0 -0
  23. {foamlib-0.9.3 → foamlib-0.9.4}/Dockerfile +0 -0
  24. {foamlib-0.9.3 → foamlib-0.9.4}/LICENSE.txt +0 -0
  25. {foamlib-0.9.3 → foamlib-0.9.4}/README.md +0 -0
  26. {foamlib-0.9.3 → foamlib-0.9.4}/benchmark/benchmark.png +0 -0
  27. {foamlib-0.9.3 → foamlib-0.9.4}/benchmark/benchmark.py +0 -0
  28. {foamlib-0.9.3 → foamlib-0.9.4}/benchmark/requirements.txt +0 -0
  29. {foamlib-0.9.3 → foamlib-0.9.4}/benchmark/ruff.toml +0 -0
  30. {foamlib-0.9.3 → foamlib-0.9.4}/docs/Makefile +0 -0
  31. {foamlib-0.9.3 → foamlib-0.9.4}/docs/cases.rst +0 -0
  32. {foamlib-0.9.3 → foamlib-0.9.4}/docs/conf.py +0 -0
  33. {foamlib-0.9.3 → foamlib-0.9.4}/docs/files.rst +0 -0
  34. {foamlib-0.9.3 → foamlib-0.9.4}/docs/index.rst +0 -0
  35. {foamlib-0.9.3 → foamlib-0.9.4}/docs/make.bat +0 -0
  36. {foamlib-0.9.3 → foamlib-0.9.4}/docs/ruff.toml +0 -0
  37. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_cases/__init__.py +0 -0
  38. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_cases/_async.py +0 -0
  39. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_cases/_base.py +0 -0
  40. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_cases/_run.py +0 -0
  41. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_cases/_slurm.py +0 -0
  42. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_cases/_subprocess.py +0 -0
  43. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_cases/_sync.py +0 -0
  44. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_cases/_util.py +0 -0
  45. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_files/__init__.py +0 -0
  46. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_files/_io.py +0 -0
  47. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_files/_parsing.py +0 -0
  48. {foamlib-0.9.3 → foamlib-0.9.4}/foamlib/py.typed +0 -0
  49. {foamlib-0.9.3 → foamlib-0.9.4}/logo.png +0 -0
  50. {foamlib-0.9.3 → foamlib-0.9.4}/pyproject.toml +0 -0
  51. {foamlib-0.9.3 → foamlib-0.9.4}/tests/__init__.py +0 -0
  52. {foamlib-0.9.3 → foamlib-0.9.4}/tests/ruff.toml +0 -0
  53. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_cases/__init__.py +0 -0
  54. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_cases/test_cavity.py +0 -0
  55. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_cases/test_cavity_async.py +0 -0
  56. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_cases/test_flange.py +0 -0
  57. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_cases/test_flange_async.py +0 -0
  58. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/__init__.py +0 -0
  59. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/__init__.py +0 -0
  60. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_basic.py +0 -0
  61. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_decompose_par.py +0 -0
  62. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_fields.py +0 -0
  63. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_fv_schemes.py +0 -0
  64. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_loads.py +0 -0
  65. {foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_poly_mesh.py +0 -0
{foamlib-0.9.3 → foamlib-0.9.4}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: foamlib
- Version: 0.9.3
+ Version: 0.9.4
  Summary: A Python interface for interacting with OpenFOAM
  Project-URL: Homepage, https://github.com/gerlero/foamlib
  Project-URL: Repository, https://github.com/gerlero/foamlib
{foamlib-0.9.3 → foamlib-0.9.4}/foamlib/__init__.py
@@ -1,6 +1,6 @@
  """A Python interface for interacting with OpenFOAM."""

- __version__ = "0.9.3"
+ __version__ = "0.9.4"

  from ._cases import (
  AsyncFoamCase,
{foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_files/_files.py
@@ -22,14 +22,17 @@ from ._serialization import dumps, normalize_data, normalize_keyword
  from ._types import (
  Data,
  DataLike,
- Dict_,
  Dimensioned,
  DimensionSet,
- EntryLike,
  Field,
  FieldLike,
  File,
- MutableEntry,
+ FileLike,
+ MutableSubDict,
+ StandaloneData,
+ StandaloneDataLike,
+ SubDict,
+ SubDictLike,
  )


@@ -62,7 +65,7 @@ def _tensor_kind_for_field(
  class FoamFile(
  MutableMapping[
  Optional[Union[str, Tuple[str, ...]]],
- MutableEntry,
+ Union[Data, MutableSubDict],
  ],
  FoamFileIO,
  ):
@@ -115,7 +118,7 @@ class FoamFile(
  DimensionSet = DimensionSet

  class SubDict(
- MutableMapping[str, MutableEntry],
+ MutableMapping[str, Union[Data, MutableSubDict]],
  ):
  """
  An OpenFOAM sub-dictionary within a file.
@@ -154,7 +157,7 @@ class FoamFile(
  def __setitem__(
  self,
  keyword: str,
- data: EntryLike,
+ data: DataLike | SubDictLike,
  ) -> None:
  self._file[(*self._keywords, keyword)] = data

@@ -183,7 +186,7 @@ class FoamFile(
  def __repr__(self) -> str:
  return f"{type(self).__qualname__}('{self._file}', {self._keywords})"

- def as_dict(self) -> Dict_:
+ def as_dict(self) -> SubDict:
  """Return a nested dict representation of the sub-dictionary."""
  ret = self._file.as_dict(include_header=True)

@@ -193,7 +196,7 @@ class FoamFile(
  assert isinstance(v, dict)
  ret = cast("File", v)

- return cast("Dict_", ret)
+ return cast("SubDict", ret)

  @property
  def version(self) -> float:
@@ -282,7 +285,7 @@ class FoamFile(
  return deepcopy(value)

  def __setitem__(
- self, keywords: str | tuple[str, ...] | None, data: EntryLike
+ self, keywords: str | tuple[str, ...] | None, data: DataLike | SubDictLike
  ) -> None:
  if not keywords:
  keywords = ()
@@ -463,7 +466,7 @@ class FoamFile(
  s: bytes | str,
  *,
  include_header: bool = False,
- ) -> File | Data:
+ ) -> File | StandaloneData:
  """
  Standalone deserializing function.

@@ -486,7 +489,9 @@ class FoamFile(
  return ret

  @staticmethod
- def dumps(file: File | DataLike, *, ensure_header: bool = True) -> bytes:
+ def dumps(
+ file: FileLike | StandaloneDataLike, *, ensure_header: bool = True
+ ) -> bytes:
  """
  Standalone serializing function.

@@ -498,21 +503,23 @@ class FoamFile(
  If `True`, a header will be included if it is not already present in the
  input object.
  """
+ header: SubDict | None
  if isinstance(file, Mapping):
- header = file.get("FoamFile", None)
- assert isinstance(header, FoamFile.SubDict) or header is None
+ header = file.get("FoamFile", None) # type: ignore [assignment]
+
  entries: list[bytes] = []
  for k, v in file.items():
  if k is not None:
  entries.append(
- dumps((k, v), keywords=(), header=header, tuple_is_entry=True)
+ dumps((k, v), keywords=(), header=header, tuple_is_entry=True) # type: ignore [arg-type]
  )
  else:
+ assert not isinstance(v, Mapping)
  entries.append(dumps(v, keywords=(), header=header))
  ret = b" ".join(entries)
  else:
  header = None
- ret = dumps(file)
+ ret = dumps(file, keywords=(), header=header)

  if header is None and ensure_header:
  class_ = "dictionary"
@@ -670,5 +677,5 @@ class FoamFieldFile(FoamFile):
  return ret

  @boundary_field.setter
- def boundary_field(self, value: Mapping[str, Dict_]) -> None:
+ def boundary_field(self, value: Mapping[str, SubDict]) -> None:
  self["boundaryField"] = value
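Note: with these changes, `FoamFile.dumps` accepts any `FileLike` mapping or `StandaloneDataLike` value, and `FoamFile.SubDict.as_dict()` returns the new plain-dict `SubDict` alias instead of the removed `Dict_`. A minimal sketch of the public serializing entry point; the expected byte strings mirror the assertions added to tests/test_files/test_dumps.py in this release:

    from foamlib import FoamFile

    # A whole dictionary file, serialized with the default header.
    assert (
        FoamFile.dumps({"a": "b", "c": "d"})
        == b"{FoamFile {version 2.0; format ascii; class dictionary;}} a b; c d;"
    )

    # Standalone data (StandaloneDataLike) is also accepted directly.
    assert FoamFile.dumps([1, 2, 3], ensure_header=False) == b"(1 2 3)"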
{foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_files/_serialization.py
@@ -16,8 +16,10 @@ from ._types import (
  DataLike,
  Dimensioned,
  DimensionSet,
- Entry,
- EntryLike,
+ StandaloneData,
+ StandaloneDataLike,
+ SubDict,
+ SubDictLike,
  is_sequence,
  )

@@ -30,13 +32,37 @@ def normalize_data(

  @overload
  def normalize_data(
- data: EntryLike, *, keywords: tuple[str, ...] | None = None
- ) -> Entry: ...
+ data: StandaloneDataLike, *, keywords: tuple[str, ...] | None = None
+ ) -> StandaloneData: ...


+ @overload
  def normalize_data(
- data: EntryLike, *, keywords: tuple[str, ...] | None = None
- ) -> Entry:
+ data: SubDictLike, *, keywords: tuple[str, ...] | None = None
+ ) -> SubDict: ...
+
+
+ def normalize_data(
+ data: DataLike | StandaloneDataLike | SubDictLike,
+ *,
+ keywords: tuple[str, ...] | None = None,
+ ) -> Data | StandaloneData | SubDict:
+ if isinstance(data, Mapping):
+ return {normalize_keyword(k): normalize_data(v) for k, v in data.items()} # type: ignore [arg-type, misc]
+
+ if keywords == () and is_sequence(data) and not isinstance(data, tuple):
+ try:
+ arr = np.asarray(data)
+ except ValueError:
+ pass
+ else:
+ if np.issubdtype(arr.dtype, np.integer) and arr.ndim == 1:
+ return arr # type: ignore [return-value]
+ if arr.ndim == 2 and arr.shape[1] == 3:
+ if not np.issubdtype(arr.dtype, np.floating):
+ arr = arr.astype(float)
+ return arr # type: ignore [return-value]
+
  if keywords is not None and (
  keywords == ("internalField",)
  or (
@@ -49,7 +75,7 @@ def normalize_data(
  )
  )
  ):
- if is_sequence(data):
+ if is_sequence(data) and not isinstance(data, tuple):
  try:
  arr = np.asarray(data)
  except ValueError:
@@ -61,20 +87,17 @@ def normalize_data(
  if arr.ndim == 1 or (arr.ndim == 2 and arr.shape[1] in (3, 6, 9)):
  return arr # type: ignore [return-value]

- return [normalize_data(d) for d in data]
+ return [normalize_data(d) for d in data] # type: ignore [arg-type]

  if isinstance(data, int):
  return float(data)

  return normalize_data(data)

- if isinstance(data, Mapping):
- return {normalize_keyword(k): normalize_data(v) for k, v in data.items()} # type: ignore [misc]
-
  if isinstance(data, np.ndarray):
  ret = data.tolist()
  assert isinstance(ret, (int, float, list))
- return ret
+ return ret # type: ignore [return-value]

  if (
  not isinstance(data, DimensionSet)
@@ -90,16 +113,19 @@ def normalize_data(
  k, v = data
  assert not isinstance(k, Mapping)
  return (
- normalize_keyword(k),
- normalize_data(v) if not isinstance(v, Mapping) else v,
- ) # type: ignore [return-value]
+ normalize_keyword(k), # type: ignore [arg-type]
+ normalize_data(v) if not isinstance(v, Mapping) else v, # type: ignore [arg-type, misc]
+ )

  if (
  is_sequence(data)
  and not isinstance(data, DimensionSet)
- and (keywords is None or not isinstance(data, tuple))
+ and not isinstance(data, tuple)
  ):
- return [normalize_data(d) for d in data]
+ return [normalize_data(d) for d in data] # type: ignore [arg-type]
+
+ if isinstance(data, tuple) and not isinstance(data, DimensionSet):
+ return tuple(normalize_data(d) for d in data)

  if isinstance(data, str):
  s = loads(data)
@@ -108,7 +134,7 @@ def normalize_data(

  if isinstance(
  data,
- (int, float, bool, tuple, DimensionSet, Dimensioned),
+ (int, float, bool, DimensionSet, Dimensioned),
  ):
  return data

@@ -126,13 +152,13 @@ def normalize_keyword(data: DataLike) -> Data:


  def dumps(
- data: EntryLike,
+ data: DataLike | StandaloneDataLike | SubDictLike,
  *,
  keywords: tuple[str, ...] | None = None,
- header: Mapping[str, Entry] | None = None,
+ header: SubDictLike | None = None,
  tuple_is_entry: bool = False,
  ) -> bytes:
- data = normalize_data(data, keywords=keywords)
+ data = normalize_data(data, keywords=keywords) # type: ignore [arg-type, misc]

  if isinstance(data, Mapping):
  return (
@@ -148,6 +174,12 @@ def dumps(
  + b"}"
  )

+ if keywords == () and isinstance(data, np.ndarray):
+ if (header.get("format", "") if header else "") == "binary":
+ return dumps(len(data)) + b"(" + data.tobytes() + b")"
+
+ return dumps(data.tolist())
+
  if (
  keywords is not None
  and (
@@ -231,7 +263,7 @@ def dumps(
  return b" ".join(dumps(v) for v in data)

  if is_sequence(data):
- return b"(" + b" ".join(dumps(v, tuple_is_entry=True) for v in data) + b")"
+ return b"(" + b" ".join(dumps(v, tuple_is_entry=True) for v in data) + b")" # type: ignore [arg-type]

  if data is True:
  return b"yes"
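The key behavioural additions here are that `normalize_data` now converts top-level (`keywords=()`) sequences to NumPy arrays, and that `dumps` writes such arrays in binary form when the header declares `format binary`. A short sketch against the internal `foamlib._files._serialization` module (import path assumed; the expected outputs follow the test assertions referenced elsewhere in this diff):

    import numpy as np
    from foamlib._files._serialization import dumps, normalize_data

    # Keyword entries inside a list serialize as "key value;" pairs.
    assert dumps([("a", "b"), ("c", "d")]) == b"(a b; c d;)"
    assert dumps(["water", "oil", "mercury", "air"]) == b"(water oil mercury air)"

    # With keywords=(), a top-level list of ints normalizes to a 1-D integer array.
    arr = normalize_data([1, 2, 3], keywords=())
    assert isinstance(arr, np.ndarray) and arr.ndim == 1

    # With a "format binary" header, a 1-D array is written as "<len>(<raw bytes>)".
    data = np.array([1, 2, 3], dtype=np.int32)
    assert dumps(data, keywords=(), header={"format": "binary"}) == b"3(" + data.tobytes() + b")"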
{foamlib-0.9.3 → foamlib-0.9.4}/foamlib/_files/_types.py
@@ -1,7 +1,7 @@
  from __future__ import annotations

  import sys
- from typing import Any, Dict, NamedTuple, Optional, Union
+ from typing import Any, Dict, List, NamedTuple, Optional, Tuple, Union

  import numpy as np

@@ -76,9 +76,8 @@ Tensor = Union[
  ]

  TensorLike = Union[
- Sequence[float],
- "np.ndarray[tuple[()], np.dtype[np.float64]]",
  Tensor,
+ Sequence[float],
  ]


@@ -116,7 +115,7 @@ class Dimensioned:
  return Dimensioned(
  self.value + other.value, # type: ignore [arg-type]
  self.dimensions + other.dimensions,
- f"{self.name} + {other.name}"
+ f"{self.name}+{other.name}"
  if self.name is not None and other.name is not None
  else None,
  )
@@ -128,7 +127,7 @@ class Dimensioned:
  return Dimensioned(
  self.value - other.value, # type: ignore [arg-type]
  self.dimensions - other.dimensions,
- f"{self.name} - {other.name}"
+ f"{self.name}-{other.name}"
  if self.name is not None and other.name is not None
  else None,
  )
@@ -140,7 +139,7 @@ class Dimensioned:
  return Dimensioned(
  self.value * other.value, # type: ignore [arg-type]
  self.dimensions * other.dimensions,
- f"{self.name} * {other.name}"
+ f"{self.name}*{other.name}"
  if self.name is not None and other.name is not None
  else None,
  )
@@ -152,7 +151,7 @@ class Dimensioned:
  return Dimensioned(
  self.value / other.value, # type: ignore [arg-type]
  self.dimensions / other.dimensions,
- f"{self.name} / {other.name}"
+ f"{self.name}/{other.name}"
  if self.name is not None and other.name is not None
  else None,
  )
@@ -164,7 +163,7 @@ class Dimensioned:
  return Dimensioned(
  self.value**exponent, # type: ignore [arg-type]
  self.dimensions**exponent,
- f"{self.name} ** {exponent}" if self.name is not None else None,
+ f"pow({self.name},{exponent})" if self.name is not None else None,
  )

  def __float__(self) -> float:
@@ -194,10 +193,9 @@ Field = Union[
  ]

  FieldLike = Union[
+ Field,
  TensorLike,
  Sequence[TensorLike],
- Sequence[Sequence[TensorLike]],
- Field,
  ]


@@ -208,34 +206,35 @@ Data = Union[
  bool,
  Dimensioned,
  DimensionSet,
- Sequence["Entry"],
+ Tuple["Data", ...],
+ List[Union["Data", Tuple["Data", Union["Data", "SubDict"]]]],
  Field,
  ]

- Entry = Union[
+ DataLike = Union[
  Data,
- Mapping[str, "Entry"],
+ Tuple["DataLike", ...],
+ Sequence[Union["DataLike", Tuple["DataLike", Union["DataLike", "SubDictLike"]]]],
+ FieldLike,
  ]
- """
- A value that can be stored in an OpenFOAM file.
- """

- DataLike = Union[
- FieldLike,
- Sequence["EntryLike"],
+ StandaloneData = Union[
  Data,
+ "np.ndarray[tuple[int], np.dtype[np.int64 | np.int32]]",
+ "np.ndarray[tuple[int], np.dtype[np.float64 | np.float32]]",
  ]

- EntryLike = Union[
+ StandaloneDataLike = Union[
  DataLike,
- Mapping[str, "EntryLike"],
+ "np.ndarray[tuple[int], np.dtype[np.int64 | np.int32]]",
+ "np.ndarray[tuple[int], np.dtype[np.float64 | np.float32]]",
  ]


  def is_sequence(
- value: EntryLike,
+ value: DataLike | StandaloneDataLike | SubDictLike,
  ) -> TypeGuard[
- Sequence[EntryLike]
+ Sequence[DataLike | tuple[DataLike, DataLike | SubDictLike]]
  | np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]
  ]:
  return (isinstance(value, Sequence) and not isinstance(value, str)) or (
@@ -243,10 +242,9 @@ def is_sequence(
  )


- MutableEntry = Union[
- Data,
- MutableMapping[str, "MutableEntry"],
- ]
+ SubDict = Dict[str, Union[Data, "SubDict"]]
+ SubDictLike = Mapping[str, Union[DataLike, "SubDictLike"]]
+ MutableSubDict = MutableMapping[str, Union[Data, "MutableSubDict"]]

- Dict_ = Dict[str, Union["Entry", "Dict_"]]
- File = Dict[Optional[str], Union["Entry", "Dict_"]]
+ File = Dict[Optional[str], Union[StandaloneData, Data, "SubDict"]]
+ FileLike = Mapping[Optional[str], Union[StandaloneDataLike, DataLike, "FileLike"]]
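Summing up the renamed aliases: `Entry`, `EntryLike`, `MutableEntry`, and `Dict_` are replaced by `SubDict`/`SubDictLike`/`MutableSubDict` for dictionary-shaped data, `StandaloneData`/`StandaloneDataLike` for bare file content (including 1-D NumPy arrays), and `File`/`FileLike` for whole files keyed by `Optional[str]`. A small, hypothetical illustration of values that fit these aliases (the module path is internal and shown only for clarity):

    import numpy as np
    from foamlib._files._types import File, SubDict

    # A nested plain-dict sub-dictionary matches SubDict.
    patch: SubDict = {"type": "cyclic", "neighbourPatch": "lowerBoundary"}

    # A whole file matches File: str keywords map to data or sub-dicts, while the
    # optional None key holds standalone data such as a bare integer array
    # (a type checker may want an ignore here, as in the new test_dumps case).
    contents: File = {
        "FoamFile": {"format": "binary"},
        None: np.array([1, 2, 3], dtype=np.int32),
    }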
{foamlib-0.9.3 → foamlib-0.9.4}/tests/test_example.py
@@ -82,7 +82,7 @@ def test_example(tmp_path: Path) -> None:
  [1, 1, 1],
  ]
  f["edges"] = []
- f["boundary"] = [
+ boundary: list[tuple[str, dict[str, str | list[list[int]]]]] = [
  ("inletUp", {"type": "patch", "faces": [[5, 4, 10, 11]]}),
  ("inletDown", {"type": "patch", "faces": [[0, 5, 11, 6]]}),
  ("outletUp", {"type": "patch", "faces": [[2, 3, 9, 8]]}),
@@ -101,6 +101,7 @@ def test_example(tmp_path: Path) -> None:
  },
  ),
  ]
+ f["boundary"] = boundary
  f["mergePatchPairs"] = []

  with case.transport_properties as f:
{foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_dumps.py
@@ -92,7 +92,6 @@ def test_serialize_data() -> None:
  assert dumps([("a", "b"), ("c", "d")]) == b"(a b; c d;)"
  assert dumps([("a", {"b": "c"}), ("d", {"e": "g"})]) == b"(a {b c;} d {e g;})"
  assert dumps([("a", [0, 1, 2]), ("b", {})]) == b"(a (0 1 2); b {})"
- assert dumps([{"a": "b", "c": "d"}, {"e": "g"}]) == b"({a b; c d;} {e g;})"
  assert dumps(["water", "oil", "mercury", "air"]) == b"(water oil mercury air)"
  assert dumps("div(phi,U)") == b"div(phi,U)"

@@ -104,10 +103,25 @@ def test_serialize_file() -> None:
  == b"{FoamFile {version 2.0; format ascii; class dictionary;}} 1.0"
  )
  assert (
- FoamFile.dumps([{"a": "b", "c": "d"}, {"e": "g"}])
- == b"{FoamFile {version 2.0; format ascii; class dictionary;}} ({a b; c d;} {e g;})"
+ FoamFile.dumps({"a": "b", "c": "d"})
+ == b"{FoamFile {version 2.0; format ascii; class dictionary;}} a b; c d;"
  )
  assert (
  FoamFile.dumps({"internalField": [[1, 2, 3], [4, 5, 6]]})
  == b"{FoamFile {version 2.0; format ascii; class volVectorField;}} internalField nonuniform List<vector> 2((1.0 2.0 3.0) (4.0 5.0 6.0));"
  )
+ assert (
+ FoamFile.dumps([[1, 2, 3], [4, 5, 6]])
+ == b"{FoamFile {version 2.0; format ascii; class dictionary;}} ((1.0 2.0 3.0) (4.0 5.0 6.0))"
+ )
+ assert FoamFile.dumps([1, 2, 3], ensure_header=False) == b"(1 2 3)"
+ assert (
+ FoamFile.dumps(
+ {
+ "FoamFile": {"format": "binary"},
+ None: np.array([1, 2, 3], dtype=np.int32), # type: ignore[dict-item]
+ },
+ ensure_header=False,
+ )
+ == b"FoamFile {format binary;} 3(\x01\x00\x00\x00\x02\x00\x00\x00\x03\x00\x00\x00)"
+ )
{foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_files.py
@@ -161,7 +161,8 @@ def test_internal_field(cavity: FoamCase) -> None:
  blocks = cavity.block_mesh_dict["blocks"]
  assert isinstance(blocks, list)
  sizes = blocks[2]
- size = np.prod(sizes)
+ assert isinstance(sizes, list)
+ size = np.prod(sizes) # type: ignore [arg-type]

  p_arr = np.zeros(size)
  U_arr = np.zeros((size, 3))
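Since the broader `Data` union no longer guarantees a list here, the test narrows the value before handing it to NumPy. The same narrowing pattern applies to user code that reads entries back from a dictionary; a hypothetical sketch (the case directory is assumed to exist):

    import numpy as np
    from foamlib import FoamCase

    case = FoamCase("cavity")  # hypothetical existing case directory
    blocks = case.block_mesh_dict["blocks"]
    assert isinstance(blocks, list)

    sizes = blocks[2]
    assert isinstance(sizes, list)  # narrow the Data union before NumPy sees it
    size = int(np.prod(sizes))
    p = np.zeros(size)  # e.g. allocate a field of the right length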
{foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_advanced.py
@@ -280,7 +280,8 @@ def test_assignment_strange_name() -> None:
  assert isinstance(parsed[("equations", '"(U|e|k|epsilon).*"')], tuple)
  assert parsed[("equations", '"(U|e|k|epsilon).*"')][0] == "table"
  assert np.array_equal(
- parsed[("equations", '"(U|e|k|epsilon).*"')][1], [[0, 0.4], [0.5, 0.7]]
+ parsed[("equations", '"(U|e|k|epsilon).*"')][1], # type: ignore[arg-type]
+ [[0, 0.4], [0.5, 0.7]],
  )


{foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_parsing/test_intermediate.py
@@ -157,10 +157,10 @@ def test_list_with_dict() -> None:
  assert len(boundary) == 1

  upper_boundary = boundary[0]
- assert upper_boundary[0] == "upperBoundary"
- assert upper_boundary[1]["type"] == "cyclic"
- assert upper_boundary[1]["neighbourPatch"] == "lowerBoundary"
- assert np.array_equal(upper_boundary[1]["faces"], [[3, 7, 6, 2]])
+ assert upper_boundary[0] == "upperBoundary" # type: ignore[index]
+ assert upper_boundary[1]["type"] == "cyclic" # type: ignore[index, call-overload]
+ assert upper_boundary[1]["neighbourPatch"] == "lowerBoundary" # type: ignore[index, call-overload]
+ assert np.array_equal(upper_boundary[1]["faces"], [[3, 7, 6, 2]]) # type: ignore[arg-type, index, call-overload]


  def test_list_with_str() -> None:
@@ -174,10 +174,10 @@ def test_list_with_str() -> None:
  assert len(blocks) == 5

  assert blocks[0] == "hex"
- assert np.array_equal(blocks[1], [0, 1, 2, 3, 4, 5, 6, 7])
- assert np.array_equal(blocks[2], [40, 40, 40])
+ assert np.array_equal(blocks[1], [0, 1, 2, 3, 4, 5, 6, 7]) # type: ignore[arg-type]
+ assert np.array_equal(blocks[2], [40, 40, 40]) # type: ignore[arg-type]
  assert blocks[3] == "simpleGrading"
- assert np.array_equal(blocks[4], [1, 1, 1])
+ assert np.array_equal(blocks[4], [1, 1, 1]) # type: ignore[arg-type]


  def test_file_simple() -> None:
@@ -457,7 +457,7 @@ def test_for_blockmesh() -> None:
  assert len(blocks) == 4

  assert blocks[0] == "hex"
- assert np.array_equal(blocks[1], [4, 6, 14, 12, 0, 2, 10, 8])
+ assert np.array_equal(blocks[1], [4, 6, 14, 12, 0, 2, 10, 8]) # type: ignore[arg-type]
  assert blocks[2] == [1, "$upstreamCells", "$cylinderBoxCells"]
  assert blocks[3] == "$expandBlock"

@@ -487,27 +487,27 @@ def test_blocks() -> None:
  assert len(blocks) == 22

  assert blocks[0] == "hex"
- assert np.array_equal(blocks[1], [0, 1, 2, 3, 4, 5, 6, 7])
+ assert np.array_equal(blocks[1], [0, 1, 2, 3, 4, 5, 6, 7]) # type: ignore[arg-type]
  assert blocks[2] == "inletChannel"
- assert np.array_equal(blocks[3], [40, 1, 64])
+ assert np.array_equal(blocks[3], [40, 1, 64]) # type: ignore[arg-type]
  assert blocks[4] == "simpleGrading"
- assert np.array_equal(blocks[5], [1, 1, 1])
+ assert np.array_equal(blocks[5], [1, 1, 1]) # type: ignore[arg-type]
  assert blocks[6] == "hex"
- assert np.array_equal(blocks[7], [4, 5, 6, 7, 8, 9, 10, 11, 12])
+ assert np.array_equal(blocks[7], [4, 5, 6, 7, 8, 9, 10, 11, 12]) # type: ignore[arg-type]
  assert blocks[8] == "inletChannel"
- assert np.array_equal(blocks[9], [40, 1, 16])
+ assert np.array_equal(blocks[9], [40, 1, 16]) # type: ignore[arg-type]
  assert blocks[10] == "simpleGrading"
- assert np.array_equal(blocks[11], [1, 1, 1])
+ assert np.array_equal(blocks[11], [1, 1, 1]) # type: ignore[arg-type]
  assert blocks[12] == "hex"
- assert np.array_equal(blocks[13], [12, 13, 14, 15, 16, 17, 18, 19])
- assert np.array_equal(blocks[14], [96, 1, 8])
+ assert np.array_equal(blocks[13], [12, 13, 14, 15, 16, 17, 18, 19]) # type: ignore[arg-type]
+ assert np.array_equal(blocks[14], [96, 1, 8]) # type: ignore[arg-type]
  assert blocks[15] == "simpleGrading"
- assert np.array_equal(blocks[16], [1, 1, 1])
+ assert np.array_equal(blocks[16], [1, 1, 1]) # type: ignore[arg-type]
  assert blocks[17] == "hex"
- assert np.array_equal(blocks[18], [16, 17, 18, 19, 20, 21, 22, 23])
- assert np.array_equal(blocks[19], [96, 1, 72])
+ assert np.array_equal(blocks[18], [16, 17, 18, 19, 20, 21, 22, 23]) # type: ignore[arg-type]
+ assert np.array_equal(blocks[19], [96, 1, 72]) # type: ignore[arg-type]
  assert blocks[20] == "simpleGrading"
- assert np.array_equal(blocks[21], [1, 1, 1])
+ assert np.array_equal(blocks[21], [1, 1, 1]) # type: ignore[arg-type]


  @pytest.mark.xfail(reason="Not currently supported")
@@ -595,7 +595,7 @@ def test_list_edges() -> None:
  assert edges[1] == 1
  assert edges[2] == 2
  assert np.array_equal(
- edges[3],
+ edges[3], # type: ignore[arg-type]
  [
  [0.6, 0.0124, 0.0],
  [0.7, 0.0395, 0.0],
@@ -612,7 +612,7 @@ def test_list_edges() -> None:
  assert edges[5] == 6
  assert edges[6] == 5
  assert np.array_equal(
- edges[7],
+ edges[7], # type: ignore[arg-type]
  [
  [0.6, 0.0124, 0.05],
  [0.7, 0.0395, 0.05],
@@ -642,12 +642,12 @@ def test_list_edges_arcs() -> None:
  assert edges[1] == 0
  assert edges[2] == 5
  assert edges[3] == "origin"
- assert np.array_equal(edges[4], [0, 0, 0])
+ assert np.array_equal(edges[4], [0, 0, 0]) # type: ignore[arg-type]
  assert edges[5] == "arc"
  assert edges[6] == 5
  assert edges[7] == 10
  assert edges[8] == "origin"
- assert np.array_equal(edges[9], [0, 0, 0])
+ assert np.array_equal(edges[9], [0, 0, 0]) # type: ignore[arg-type]


  def test_list_blocks() -> None:
@@ -665,31 +665,31 @@ def test_list_blocks() -> None:
  assert len(blocks) == 15

  assert blocks[0] == "hex"
- assert np.array_equal(blocks[1], [0, 1, 9, 8, 7, 6, 14, 15])
- assert np.array_equal(blocks[2], [50, 100, 1])
+ assert np.array_equal(blocks[1], [0, 1, 9, 8, 7, 6, 14, 15]) # type: ignore[arg-type]
+ assert np.array_equal(blocks[2], [50, 100, 1]) # type: ignore[arg-type]
  assert blocks[3] == "simpleGrading"
  assert isinstance(blocks[4], list)
  assert len(blocks[4]) == 3
  assert blocks[4][0] == 1
- assert np.array_equal(blocks[4][1], [[0.1, 0.25, 41.9], [0.9, 0.75, 1]])
+ assert np.array_equal(blocks[4][1], [[0.1, 0.25, 41.9], [0.9, 0.75, 1]]) # type: ignore[arg-type]
  assert blocks[4][2] == 1
  assert blocks[5] == "hex"
- assert np.array_equal(blocks[6], [1, 2, 10, 9, 6, 5, 13, 14])
- assert np.array_equal(blocks[7], [50, 100, 1])
+ assert np.array_equal(blocks[6], [1, 2, 10, 9, 6, 5, 13, 14]) # type: ignore[arg-type]
+ assert np.array_equal(blocks[7], [50, 100, 1]) # type: ignore[arg-type]
  assert blocks[8] == "simpleGrading"
  assert isinstance(blocks[9], list)
  assert len(blocks[9]) == 3
  assert blocks[9][0] == 1
- assert np.array_equal(blocks[9][1], [[0.1, 0.25, 41.9], [0.9, 0.75, 1]])
+ assert np.array_equal(blocks[9][1], [[0.1, 0.25, 41.9], [0.9, 0.75, 1]]) # type: ignore[arg-type]
  assert blocks[9][2] == 1
  assert blocks[10] == "hex"
- assert np.array_equal(blocks[11], [2, 3, 11, 10, 5, 4, 12, 13])
- assert np.array_equal(blocks[12], [225, 100, 1])
+ assert np.array_equal(blocks[11], [2, 3, 11, 10, 5, 4, 12, 13]) # type: ignore[arg-type]
+ assert np.array_equal(blocks[12], [225, 100, 1]) # type: ignore[arg-type]
  assert blocks[13] == "simpleGrading"
  assert isinstance(blocks[14], list)
  assert len(blocks[14]) == 3
  assert blocks[14][0] == 1
- assert np.array_equal(blocks[14][1], [[0.1, 0.25, 41.9], [0.9, 0.75, 1]])
+ assert np.array_equal(blocks[14][1], [[0.1, 0.25, 41.9], [0.9, 0.75, 1]]) # type: ignore[arg-type]
  assert blocks[14][2] == 1


{foamlib-0.9.3 → foamlib-0.9.4}/tests/test_files/test_types.py
@@ -30,27 +30,27 @@ def test_dimensioned() -> None:
  result = dimensioned + dimensioned
  assert result.value == 9.81 * 2
  assert result.dimensions == FoamFile.DimensionSet(length=1, time=-2)
- assert result.name == "g + g"
+ assert result.name == "g+g"

  result = dimensioned - dimensioned
  assert result.value == 0.0
  assert result.dimensions == FoamFile.DimensionSet(length=1, time=-2)
- assert result.name == "g - g"
+ assert result.name == "g-g"

  result = dimensioned * dimensioned
  assert result.value == 9.81**2
  assert result.dimensions == FoamFile.DimensionSet(length=2, time=-4)
- assert result.name == "g * g"
+ assert result.name == "g*g"

  result = dimensioned / dimensioned
  assert result.value == 1.0
  assert result.dimensions == FoamFile.DimensionSet()
- assert result.name == "g / g"
+ assert result.name == "g/g"

  result = dimensioned**2
  assert result.value == 9.81**2
  assert result.dimensions == FoamFile.DimensionSet(length=2, time=-4)
- assert result.name == "g ** 2"
+ assert result.name == "pow(g,2)"

  with pytest.raises(ValueError, match="dimension"):
  dimensioned + 1
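Derived `Dimensioned` quantities are now named with OpenFOAM-style expressions: operators are written without surrounding spaces and powers use `pow(name,exponent)`, as the updated assertions above show. A small sketch, assuming the nested `FoamFile.Dimensioned` constructor takes value, dimensions, and name in that order (the order used inside `_types.py` itself):

    from foamlib import FoamFile

    g = FoamFile.Dimensioned(9.81, FoamFile.DimensionSet(length=1, time=-2), "g")

    assert (g + g).name == "g+g"       # was "g + g" in 0.9.3
    assert (g * g).name == "g*g"       # was "g * g"
    assert (g**2).name == "pow(g,2)"   # was "g ** 2"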