foamlib 0.8.11__tar.gz → 0.9.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. {foamlib-0.8.11 → foamlib-0.9.0}/PKG-INFO +1 -1
  2. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/__init__.py +1 -1
  3. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_files/_parsing.py +213 -111
  4. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_files.py +6 -6
  5. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_advanced.py +22 -14
  6. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_basic.py +47 -18
  7. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_decompose_par.py +3 -1
  8. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_intermediate.py +160 -100
  9. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_poly_mesh.py +12 -11
  10. {foamlib-0.8.11 → foamlib-0.9.0}/.devcontainer.json +0 -0
  11. {foamlib-0.8.11 → foamlib-0.9.0}/.dockerignore +0 -0
  12. {foamlib-0.8.11 → foamlib-0.9.0}/.git-blame-ignore-revs +0 -0
  13. {foamlib-0.8.11 → foamlib-0.9.0}/.github/dependabot.yml +0 -0
  14. {foamlib-0.8.11 → foamlib-0.9.0}/.github/workflows/ci.yml +0 -0
  15. {foamlib-0.8.11 → foamlib-0.9.0}/.github/workflows/docker.yml +0 -0
  16. {foamlib-0.8.11 → foamlib-0.9.0}/.github/workflows/dockerhub-description.yml +0 -0
  17. {foamlib-0.8.11 → foamlib-0.9.0}/.github/workflows/pypi-publish.yml +0 -0
  18. {foamlib-0.8.11 → foamlib-0.9.0}/.gitignore +0 -0
  19. {foamlib-0.8.11 → foamlib-0.9.0}/.readthedocs.yaml +0 -0
  20. {foamlib-0.8.11 → foamlib-0.9.0}/CONTRIBUTING.md +0 -0
  21. {foamlib-0.8.11 → foamlib-0.9.0}/Dockerfile +0 -0
  22. {foamlib-0.8.11 → foamlib-0.9.0}/LICENSE.txt +0 -0
  23. {foamlib-0.8.11 → foamlib-0.9.0}/README.md +0 -0
  24. {foamlib-0.8.11 → foamlib-0.9.0}/benchmark/benchmark.png +0 -0
  25. {foamlib-0.8.11 → foamlib-0.9.0}/benchmark/benchmark.py +0 -0
  26. {foamlib-0.8.11 → foamlib-0.9.0}/benchmark/requirements.txt +0 -0
  27. {foamlib-0.8.11 → foamlib-0.9.0}/benchmark/ruff.toml +0 -0
  28. {foamlib-0.8.11 → foamlib-0.9.0}/docs/Makefile +0 -0
  29. {foamlib-0.8.11 → foamlib-0.9.0}/docs/cases.rst +0 -0
  30. {foamlib-0.8.11 → foamlib-0.9.0}/docs/conf.py +0 -0
  31. {foamlib-0.8.11 → foamlib-0.9.0}/docs/files.rst +0 -0
  32. {foamlib-0.8.11 → foamlib-0.9.0}/docs/index.rst +0 -0
  33. {foamlib-0.8.11 → foamlib-0.9.0}/docs/make.bat +0 -0
  34. {foamlib-0.8.11 → foamlib-0.9.0}/docs/ruff.toml +0 -0
  35. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_cases/__init__.py +0 -0
  36. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_cases/_async.py +0 -0
  37. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_cases/_base.py +0 -0
  38. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_cases/_run.py +0 -0
  39. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_cases/_slurm.py +0 -0
  40. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_cases/_subprocess.py +0 -0
  41. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_cases/_sync.py +0 -0
  42. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_cases/_util.py +0 -0
  43. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_files/__init__.py +0 -0
  44. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_files/_files.py +0 -0
  45. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_files/_io.py +0 -0
  46. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_files/_serialization.py +0 -0
  47. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_files/_types.py +0 -0
  48. {foamlib-0.8.11 → foamlib-0.9.0}/foamlib/py.typed +0 -0
  49. {foamlib-0.8.11 → foamlib-0.9.0}/logo.png +0 -0
  50. {foamlib-0.8.11 → foamlib-0.9.0}/pyproject.toml +0 -0
  51. {foamlib-0.8.11 → foamlib-0.9.0}/tests/__init__.py +0 -0
  52. {foamlib-0.8.11 → foamlib-0.9.0}/tests/ruff.toml +0 -0
  53. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_cases/__init__.py +0 -0
  54. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_cases/test_cavity.py +0 -0
  55. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_cases/test_cavity_async.py +0 -0
  56. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_cases/test_flange.py +0 -0
  57. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_cases/test_flange_async.py +0 -0
  58. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_example.py +0 -0
  59. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/__init__.py +0 -0
  60. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_dumps.py +0 -0
  61. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/__init__.py +0 -0
  62. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_fields.py +0 -0
  63. {foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_fv_schemes.py +0 -0
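
The user-visible theme of this release, as the test updates in the diff below show, is that parsed numeric lists are now returned as NumPy arrays rather than nested Python lists. A minimal sketch of the new behavior, written against the private foamlib._files._parsing.Parsed helper exactly as the updated tests use it (it is a private module, so treat the exact entry point as an assumption):

    import numpy as np
    from foamlib._files._parsing import Parsed  # private helper used by the test suite

    lst = Parsed(b"(1 2 3)")[()]            # plain numeric list
    assert isinstance(lst, np.ndarray)      # 0.8.x returned a nested Python list
    assert lst.dtype == np.int64            # all-integer input stays integral
    assert np.array_equal(lst, [1, 2, 3])

    field = Parsed(b"nonuniform List<vector> 2((1 2 3) (4 5 6))")[()]
    assert field.shape == (2, 3)            # nested numeric lists become 2-D arrays
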
{foamlib-0.8.11 → foamlib-0.9.0}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: foamlib
-Version: 0.8.11
+Version: 0.9.0
 Summary: A Python interface for interacting with OpenFOAM
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib
{foamlib-0.8.11 → foamlib-0.9.0}/foamlib/__init__.py
@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.8.11"
+__version__ = "0.9.0"
 
 from ._cases import (
     AsyncFoamCase,
{foamlib-0.8.11 → foamlib-0.9.0}/foamlib/_files/_parsing.py
@@ -25,6 +25,7 @@ from pyparsing import (
     LineEnd,
     Literal,
     Located,
+    NoMatch,
     Opt,
     ParserElement,
     ParseResults,
@@ -37,131 +38,194 @@ from pyparsing import (
     printables,
 )
 
-from ._types import Data, Dimensioned, DimensionSet, File, TensorKind
+from ._types import Data, Dimensioned, DimensionSet, File
 
 
-def _tensor(tensor_kind: TensorKind, *, ignore: Regex | None = None) -> Regex:
+def _numeric_list(
+    *, nested: int | None = None, ignore: Regex | None = None, force_float: bool = False
+) -> ParserElement:
+    if not force_float:
+        int_pattern = r"(?:-?\d+)"
     float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
+    spacing_pattern = (
+        rf"(?:(?:\s|{ignore.re.pattern})+)" if ignore is not None else r"\s+"
+    )
 
-    if tensor_kind == TensorKind.SCALAR:
-        ret = Regex(float_pattern)
-        ret.add_parse_action(lambda tks: [float(tks[0])])
-        return ret
+    if nested is None:
+        if not force_float:
+            int_element_pattern = int_pattern
+            int_element = common.integer
+        float_element_pattern = float_pattern
+        float_element = common.ieee_float
+    else:
+        if not force_float:
+            int_element_pattern = rf"(?:(?:{nested})?{spacing_pattern}?\({spacing_pattern}?(?:{int_pattern}{spacing_pattern}){{{nested - 1}}}{int_pattern}{spacing_pattern}?\))"
+            int_element = (
+                Opt(Literal(str(nested))).suppress()
+                + Literal("(").suppress()
+                + Group(common.integer[nested])
+                + Literal(")").suppress()
+            )
+        float_element_pattern = rf"(?:(?:{nested})?{spacing_pattern}?\({spacing_pattern}?(?:{float_pattern}{spacing_pattern}){{{nested - 1}}}{float_pattern}{spacing_pattern}?\))"
+        float_element = (
+            Opt(Literal(str(nested))).suppress()
+            + Literal("(").suppress()
+            + Group(common.ieee_float[nested])
+            + Literal(")").suppress()
+        )
 
-    ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
+    if not force_float:
+        int_list = Forward()
+    float_list = Forward()
 
-    ret = Regex(
-        rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{tensor_kind.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
-    )
-    ret.add_parse_action(
-        lambda tks: np.fromstring(
-            re.sub(ignore.re, " ", tks[0][1:-1])
-            if ignore is not None
-            else tks[0][1:-1],
-            sep=" ",
-        )
-    )
-    return ret
+    def process_count(tks: ParseResults) -> None:
+        nonlocal int_list, float_list
 
+        if not tks:
+            count = None
+        else:
+            (count,) = tks
+            assert isinstance(count, int)
 
-def _list_of(entry: ParserElement) -> ParserElement:
-    return Opt(
-        Literal("List") + Literal("<") + _IDENTIFIER + Literal(">")
-    ).suppress() + (
-        (
-            counted_array(entry, common.integer + Literal("(").suppress())
-            + Literal(")").suppress()
-        ).set_parse_action(lambda tks: [tks.as_list()])
-        | (
-            Literal("(").suppress()
-            + Group((entry)[...], aslist=True)
-            + Literal(")").suppress()
+        if count is None:
+            if not force_float:
+                int_list_pattern = rf"\({spacing_pattern}?(?:{int_element_pattern}{spacing_pattern})*{int_element_pattern}{spacing_pattern}?\)"
+                float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern})*{float_element_pattern}{spacing_pattern}?\)"
+            else:
+                float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern})*{float_element_pattern}?{spacing_pattern}?\)"
+
+        elif count == 0:
+            if not force_float:
+                int_list <<= NoMatch()
+                float_list <<= NoMatch()
+            else:
+                float_list <<= (Literal("(") + Literal(")")).add_parse_action(
+                    lambda: np.empty((0, nested) if nested else 0, dtype=float)
+                )
+            return
+
+        else:
+            if not force_float:
+                int_list_pattern = rf"\({spacing_pattern}?(?:{int_element_pattern}{spacing_pattern}){{{count - 1}}}{int_element_pattern}{spacing_pattern}?\)"
+            float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern}){{{count - 1}}}{float_element_pattern}{spacing_pattern}?\)"
+
+        if not force_float:
+            int_list <<= Regex(int_list_pattern).add_parse_action(
+                lambda tks: to_array(tks, dtype=int)
+            )
+        float_list <<= Regex(float_list_pattern).add_parse_action(
+            lambda tks: to_array(tks, dtype=float)
         )
-        | (
-            common.integer + Literal("{").suppress() + entry + Literal("}").suppress()
-        ).set_parse_action(lambda tks: [[tks[1]] * tks[0]])
-    )
 
+    def to_array(
+        tks: ParseResults, *, dtype: type
+    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]]:
+        (s,) = tks
+        s = s.replace("(", "").replace(")", "")
 
-def _parse_ascii_field(
-    s: str, tensor_kind: TensorKind, *, ignore: Regex | None
-) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64]]:
-    if ignore is not None:
-        s = re.sub(ignore.re, " ", s)
-    s = s.replace("(", " ").replace(")", " ")
+        if ignore is not None:
+            s = re.sub(ignore.re, " ", s)
 
-    return np.fromstring(s, sep=" ").reshape(-1, *tensor_kind.shape)  # type: ignore [return-value]
+        ret: np.ndarray[
+            tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]
+        ] = np.fromstring(s, sep=" ", dtype=dtype)  # type: ignore[assignment]
 
+        if nested is not None:
+            ret = ret.reshape(-1, nested)
 
-def _unpack_binary_field(
-    b: bytes, tensor_kind: TensorKind, *, length: int
-) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
-    float_size = len(b) / tensor_kind.size / length
-    assert float_size in (4, 8)
+        return ret
 
-    dtype = np.float32 if float_size == 4 else float
-    return np.frombuffer(b, dtype=dtype).reshape(-1, *tensor_kind.shape)  # type: ignore [return-value]
+    def to_full_array(
+        tks: ParseResults, *, dtype: type
+    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]]:
+        count, lst = tks
+        assert isinstance(count, int)
 
+        if nested is None:
+            return np.full(count, lst, dtype=dtype)
 
-def _tensor_list(
-    tensor_kind: TensorKind, *, ignore: Regex | None = None
-) -> ParserElement:
-    tensor = _tensor(tensor_kind, ignore=ignore)
-    ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
-
-    list_ = Forward()
-
-    list_ <<= Regex(
-        rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern})*{tensor.re.pattern}(?:{ignore_pattern})?\)"
-    ).add_parse_action(
-        lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
-    ) | (
-        (Literal("(") + Literal(")"))
-        .suppress()
-        .add_parse_action(lambda: np.array([]).reshape(0, *tensor_kind.shape))
+        return np.full((count, nested), lst, dtype=dtype)  # type: ignore[return-value]
+
+    count = Opt(common.integer).add_parse_action(process_count)
+
+    ret = count.suppress() + (
+        (int_list | float_list) if not force_float else float_list
     )
 
-    def count_parse_action(tks: ParseResults) -> None:
-        nonlocal list_
-        length = tks[0]
-        assert isinstance(length, int)
+    if not force_float:
+        ret |= (
+            common.integer
+            + Literal("{").suppress()
+            + int_element
+            + Literal("}").suppress()
+        ).add_parse_action(lambda tks: to_full_array(tks, dtype=int))
+    ret |= (
+        common.integer
+        + Literal("{").suppress()
+        + float_element
+        + Literal("}").suppress()
+    ).add_parse_action(lambda tks: to_full_array(tks, dtype=float))
 
-        if not length:
-            list_ <<= (
-                ((Literal("(") + Literal(")")) | (Literal("{") + Literal("}")))
-                .suppress()
-                .add_parse_action(lambda: np.array([]).reshape(0, *tensor_kind.shape))
-            )
-            return
+    if ignore is not None:
+        ret.ignore(ignore)
 
-        list_ <<= (
-            Regex(
-                rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern}){{{length - 1}}}{tensor.re.pattern}(?:{ignore_pattern})?\)"
-            ).add_parse_action(
-                lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
-            )
-            | Regex(
-                rf"\((?s:({'.' * 8 * tensor_kind.size}|{'.' * 4 * tensor_kind.size}){{{length}}})\)"
-            ).add_parse_action(
-                lambda tks: [
-                    _unpack_binary_field(
-                        tks[0][1:-1].encode("latin-1"), tensor_kind, length=length
-                    )
-                ]
-            )
-            | (
-                Literal("{").suppress() + tensor + Literal("}").suppress()
-            ).add_parse_action(
-                lambda tks: [np.full((length, *tensor_kind.shape), tks[0], dtype=float)]
-            )
+    return ret
+
+
+def _binary_field(*, nested: int | None = None) -> ParserElement:
+    elsize = nested if nested is not None else 1
+
+    binary_field = Forward()
+
+    def process_count(tks: ParseResults) -> None:
+        nonlocal binary_field
+        (size,) = tks
+        assert isinstance(size, int)
+
+        binary_field <<= Regex(
+            rf"\((?s:({'.' * 8 * elsize}|{'.' * 4 * elsize}){{{size}}})\)"
         )
 
-    count = common.integer.copy().add_parse_action(count_parse_action)
+    def to_array(
+        tks: ParseResults,
+    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
+        size, s = tks
+        assert isinstance(size, int)
+        assert isinstance(s, str)
+        assert s[0] == "("
+        assert s[-1] == ")"
+        s = s[1:-1]
+
+        float_size = len(s) / elsize / size
+        assert float_size in (4, 8)
+
+        dtype = np.float32 if float_size == 4 else float
+        ret = np.frombuffer(s.encode("latin-1"), dtype=dtype)
 
+        if nested is not None:
+            ret = ret.reshape(-1, nested)
+
+        return ret  # type: ignore[return-value]
+
+    count = common.integer.copy().add_parse_action(process_count)
+
+    return (count + binary_field).add_parse_action(to_array)
+
+
+def _list_of(entry: ParserElement) -> ParserElement:
     return (
-        Opt(Literal("List") + Literal("<") + str(tensor_kind) + Literal(">")).suppress()
-        + Opt(count).suppress()
-        + list_
+        (
+            counted_array(entry, common.integer + Literal("(").suppress())
+            + Literal(")").suppress()
+        ).set_parse_action(lambda tks: [tks.as_list()])
+        | (
+            Literal("(").suppress()
+            + Group((entry)[...], aslist=True)
+            + Literal(")").suppress()
+        )
+        | (
+            common.integer + Literal("{").suppress() + entry + Literal("}").suppress()
+        ).set_parse_action(lambda tks: [[tks[1]] * tks[0]])
     )
 
 
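The new _numeric_list above replaces the per-tensor-kind parsers with a single regex that captures the whole parenthesized list plus a parse action that hands the digits to NumPy. A rough standalone sketch of that idea (the helper name and signature here are illustrative, not the library's API):

    import numpy as np

    def parse_numeric_list(s, nested=None):
        """Illustrative only: match a whole "(...)" numeric list, let NumPy convert it."""
        s = s.strip()
        assert s.startswith("(") and s.endswith(")")
        # Drop all parentheses and parse the remaining whitespace-separated numbers,
        # the same np.fromstring(..., sep=" ") step the new parse action relies on.
        flat = np.fromstring(s.replace("(", " ").replace(")", " "), sep=" ")
        return flat.reshape(-1, nested) if nested is not None else flat

    print(parse_numeric_list("(1 2 3)"))               # [1. 2. 3.]
    print(parse_numeric_list("((1 2 3) (4 5 6))", 3))  # 2x3 array
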
@@ -250,12 +314,11 @@ _SWITCH = (
 _DIMENSIONS = (
     Literal("[").suppress() + common.number[0, 7] + Literal("]").suppress()
 ).set_parse_action(lambda tks: DimensionSet(*tks))
-_TENSOR = (
-    _tensor(TensorKind.SCALAR)
-    | _tensor(TensorKind.VECTOR)
-    | _tensor(TensorKind.SYMM_TENSOR)
-    | _tensor(TensorKind.TENSOR)
-)
+_TENSOR = common.ieee_float | (
+    Literal("(").suppress()
+    + Group(common.ieee_float[3] | common.ieee_float[6] | common.ieee_float[9])
+    + Literal(")").suppress()
+).add_parse_action(lambda tks: np.array(tks[0], dtype=float))
 _PARENTHESIZED = Forward()
 _IDENTIFIER = Combine(Word(_IDENTCHARS, _IDENTBODYCHARS) + Opt(_PARENTHESIZED))
 _PARENTHESIZED <<= Combine(
@@ -270,12 +333,46 @@ _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
 _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
     Keyword("nonuniform", _IDENTBODYCHARS).suppress()
     + (
-        _tensor_list(TensorKind.SCALAR, ignore=_COMMENT)
-        | _tensor_list(TensorKind.VECTOR, ignore=_COMMENT)
-        | _tensor_list(TensorKind.SYMM_TENSOR, ignore=_COMMENT)
-        | _tensor_list(TensorKind.TENSOR, ignore=_COMMENT)
+        (
+            Opt(
+                Literal("List") + Literal("<") + Literal("scalar") + Literal(">")
+            ).suppress()
+            + (_numeric_list(force_float=True, ignore=_COMMENT) | _binary_field())
+        )
+        | (
+            Opt(
+                Literal("List") + Literal("<") + Literal("vector") + Literal(">")
+            ).suppress()
+            + (
+                _numeric_list(nested=3, force_float=True, ignore=_COMMENT)
+                | _binary_field(nested=3)
+            )
+        )
+        | (
+            Opt(
+                Literal("List") + Literal("<") + Literal("symmTensor") + Literal(">")
+            ).suppress()
+            + (
+                _numeric_list(
+                    nested=6,
+                    force_float=True,
+                    ignore=_COMMENT,
+                )
+                | _binary_field(nested=6)
+            )
+        )
+        | (
+            Opt(
+                Literal("List") + Literal("<") + Literal("tensor") + Literal(">")
+            ).suppress()
+            + (
+                _numeric_list(nested=9, force_float=True, ignore=_COMMENT)
+                | _binary_field(nested=9)
+            )
+        )
     )
 )
+
 _DIRECTIVE = Word("#", _IDENTBODYCHARS)
 _TOKEN = dbl_quoted_string | _DIRECTIVE | _IDENTIFIER
 _DATA = Forward()
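_FIELD above now pairs each List<...> kind with either the ASCII _numeric_list or the new _binary_field, which infers the float width from the payload length. A small sketch of that width-detection step (the helper below is illustrative, not the library's API):

    import numpy as np

    def unpack_binary_field(payload: bytes, count: int, nested=None):
        """Illustrative only: decode a binary OpenFOAM field of `count` elements."""
        elsize = nested if nested is not None else 1
        float_size = len(payload) / elsize / count   # bytes per scalar component
        assert float_size in (4, 8)                  # single or double precision
        dtype = np.float32 if float_size == 4 else np.float64
        arr = np.frombuffer(payload, dtype=dtype)
        return arr.reshape(-1, nested) if nested is not None else arr

    # Two double-precision vectors, 2 * 3 * 8 = 48 bytes:
    payload = np.arange(6, dtype=np.float64).tobytes()
    print(unpack_binary_field(payload, count=2, nested=3))
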
@@ -283,7 +380,12 @@ _KEYWORD_ENTRY = _keyword_entry_of(_TOKEN | _list_of(_IDENTIFIER), _DATA)
 _DICT = _dict_of(_TOKEN, _DATA)
 _DATA_ENTRY = Forward()
 _LIST_ENTRY = _DICT | _KEYWORD_ENTRY | _DATA_ENTRY
-_LIST = _list_of(_LIST_ENTRY)
+_LIST = (
+    _numeric_list(ignore=_COMMENT)
+    | _numeric_list(nested=3, ignore=_COMMENT)
+    | _numeric_list(nested=4, ignore=_COMMENT)
+    | _list_of(_LIST_ENTRY)
+)
 _NUMBER = (
     common.number
     | CaselessKeyword("nan").set_parse_action(lambda: np.nan)
{foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_files.py
@@ -3,9 +3,9 @@ import sys
 from pathlib import Path
 
 if sys.version_info >= (3, 9):
-    from collections.abc import Generator, Sequence
+    from collections.abc import Generator
 else:
-    from typing import Generator, Sequence
+    from typing import Generator
 
 import numpy as np
 import pytest
@@ -153,16 +153,16 @@ def test_mesh(cavity: FoamCase) -> None:
 
     points = file[None]
 
-    assert isinstance(points, Sequence)
-    assert isinstance(points[0], Sequence)
-    assert len(points[0]) == 3
+    assert isinstance(points, np.ndarray)
+    assert points.ndim == 2
+    assert points.shape[-1] == 3
 
 
 def test_internal_field(cavity: FoamCase) -> None:
     blocks = cavity.block_mesh_dict["blocks"]
     assert isinstance(blocks, list)
     sizes = blocks[2]
-    assert isinstance(sizes, list)
+    assert isinstance(sizes, np.ndarray)
     size = np.prod(sizes)
 
     p_arr = np.zeros(size)
{foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_advanced.py
@@ -1,5 +1,6 @@
 # Based on https://foss.heptapod.net/fluiddyn/fluidsimfoam/-/blob/branch/default/tests/test_parser_advanced.py
 
+import numpy as np
 import pytest
 from foamlib._files._parsing import Parsed
 
@@ -276,9 +277,10 @@ def test_assignment_strange_name() -> None:
     """
     )
     assert parsed[("equations", '"(U|e|k).*"')] == 0.7
-    assert parsed[("equations", '"(U|e|k|epsilon).*"')] == (
-        "table",
-        [[0, 0.4], [0.5, 0.7]],
+    assert isinstance(parsed[("equations", '"(U|e|k|epsilon).*"')], tuple)
+    assert parsed[("equations", '"(U|e|k|epsilon).*"')][0] == "table"
+    assert np.array_equal(
+        parsed[("equations", '"(U|e|k|epsilon).*"')][1], [[0, 0.4], [0.5, 0.7]]
     )
 
 
@@ -331,17 +333,23 @@ def test_list_u() -> None:
     )
     """
     )
-    assert parsed[()] == [
-        [4.50773, 1.79963, 0.0],
-        [6.06208, 2.40831, 0.0],
-        [6.874, 2.72079, 0.0],
-        [7.42929, 2.931, 0.0],
-        [7.85095, 3.08805, 0.0],
-        [8.19202, 3.21306, 0.0],
-        [17.5, 1.92559e-09, 0.0],
-        [17.5, 6.81045e-12, 0.0],
-        [17.5, 6.81045e-12, 0.0],
-    ]
+    data = parsed[()]
+    assert isinstance(data, np.ndarray)
+    assert data.shape == (9, 3)
+    assert np.array_equal(
+        data,
+        [
+            [4.507730000e00, 1.799630000e00, 0.000000000e00],
+            [6.062080000e00, 2.408310000e00, 0.000000000e00],
+            [6.874000000e00, 2.720790000e00, 0.000000000e00],
+            [7.429290000e00, 2.931000000e00, 0.000000000e00],
+            [7.850950000e00, 3.088050000e00, 0.000000000e00],
+            [8.192020000e00, 3.213060000e00, 0.000000000e00],
+            [1.750000000e01, 1.925590000e-09, 0.000000000e00],
+            [1.750000000e01, 6.810450000e-12, 0.000000000e00],
+            [1.750000000e01, 6.810450000e-12, 0.000000000e00],
+        ],
+    )
 
 
 def test_list_as_write_cell_centers() -> None:
{foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_basic.py
@@ -16,7 +16,16 @@ def test_parse_value() -> None:
     assert Parsed(b"uniform 1")[()] == 1
     assert Parsed(b"uniform 1.0")[()] == 1.0
     assert Parsed(b"uniform 1.0e-3")[()] == 1.0e-3
-    assert Parsed(b"(1.0 2.0 3.0)")[()] == [1.0, 2.0, 3.0]
+    assert Parsed(b"(word word)")[()] == ["word", "word"]
+    lst = Parsed(b"(1 2 3)")[()]
+    assert isinstance(lst, np.ndarray)
+    assert lst.dtype == np.int64
+    assert np.array_equal(lst, [1, 2, 3])
+    lst = Parsed(b"(1.0 2 3)")[()]
+    assert isinstance(lst, np.ndarray)
+    assert lst.dtype == np.float64
+    assert np.array_equal(lst, [1.0, 2.0, 3.0])
+    assert Parsed(b"()")[()] == []
     field = Parsed(b"uniform (1 2 3)")[()]
     assert isinstance(field, np.ndarray)
     assert np.array_equal(field, [1, 2, 3])
@@ -32,15 +41,28 @@ def test_parse_value() -> None:
     field = Parsed(b"nonuniform List<tensor> ()")[()]
     assert isinstance(field, np.ndarray)
     assert field.shape == (0, 9)
-    assert Parsed(b"3(1 2 3)")[()] == [1, 2, 3]
-    assert Parsed(b"2((1 2 3) (4 5 6))")[()] == [
-        [1, 2, 3],
-        [4, 5, 6],
-    ]
-    assert Parsed(b"2{(1 2 3)}")[()] == [
-        [1, 2, 3],
-        [1, 2, 3],
-    ]
+    lst = Parsed(b"3(1 2 3)")[()]
+    assert isinstance(lst, np.ndarray)
+    assert np.array_equal(lst, [1, 2, 3])
+    lst = Parsed(b"2((1 2 3) (4 5 6))")[()]
+    assert isinstance(lst, np.ndarray)
+    assert np.array_equal(
+        lst,
+        [
+            [1, 2, 3],
+            [4, 5, 6],
+        ],
+    )
+    lst = Parsed(b"2{(1 2 3)}")[()]
+    assert isinstance(lst, np.ndarray)
+    assert np.array_equal(
+        lst,
+        [
+            [1, 2, 3],
+            [1, 2, 3],
+        ],
+    )
+    assert Parsed(b"0()")[()] == []
     field = Parsed(b"nonuniform List<vector> 2((1 2 3) (4 5 6))")[()]
     assert isinstance(field, np.ndarray)
     assert np.array_equal(
@@ -83,19 +105,26 @@ def test_parse_value() -> None:
     assert Parsed(b"[1 1 -2 0 0 0 0] 9.81")[()] == FoamFile.Dimensioned(
         dimensions=FoamFile.DimensionSet(mass=1, length=1, time=-2), value=9.81
     )
-    assert Parsed(b"hex (0 1 2 3 4 5 6 7) (1 1 1) simpleGrading (1 1 1)")[()] == (
-        "hex",
-        [0, 1, 2, 3, 4, 5, 6, 7],
-        [1, 1, 1],
-        "simpleGrading",
-        [1, 1, 1],
-    )
+    tpl = Parsed(b"hex (0 1 2 3 4 5 6 7) (1 1 1) simpleGrading (1 1 1)")[()]
+    assert isinstance(tpl, tuple)
+    assert len(tpl) == 5
+    assert tpl[0] == "hex"
+    assert isinstance(tpl[1], np.ndarray)
+    assert tpl[1].dtype == np.int64
+    assert np.array_equal(tpl[1], [0, 1, 2, 3, 4, 5, 6, 7])
+    assert isinstance(tpl[2], np.ndarray)
+    assert tpl[2].dtype == np.int64
+    assert np.array_equal(tpl[2], [1, 1, 1])
+    assert tpl[3] == "simpleGrading"
+    assert isinstance(tpl[4], np.ndarray)
+    assert tpl[4].dtype == np.int64
+    assert np.array_equal(tpl[4], [1, 1, 1])
     assert Parsed(b"(a b; c d;)")[()] == [("a", "b"), ("c", "d")]
     assert Parsed(b"(a {b c;} d {e g;})")[()] == [
         ("a", {"b": "c"}),
         ("d", {"e": "g"}),
     ]
-    assert Parsed(b"(a (0 1 2); b {})")[()] == [("a", [0, 1, 2]), ("b", {})]
+    assert Parsed(b"(a (b c d); e {})")[()] == [("a", ["b", "c", "d"]), ("e", {})]
     assert Parsed(b"({a b; c d;} {e g;})")[()] == [{"a": "b", "c": "d"}, {"e": "g"}]
     assert Parsed(b"(water oil mercury air)")[()] == ["water", "oil", "mercury", "air"]
     assert Parsed(b"div(phi,U)")[()] == "div(phi,U)"
{foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_decompose_par.py
@@ -3,6 +3,7 @@
 from pathlib import Path
 from textwrap import dedent
 
+import numpy as np
 from foamlib import FoamFile
 
 
@@ -43,7 +44,8 @@ def test_simple(tmp_path: Path) -> None:
 
     assert decompose_par_dict["numberOfSubdomains"] == 8
     assert decompose_par_dict["method"] == "simple"
-    assert decompose_par_dict["coeffs", "n"] == [4, 2, 1]
+    assert isinstance(decompose_par_dict["coeffs", "n"], np.ndarray)
+    assert np.array_equal(decompose_par_dict["coeffs", "n"], [4, 2, 1])
     assert decompose_par_dict["coeffs", "order"] == "xyz"
     assert decompose_par_dict["coeffs", "delta"] == 0.001
 
{foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_intermediate.py
@@ -1,5 +1,6 @@
 # Based on https://foss.heptapod.net/fluiddyn/fluidsimfoam/-/blob/branch/default/tests/test_parser.py
 
+import numpy as np
 import pytest
 from foamlib import FoamFile
 from foamlib._files._parsing import Parsed
@@ -61,20 +62,20 @@ def test_strange_names() -> None:
 
 
 def test_list_simple() -> None:
-    assert Parsed(b"""
+    faces = Parsed(b"""
     faces
     (
        (1 5 4 0)
        (2 3 4 5)
    );
-    """)[("faces",)] == [
-        [1, 5, 4, 0],
-        [2, 3, 4, 5],
-    ]
+    """)[("faces",)]
+
+    assert isinstance(faces, np.ndarray)
+    assert np.array_equal(faces, [[1, 5, 4, 0], [2, 3, 4, 5]])
 
 
 def test_list_assignment() -> None:
-    assert Parsed(b"""
+    faces = Parsed(b"""
     faces
     (
         1
@@ -82,7 +83,9 @@ def test_list_assignment() -> None:
         4
         0
     );
-    """)[("faces",)] == [1, 5, 4, 0]
+    """)[("faces",)]
+    assert isinstance(faces, np.ndarray)
+    assert np.array_equal(faces, [1, 5, 4, 0])
 
 
 def test_dict_simple() -> None:
@@ -135,11 +138,12 @@ def test_dict_with_list() -> None:
         pRefValue 0;
     }
     """)
-    assert parsed[("PISO", "pRefPoint")] == [0, 0, 0]
+    assert isinstance(parsed[("PISO", "pRefPoint")], np.ndarray)
+    assert np.array_equal(parsed[("PISO", "pRefPoint")], [0, 0, 0])
 
 
 def test_list_with_dict() -> None:
-    assert Parsed(b"""
+    boundary = Parsed(b"""
     boundary
     (
         upperBoundary
@@ -152,33 +156,36 @@ def test_list_with_dict() -> None:
            );
        }
    );
-    """)[("boundary",)] == [
-        (
-            "upperBoundary",
-            {
-                "type": "cyclic",
-                "neighbourPatch": "lowerBoundary",
-                "faces": [
-                    [3, 7, 6, 2],
-                ],
-            },
-        ),
-    ]
+    """)[("boundary",)]
+    assert isinstance(boundary, list)
+    assert len(boundary) == 1
+
+    upper_boundary = boundary[0]
+    assert upper_boundary[0] == "upperBoundary"
+    assert upper_boundary[1]["type"] == "cyclic"
+    assert upper_boundary[1]["neighbourPatch"] == "lowerBoundary"
+    assert isinstance(upper_boundary[1]["faces"], np.ndarray)
+    assert np.array_equal(upper_boundary[1]["faces"], [[3, 7, 6, 2]])
 
 
 def test_list_with_str() -> None:
-    assert Parsed(b"""
+    blocks = Parsed(b"""
     blocks
     (
        hex (0 1 2 3 4 5 6 7) (40 40 40) simpleGrading (1 1 1)
    );
-    """)[("blocks",)] == [
-        "hex",
-        [0, 1, 2, 3, 4, 5, 6, 7],
-        [40, 40, 40],
-        "simpleGrading",
-        [1, 1, 1],
-    ]
+    """)[("blocks",)]
+    assert isinstance(blocks, list)
+    assert len(blocks) == 5
+
+    assert blocks[0] == "hex"
+    assert isinstance(blocks[1], np.ndarray)
+    assert np.array_equal(blocks[1], [0, 1, 2, 3, 4, 5, 6, 7])
+    assert isinstance(blocks[2], np.ndarray)
+    assert np.array_equal(blocks[2], [40, 40, 40])
+    assert blocks[3] == "simpleGrading"
+    assert isinstance(blocks[4], np.ndarray)
+    assert np.array_equal(blocks[4], [1, 1, 1])
 
 
 def test_file_simple() -> None:
@@ -230,10 +237,9 @@ def test_file() -> None:
     """)
     assert parsed[("a",)] == 1
     assert parsed[("b",)] == 2
-    assert parsed[("faces",)] == [
-        [1, 5, 4, 0],
-        [2, 3, 4, 5],
-    ]
+    faces = parsed[("faces",)]
+    assert isinstance(faces, np.ndarray)
+    assert np.array_equal(faces, [[1, 5, 4, 0], [2, 3, 4, 5]])
     assert parsed[("my_dict", "a")] == 1
 
 
@@ -455,12 +461,15 @@ def test_for_blockmesh() -> None:
    );
    """)
    assert parsed[("negHalfWidth",)] == ("#neg", "$halfWidth")
-    assert parsed[("blocks",)] == [
-        "hex",
-        [4, 6, 14, 12, 0, 2, 10, 8],
-        [1, "$upstreamCells", "$cylinderBoxCells"],
-        "$expandBlock",
-    ]
+    blocks = parsed[("blocks",)]
+    assert isinstance(blocks, list)
+    assert len(blocks) == 4
+
+    assert blocks[0] == "hex"
+    assert isinstance(blocks[1], np.ndarray)
+    assert np.array_equal(blocks[1], [4, 6, 14, 12, 0, 2, 10, 8])
+    assert blocks[2] == [1, "$upstreamCells", "$cylinderBoxCells"]
+    assert blocks[3] == "$expandBlock"
 
 
 def test_for_u() -> None:
@@ -483,30 +492,44 @@ def test_blocks() -> None:
        hex (16 17 18 19 20 21 22 23) (96 1 72) simpleGrading (1 1 1)
    );
    """)
-    assert parsed[("blocks",)] == [
-        "hex",
-        [0, 1, 2, 3, 4, 5, 6, 7],
-        "inletChannel",
-        [40, 1, 64],
-        "simpleGrading",
-        [1, 1, 1],
-        "hex",
-        [4, 5, 6, 7, 8, 9, 10, 11, 12],
-        "inletChannel",
-        [40, 1, 16],
-        "simpleGrading",
-        [1, 1, 1],
-        "hex",
-        [12, 13, 14, 15, 16, 17, 18, 19],
-        [96, 1, 8],
-        "simpleGrading",
-        [1, 1, 1],
-        "hex",
-        [16, 17, 18, 19, 20, 21, 22, 23],
-        [96, 1, 72],
-        "simpleGrading",
-        [1, 1, 1],
-    ]
+    blocks = parsed[("blocks",)]
+    assert isinstance(blocks, list)
+    assert len(blocks) == 22
+
+    assert blocks[0] == "hex"
+    assert isinstance(blocks[1], np.ndarray)
+    assert np.array_equal(blocks[1], [0, 1, 2, 3, 4, 5, 6, 7])
+    assert blocks[2] == "inletChannel"
+    assert isinstance(blocks[3], np.ndarray)
+    assert np.array_equal(blocks[3], [40, 1, 64])
+    assert blocks[4] == "simpleGrading"
+    assert isinstance(blocks[5], np.ndarray)
+    assert np.array_equal(blocks[5], [1, 1, 1])
+    assert blocks[6] == "hex"
+    assert isinstance(blocks[7], np.ndarray)
+    assert np.array_equal(blocks[7], [4, 5, 6, 7, 8, 9, 10, 11, 12])
+    assert blocks[8] == "inletChannel"
+    assert isinstance(blocks[9], np.ndarray)
+    assert np.array_equal(blocks[9], [40, 1, 16])
+    assert blocks[10] == "simpleGrading"
+    assert isinstance(blocks[11], np.ndarray)
+    assert np.array_equal(blocks[11], [1, 1, 1])
+    assert blocks[12] == "hex"
+    assert isinstance(blocks[13], np.ndarray)
+    assert np.array_equal(blocks[13], [12, 13, 14, 15, 16, 17, 18, 19])
+    assert isinstance(blocks[14], np.ndarray)
+    assert np.array_equal(blocks[14], [96, 1, 8])
+    assert blocks[15] == "simpleGrading"
+    assert isinstance(blocks[16], np.ndarray)
+    assert np.array_equal(blocks[16], [1, 1, 1])
+    assert blocks[17] == "hex"
+    assert isinstance(blocks[18], np.ndarray)
+    assert np.array_equal(blocks[18], [16, 17, 18, 19, 20, 21, 22, 23])
+    assert isinstance(blocks[19], np.ndarray)
+    assert np.array_equal(blocks[19], [96, 1, 72])
+    assert blocks[20] == "simpleGrading"
+    assert isinstance(blocks[21], np.ndarray)
+    assert np.array_equal(blocks[21], [1, 1, 1])
 
 
 @pytest.mark.xfail(reason="Not currently supported")
@@ -586,10 +609,16 @@ def test_list_edges() -> None:
        spline 6 5 ((0.6 0.0124 0.05) (0.7 0.0395 0.05) (0.8 0.0724 0.05) (0.9 0.132 0.05) (1 0.172 0.05) (1.1 0.132 0.05) (1.2 0.0724 0.05) (1.3 0.0395 0.05) (1.4 0.0124 0.05))
    );
    """)
-    assert parsed[("edges",)] == [
-        "spline",
-        1,
-        2,
+    edges = parsed[("edges",)]
+    assert isinstance(edges, list)
+    assert len(edges) == 8
+
+    assert edges[0] == "spline"
+    assert edges[1] == 1
+    assert edges[2] == 2
+    assert isinstance(edges[3], np.ndarray)
+    assert np.array_equal(
+        edges[3],
         [
             [0.6, 0.0124, 0.0],
             [0.7, 0.0395, 0.0],
@@ -601,9 +630,13 @@ def test_list_edges() -> None:
             [1.3, 0.0395, 0.0],
             [1.4, 0.0124, 0.0],
         ],
-        "spline",
-        6,
-        5,
+    )
+    assert edges[4] == "spline"
+    assert edges[5] == 6
+    assert edges[6] == 5
+    assert isinstance(edges[7], np.ndarray)
+    assert np.array_equal(
+        edges[7],
         [
             [0.6, 0.0124, 0.05],
             [0.7, 0.0395, 0.05],
@@ -615,7 +648,7 @@ def test_list_edges() -> None:
             [1.3, 0.0395, 0.05],
             [1.4, 0.0124, 0.05],
         ],
-    ]
+    )
 
 
 def test_list_edges_arcs() -> None:
@@ -626,18 +659,21 @@ def test_list_edges_arcs() -> None:
        arc 5 10 origin (0 0 0)
    );
    """)
-    assert parsed[("edges",)] == [
-        "arc",
-        0,
-        5,
-        "origin",
-        [0, 0, 0],
-        "arc",
-        5,
-        10,
-        "origin",
-        [0, 0, 0],
-    ]
+    edges = parsed[("edges",)]
+    assert isinstance(edges, list)
+    assert len(edges) == 10
+    assert edges[0] == "arc"
+    assert edges[1] == 0
+    assert edges[2] == 5
+    assert edges[3] == "origin"
+    assert isinstance(edges[4], np.ndarray)
+    assert np.array_equal(edges[4], [0, 0, 0])
+    assert edges[5] == "arc"
+    assert edges[6] == 5
+    assert edges[7] == 10
+    assert edges[8] == "origin"
+    assert isinstance(edges[9], np.ndarray)
+    assert np.array_equal(edges[9], [0, 0, 0])
 
 
 def test_list_blocks() -> None:
@@ -649,23 +685,47 @@ def test_list_blocks() -> None:
        hex (2 3 11 10 5 4 12 13) (225 100 1) simpleGrading (1 ((0.1 0.25 41.9) (0.9 0.75 1)) 1)
    );
    """)
-    assert parsed[("blocks",)] == [
-        "hex",
-        [0, 1, 9, 8, 7, 6, 14, 15],
-        [50, 100, 1],
-        "simpleGrading",
-        [1, [[0.1, 0.25, 41.9], [0.9, 0.75, 1]], 1],
-        "hex",
-        [1, 2, 10, 9, 6, 5, 13, 14],
-        [50, 100, 1],
-        "simpleGrading",
-        [1, [[0.1, 0.25, 41.9], [0.9, 0.75, 1]], 1],
-        "hex",
-        [2, 3, 11, 10, 5, 4, 12, 13],
-        [225, 100, 1],
-        "simpleGrading",
-        [1, [[0.1, 0.25, 41.9], [0.9, 0.75, 1]], 1],
-    ]
+    blocks = parsed[("blocks",)]
+
+    assert isinstance(blocks, list)
+    assert len(blocks) == 15
+
+    assert blocks[0] == "hex"
+    assert isinstance(blocks[1], np.ndarray)
+    assert np.array_equal(blocks[1], [0, 1, 9, 8, 7, 6, 14, 15])
+    assert isinstance(blocks[2], np.ndarray)
+    assert np.array_equal(blocks[2], [50, 100, 1])
+    assert blocks[3] == "simpleGrading"
+    assert isinstance(blocks[4], list)
+    assert len(blocks[4]) == 3
+    assert blocks[4][0] == 1
+    assert isinstance(blocks[4][1], np.ndarray)
+    assert np.array_equal(blocks[4][1], [[0.1, 0.25, 41.9], [0.9, 0.75, 1]])
+    assert blocks[4][2] == 1
+    assert blocks[5] == "hex"
+    assert isinstance(blocks[6], np.ndarray)
+    assert np.array_equal(blocks[6], [1, 2, 10, 9, 6, 5, 13, 14])
+    assert isinstance(blocks[7], np.ndarray)
+    assert np.array_equal(blocks[7], [50, 100, 1])
+    assert blocks[8] == "simpleGrading"
+    assert isinstance(blocks[9], list)
+    assert len(blocks[9]) == 3
+    assert blocks[9][0] == 1
+    assert isinstance(blocks[9][1], np.ndarray)
+    assert np.array_equal(blocks[9][1], [[0.1, 0.25, 41.9], [0.9, 0.75, 1]])
+    assert blocks[9][2] == 1
+    assert blocks[10] == "hex"
+    assert isinstance(blocks[11], np.ndarray)
+    assert np.array_equal(blocks[11], [2, 3, 11, 10, 5, 4, 12, 13])
+    assert isinstance(blocks[12], np.ndarray)
+    assert np.array_equal(blocks[12], [225, 100, 1])
+    assert blocks[13] == "simpleGrading"
+    assert isinstance(blocks[14], list)
+    assert len(blocks[14]) == 3
+    assert blocks[14][0] == 1
+    assert isinstance(blocks[14][1], np.ndarray)
+    assert np.array_equal(blocks[14][1], [[0.1, 0.25, 41.9], [0.9, 0.75, 1]])
+    assert blocks[14][2] == 1
 
 
 @pytest.mark.xfail(reason="Not currently supported")
{foamlib-0.8.11 → foamlib-0.9.0}/tests/test_files/test_parsing/test_poly_mesh.py
@@ -2,6 +2,7 @@
 
 from pathlib import Path
 
+import numpy as np
 from foamlib import FoamFile
 
 contents = r"""
@@ -49,18 +50,18 @@ def test_get_cells_coords(tmp_path: Path) -> None:
     file = FoamFile(path)
 
     points = file[None]
-    assert isinstance(points, list)
+    assert isinstance(points, np.ndarray)
 
-    assert points[0] == [0, 0, 0]
-    assert points[1] == [0.15707963268, 0, 0]
-    assert points[2] == [0.314159265359, 0, 0]
-    assert points[3] == [0.471238898038, 0, 0]
-    assert points[4] == [0.628318530718, 0, 0]
-    assert points[5] == [0, 0, 0]
-    assert points[6] == [0.15707963268, 0, 0]
-    assert points[7] == [0.314159265359, 0, 0]
-    assert points[8] == [0.471238898038, 0, 0]
-    assert points[9] == [0.628318530718, 0, 0]
+    assert np.array_equal(points[0], [0, 0, 0])
+    assert np.array_equal(points[1], [0.15707963268, 0, 0])
+    assert np.array_equal(points[2], [0.314159265359, 0, 0])
+    assert np.array_equal(points[3], [0.471238898038, 0, 0])
+    assert np.array_equal(points[4], [0.628318530718, 0, 0])
+    assert np.array_equal(points[5], [0, 0, 0])
+    assert np.array_equal(points[6], [0.15707963268, 0, 0])
+    assert np.array_equal(points[7], [0.314159265359, 0, 0])
+    assert np.array_equal(points[8], [0.471238898038, 0, 0])
+    assert np.array_equal(points[9], [0.628318530718, 0, 0])
 
     assert len(points) == 10
 
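The polyMesh test above exercises the same change through the public FoamFile class: reading a points file now yields an (N, 3) array. A short usage sketch under that assumption (the file path below is a placeholder, not taken from the diff):

    import numpy as np
    from foamlib import FoamFile

    # "constant/polyMesh/points" is a hypothetical path for illustration.
    points = FoamFile("constant/polyMesh/points")[None]
    assert isinstance(points, np.ndarray)             # 0.8.x returned a list of 3-element lists
    assert points.ndim == 2 and points.shape[-1] == 3
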