foamlib 0.8.10__tar.gz → 0.9.0__tar.gz

This diff shows the changes between two publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (63)
  1. {foamlib-0.8.10 → foamlib-0.9.0}/PKG-INFO +1 -26
  2. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/__init__.py +1 -1
  3. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_files/_parsing.py +212 -98
  4. {foamlib-0.8.10 → foamlib-0.9.0}/pyproject.toml +19 -18
  5. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_files.py +6 -6
  6. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_parsing/test_advanced.py +26 -19
  7. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_parsing/test_basic.py +53 -18
  8. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_parsing/test_decompose_par.py +5 -3
  9. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_parsing/test_fields.py +7 -4
  10. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_parsing/test_fv_schemes.py +7 -7
  11. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_parsing/test_intermediate.py +163 -101
  12. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_parsing/test_poly_mesh.py +18 -15
  13. {foamlib-0.8.10 → foamlib-0.9.0}/.devcontainer.json +0 -0
  14. {foamlib-0.8.10 → foamlib-0.9.0}/.dockerignore +0 -0
  15. {foamlib-0.8.10 → foamlib-0.9.0}/.git-blame-ignore-revs +0 -0
  16. {foamlib-0.8.10 → foamlib-0.9.0}/.github/dependabot.yml +0 -0
  17. {foamlib-0.8.10 → foamlib-0.9.0}/.github/workflows/ci.yml +0 -0
  18. {foamlib-0.8.10 → foamlib-0.9.0}/.github/workflows/docker.yml +0 -0
  19. {foamlib-0.8.10 → foamlib-0.9.0}/.github/workflows/dockerhub-description.yml +0 -0
  20. {foamlib-0.8.10 → foamlib-0.9.0}/.github/workflows/pypi-publish.yml +0 -0
  21. {foamlib-0.8.10 → foamlib-0.9.0}/.gitignore +0 -0
  22. {foamlib-0.8.10 → foamlib-0.9.0}/.readthedocs.yaml +0 -0
  23. {foamlib-0.8.10 → foamlib-0.9.0}/CONTRIBUTING.md +0 -0
  24. {foamlib-0.8.10 → foamlib-0.9.0}/Dockerfile +0 -0
  25. {foamlib-0.8.10 → foamlib-0.9.0}/LICENSE.txt +0 -0
  26. {foamlib-0.8.10 → foamlib-0.9.0}/README.md +0 -0
  27. {foamlib-0.8.10 → foamlib-0.9.0}/benchmark/benchmark.png +0 -0
  28. {foamlib-0.8.10 → foamlib-0.9.0}/benchmark/benchmark.py +0 -0
  29. {foamlib-0.8.10 → foamlib-0.9.0}/benchmark/requirements.txt +0 -0
  30. {foamlib-0.8.10 → foamlib-0.9.0}/benchmark/ruff.toml +0 -0
  31. {foamlib-0.8.10 → foamlib-0.9.0}/docs/Makefile +0 -0
  32. {foamlib-0.8.10 → foamlib-0.9.0}/docs/cases.rst +0 -0
  33. {foamlib-0.8.10 → foamlib-0.9.0}/docs/conf.py +0 -0
  34. {foamlib-0.8.10 → foamlib-0.9.0}/docs/files.rst +0 -0
  35. {foamlib-0.8.10 → foamlib-0.9.0}/docs/index.rst +0 -0
  36. {foamlib-0.8.10 → foamlib-0.9.0}/docs/make.bat +0 -0
  37. {foamlib-0.8.10 → foamlib-0.9.0}/docs/ruff.toml +0 -0
  38. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_cases/__init__.py +0 -0
  39. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_cases/_async.py +0 -0
  40. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_cases/_base.py +0 -0
  41. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_cases/_run.py +0 -0
  42. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_cases/_slurm.py +0 -0
  43. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_cases/_subprocess.py +0 -0
  44. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_cases/_sync.py +0 -0
  45. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_cases/_util.py +0 -0
  46. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_files/__init__.py +0 -0
  47. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_files/_files.py +0 -0
  48. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_files/_io.py +0 -0
  49. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_files/_serialization.py +0 -0
  50. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_files/_types.py +0 -0
  51. {foamlib-0.8.10 → foamlib-0.9.0}/foamlib/py.typed +0 -0
  52. {foamlib-0.8.10 → foamlib-0.9.0}/logo.png +0 -0
  53. {foamlib-0.8.10 → foamlib-0.9.0}/tests/__init__.py +0 -0
  54. {foamlib-0.8.10 → foamlib-0.9.0}/tests/ruff.toml +0 -0
  55. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_cases/__init__.py +0 -0
  56. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_cases/test_cavity.py +0 -0
  57. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_cases/test_cavity_async.py +0 -0
  58. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_cases/test_flange.py +0 -0
  59. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_cases/test_flange_async.py +0 -0
  60. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_example.py +0 -0
  61. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/__init__.py +0 -0
  62. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_dumps.py +0 -0
  63. {foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_parsing/__init__.py +0 -0
{foamlib-0.8.10 → foamlib-0.9.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: foamlib
-Version: 0.8.10
+Version: 0.9.0
 Summary: A Python interface for interacting with OpenFOAM
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib
@@ -30,35 +30,10 @@ Requires-Dist: numpy<3,>=1
 Requires-Dist: pyparsing<4,>=3.1.2
 Requires-Dist: rich<15,>=13
 Requires-Dist: typing-extensions<5,>=4; python_version < '3.11'
-Provides-Extra: dev
-Requires-Dist: mypy<2,>=1; extra == 'dev'
-Requires-Dist: pytest-asyncio<0.27,>=0.21; extra == 'dev'
-Requires-Dist: pytest-cov; extra == 'dev'
-Requires-Dist: pytest<9,>=7; extra == 'dev'
-Requires-Dist: ruff; extra == 'dev'
-Requires-Dist: scipy-stubs; (python_version >= '3.10') and extra == 'dev'
-Requires-Dist: scipy<2,>=1; extra == 'dev'
-Requires-Dist: sphinx-rtd-theme; extra == 'dev'
-Requires-Dist: sphinx<9,>=5; extra == 'dev'
 Provides-Extra: docs
 Requires-Dist: ruff; extra == 'docs'
 Requires-Dist: sphinx-rtd-theme; extra == 'docs'
 Requires-Dist: sphinx<9,>=5; extra == 'docs'
-Provides-Extra: lint
-Requires-Dist: ruff; extra == 'lint'
-Provides-Extra: test
-Requires-Dist: mypy<2,>=1; extra == 'test'
-Requires-Dist: pytest-asyncio<0.27,>=0.21; extra == 'test'
-Requires-Dist: pytest-cov; extra == 'test'
-Requires-Dist: pytest<9,>=7; extra == 'test'
-Requires-Dist: scipy<2,>=1; extra == 'test'
-Provides-Extra: typing
-Requires-Dist: mypy<2,>=1; extra == 'typing'
-Requires-Dist: pytest-asyncio<0.27,>=0.21; extra == 'typing'
-Requires-Dist: pytest-cov; extra == 'typing'
-Requires-Dist: pytest<9,>=7; extra == 'typing'
-Requires-Dist: scipy-stubs; (python_version >= '3.10') and extra == 'typing'
-Requires-Dist: scipy<2,>=1; extra == 'typing'
 Description-Content-Type: text/markdown
 
 [<img alt="foamlib" src="https://github.com/gerlero/foamlib/raw/main/logo.png" height="65">](https://github.com/gerlero/foamlib)
{foamlib-0.8.10 → foamlib-0.9.0}/foamlib/__init__.py

@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.8.10"
+__version__ = "0.9.0"
 
 from ._cases import (
     AsyncFoamCase,
{foamlib-0.8.10 → foamlib-0.9.0}/foamlib/_files/_parsing.py

@@ -25,6 +25,7 @@ from pyparsing import (
     LineEnd,
     Literal,
     Located,
+    NoMatch,
     Opt,
     ParserElement,
     ParseResults,
@@ -37,119 +38,194 @@ from pyparsing import (
     printables,
 )
 
-from ._types import Data, Dimensioned, DimensionSet, File, TensorKind
+from ._types import Data, Dimensioned, DimensionSet, File
 
 
-def _tensor(tensor_kind: TensorKind, *, ignore: Regex | None = None) -> Regex:
+def _numeric_list(
+    *, nested: int | None = None, ignore: Regex | None = None, force_float: bool = False
+) -> ParserElement:
+    if not force_float:
+        int_pattern = r"(?:-?\d+)"
     float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
+    spacing_pattern = (
+        rf"(?:(?:\s|{ignore.re.pattern})+)" if ignore is not None else r"\s+"
+    )
 
-    if tensor_kind == TensorKind.SCALAR:
-        ret = Regex(float_pattern)
-        ret.add_parse_action(lambda tks: [float(tks[0])])
-        return ret
+    if nested is None:
+        if not force_float:
+            int_element_pattern = int_pattern
+            int_element = common.integer
+        float_element_pattern = float_pattern
+        float_element = common.ieee_float
+    else:
+        if not force_float:
+            int_element_pattern = rf"(?:(?:{nested})?{spacing_pattern}?\({spacing_pattern}?(?:{int_pattern}{spacing_pattern}){{{nested - 1}}}{int_pattern}{spacing_pattern}?\))"
+            int_element = (
+                Opt(Literal(str(nested))).suppress()
+                + Literal("(").suppress()
+                + Group(common.integer[nested])
+                + Literal(")").suppress()
+            )
+        float_element_pattern = rf"(?:(?:{nested})?{spacing_pattern}?\({spacing_pattern}?(?:{float_pattern}{spacing_pattern}){{{nested - 1}}}{float_pattern}{spacing_pattern}?\))"
+        float_element = (
+            Opt(Literal(str(nested))).suppress()
+            + Literal("(").suppress()
+            + Group(common.ieee_float[nested])
+            + Literal(")").suppress()
+        )
 
-    ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
+    if not force_float:
+        int_list = Forward()
+    float_list = Forward()
 
-    ret = Regex(
-        rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{tensor_kind.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
-    )
-    ret.add_parse_action(
-        lambda tks: np.fromstring(
-            re.sub(ignore.re, " ", tks[0][1:-1])
-            if ignore is not None
-            else tks[0][1:-1],
-            sep=" ",
-        )
-    )
-    return ret
+    def process_count(tks: ParseResults) -> None:
+        nonlocal int_list, float_list
 
+        if not tks:
+            count = None
+        else:
+            (count,) = tks
+            assert isinstance(count, int)
 
-def _list_of(entry: ParserElement) -> ParserElement:
-    return Opt(
-        Literal("List") + Literal("<") + _IDENTIFIER + Literal(">")
-    ).suppress() + (
-        (
-            counted_array(entry, common.integer + Literal("(").suppress())
-            + Literal(")").suppress()
-        ).set_parse_action(lambda tks: [tks.as_list()])
-        | (
-            Literal("(").suppress()
-            + Group((entry)[...], aslist=True)
-            + Literal(")").suppress()
+        if count is None:
+            if not force_float:
+                int_list_pattern = rf"\({spacing_pattern}?(?:{int_element_pattern}{spacing_pattern})*{int_element_pattern}{spacing_pattern}?\)"
+                float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern})*{float_element_pattern}{spacing_pattern}?\)"
+            else:
+                float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern})*{float_element_pattern}?{spacing_pattern}?\)"
+
+        elif count == 0:
+            if not force_float:
+                int_list <<= NoMatch()
+                float_list <<= NoMatch()
+            else:
+                float_list <<= (Literal("(") + Literal(")")).add_parse_action(
+                    lambda: np.empty((0, nested) if nested else 0, dtype=float)
+                )
+            return
+
+        else:
+            if not force_float:
+                int_list_pattern = rf"\({spacing_pattern}?(?:{int_element_pattern}{spacing_pattern}){{{count - 1}}}{int_element_pattern}{spacing_pattern}?\)"
+            float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern}){{{count - 1}}}{float_element_pattern}{spacing_pattern}?\)"
+
+        if not force_float:
+            int_list <<= Regex(int_list_pattern).add_parse_action(
+                lambda tks: to_array(tks, dtype=int)
+            )
+        float_list <<= Regex(float_list_pattern).add_parse_action(
+            lambda tks: to_array(tks, dtype=float)
         )
-        | (
-            common.integer + Literal("{").suppress() + entry + Literal("}").suppress()
-        ).set_parse_action(lambda tks: [[tks[1]] * tks[0]])
-    )
 
+    def to_array(
+        tks: ParseResults, *, dtype: type
+    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]]:
+        (s,) = tks
+        s = s.replace("(", "").replace(")", "")
 
-def _parse_ascii_field(
-    s: str, tensor_kind: TensorKind, *, ignore: Regex | None
-) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64]]:
-    if ignore is not None:
-        s = re.sub(ignore.re, " ", s)
-    s = s.replace("(", " ").replace(")", " ")
+        if ignore is not None:
+            s = re.sub(ignore.re, " ", s)
 
-    return np.fromstring(s, sep=" ").reshape(-1, *tensor_kind.shape)  # type: ignore [return-value]
+        ret: np.ndarray[
+            tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]
+        ] = np.fromstring(s, sep=" ", dtype=dtype)  # type: ignore[assignment]
 
+        if nested is not None:
+            ret = ret.reshape(-1, nested)
 
-def _unpack_binary_field(
-    b: bytes, tensor_kind: TensorKind, *, length: int
-) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
-    float_size = len(b) / tensor_kind.size / length
-    assert float_size in (4, 8)
+        return ret
 
-    dtype = np.float32 if float_size == 4 else float
-    return np.frombuffer(b, dtype=dtype).reshape(-1, *tensor_kind.shape)  # type: ignore [return-value]
+    def to_full_array(
+        tks: ParseResults, *, dtype: type
+    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]]:
+        count, lst = tks
+        assert isinstance(count, int)
 
+        if nested is None:
+            return np.full(count, lst, dtype=dtype)
 
-def _tensor_list(
-    tensor_kind: TensorKind, *, ignore: Regex | None = None
-) -> ParserElement:
-    tensor = _tensor(tensor_kind, ignore=ignore)
-    ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
+        return np.full((count, nested), lst, dtype=dtype)  # type: ignore[return-value]
 
-    list_ = Forward()
+    count = Opt(common.integer).add_parse_action(process_count)
 
-    list_ <<= Regex(
-        rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern})*{tensor.re.pattern}(?:{ignore_pattern})?\)"
-    ).add_parse_action(
-        lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
+    ret = count.suppress() + (
+        (int_list | float_list) if not force_float else float_list
     )
 
-    def count_parse_action(tks: ParseResults) -> None:
-        nonlocal list_
-        length = tks[0]
-        assert isinstance(length, int)
+    if not force_float:
+        ret |= (
+            common.integer
+            + Literal("{").suppress()
+            + int_element
+            + Literal("}").suppress()
+        ).add_parse_action(lambda tks: to_full_array(tks, dtype=int))
+        ret |= (
+            common.integer
+            + Literal("{").suppress()
+            + float_element
+            + Literal("}").suppress()
+        ).add_parse_action(lambda tks: to_full_array(tks, dtype=float))
 
-        list_ <<= (
-            Regex(
-                rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern}){{{length - 1}}}{tensor.re.pattern}(?:{ignore_pattern})?\)"
-            ).add_parse_action(
-                lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
-            )
-            | Regex(
-                rf"\((?s:({'.' * 8 * tensor_kind.size}|{'.' * 4 * tensor_kind.size}){{{length}}})\)"
-            ).add_parse_action(
-                lambda tks: [
-                    _unpack_binary_field(
-                        tks[0][1:-1].encode("latin-1"), tensor_kind, length=length
                    )
-                ]
-            )
-            | (
-                Literal("{").suppress() + tensor + Literal("}").suppress()
-            ).add_parse_action(
-                lambda tks: [np.full((length, *tensor_kind.shape), tks[0], dtype=float)]
-            )
+    if ignore is not None:
+        ret.ignore(ignore)
+
+    return ret
+
+
+def _binary_field(*, nested: int | None = None) -> ParserElement:
+    elsize = nested if nested is not None else 1
+
+    binary_field = Forward()
+
+    def process_count(tks: ParseResults) -> None:
+        nonlocal binary_field
+        (size,) = tks
+        assert isinstance(size, int)
+
+        binary_field <<= Regex(
+            rf"\((?s:({'.' * 8 * elsize}|{'.' * 4 * elsize}){{{size}}})\)"
        )
 
-    count = common.integer.copy().add_parse_action(count_parse_action)
+    def to_array(
+        tks: ParseResults,
+    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
+        size, s = tks
+        assert isinstance(size, int)
+        assert isinstance(s, str)
+        assert s[0] == "("
+        assert s[-1] == ")"
+        s = s[1:-1]
+
+        float_size = len(s) / elsize / size
+        assert float_size in (4, 8)
+
+        dtype = np.float32 if float_size == 4 else float
+        ret = np.frombuffer(s.encode("latin-1"), dtype=dtype)
+
+        if nested is not None:
+            ret = ret.reshape(-1, nested)
+
+        return ret  # type: ignore[return-value]
 
+    count = common.integer.copy().add_parse_action(process_count)
+
+    return (count + binary_field).add_parse_action(to_array)
+
+
+def _list_of(entry: ParserElement) -> ParserElement:
     return (
-        Opt(Literal("List") + Literal("<") + str(tensor_kind) + Literal(">")).suppress()
-        + Opt(count).suppress()
-        + list_
+        (
+            counted_array(entry, common.integer + Literal("(").suppress())
+            + Literal(")").suppress()
+        ).set_parse_action(lambda tks: [tks.as_list()])
+        | (
+            Literal("(").suppress()
+            + Group((entry)[...], aslist=True)
+            + Literal(")").suppress()
+        )
+        | (
+            common.integer + Literal("{").suppress() + entry + Literal("}").suppress()
+        ).set_parse_action(lambda tks: [[tks[1]] * tks[0]])
    )
 
 
@@ -238,12 +314,11 @@ _SWITCH = (
 _DIMENSIONS = (
     Literal("[").suppress() + common.number[0, 7] + Literal("]").suppress()
 ).set_parse_action(lambda tks: DimensionSet(*tks))
-_TENSOR = (
-    _tensor(TensorKind.SCALAR)
-    | _tensor(TensorKind.VECTOR)
-    | _tensor(TensorKind.SYMM_TENSOR)
-    | _tensor(TensorKind.TENSOR)
-)
+_TENSOR = common.ieee_float | (
+    Literal("(").suppress()
+    + Group(common.ieee_float[3] | common.ieee_float[6] | common.ieee_float[9])
+    + Literal(")").suppress()
+).add_parse_action(lambda tks: np.array(tks[0], dtype=float))
 _PARENTHESIZED = Forward()
 _IDENTIFIER = Combine(Word(_IDENTCHARS, _IDENTBODYCHARS) + Opt(_PARENTHESIZED))
 _PARENTHESIZED <<= Combine(
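The refactor above replaces the per-TensorKind `_tensor`/`_tensor_list` parsers with two generic helpers: `_numeric_list`, which matches an entire ASCII numeric list with a single regex and converts it in one `np.fromstring` call, and `_binary_field`, which slices a fixed-size binary blob with `np.frombuffer`. A rough standalone sketch of the ASCII idea, simplified and assumed rather than the library code (it ignores comments, the counted `N(...)` form, and the uniform `N{value}` form):

import re

import numpy as np

FLOAT = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"


def parse_vector_list(text: str, n: int = 3) -> np.ndarray:
    # Match the whole list "((1 2 3) (4 5 6) ...)" with a single regex ...
    element = rf"\(\s*(?:{FLOAT}\s+){{{n - 1}}}{FLOAT}\s*\)"
    if re.fullmatch(rf"\(\s*(?:{element}\s*)+\)", text.strip()) is None:
        raise ValueError(f"not a list of {n}-vectors: {text!r}")
    # ... then let NumPy convert all the numbers at once and reshape.
    flat = np.fromstring(text.replace("(", " ").replace(")", " "), sep=" ")
    return flat.reshape(-1, n)


print(parse_vector_list("((1 2 3) (4 5 6))").shape)  # (2, 3)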
@@ -258,12 +333,46 @@ _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
 _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
     Keyword("nonuniform", _IDENTBODYCHARS).suppress()
     + (
-        _tensor_list(TensorKind.SCALAR, ignore=_COMMENT)
-        | _tensor_list(TensorKind.VECTOR, ignore=_COMMENT)
-        | _tensor_list(TensorKind.SYMM_TENSOR, ignore=_COMMENT)
-        | _tensor_list(TensorKind.TENSOR, ignore=_COMMENT)
+        (
+            Opt(
+                Literal("List") + Literal("<") + Literal("scalar") + Literal(">")
+            ).suppress()
+            + (_numeric_list(force_float=True, ignore=_COMMENT) | _binary_field())
+        )
+        | (
+            Opt(
+                Literal("List") + Literal("<") + Literal("vector") + Literal(">")
+            ).suppress()
+            + (
+                _numeric_list(nested=3, force_float=True, ignore=_COMMENT)
+                | _binary_field(nested=3)
+            )
+        )
+        | (
+            Opt(
+                Literal("List") + Literal("<") + Literal("symmTensor") + Literal(">")
+            ).suppress()
+            + (
+                _numeric_list(
+                    nested=6,
+                    force_float=True,
+                    ignore=_COMMENT,
+                )
+                | _binary_field(nested=6)
+            )
+        )
+        | (
+            Opt(
+                Literal("List") + Literal("<") + Literal("tensor") + Literal(">")
+            ).suppress()
+            + (
+                _numeric_list(nested=9, force_float=True, ignore=_COMMENT)
+                | _binary_field(nested=9)
+            )
+        )
     )
 )
+
 _DIRECTIVE = Word("#", _IDENTBODYCHARS)
 _TOKEN = dbl_quoted_string | _DIRECTIVE | _IDENTIFIER
 _DATA = Forward()
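With `_FIELD` now built from `_numeric_list` and `_binary_field`, nonuniform field values parse to NumPy arrays. A small hedged illustration using the internal `Parsed` helper that the parsing tests in this diff exercise (internal API, assumed import path, shown for illustration only; the scalar line mirrors `test_list_as_write_cell_centers_short` below, and the vector line is assumed to behave analogously):

import numpy as np

from foamlib._files._parsing import Parsed  # internal helper used by the tests

# Scalar field: parses to a 1-D float array.
value = Parsed(b"value nonuniform List<scalar> 4(250 750 1250 1750);")[("value",)]
assert isinstance(value, np.ndarray) and np.allclose(value, [250, 750, 1250, 1750])

# Vector field (assumed analogous): parses to a (2, 3) array.
u = Parsed(b"value nonuniform List<vector> 2((1 2 3) (4 5 6));")[("value",)]
assert u.shape == (2, 3)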
@@ -271,7 +380,12 @@ _KEYWORD_ENTRY = _keyword_entry_of(_TOKEN | _list_of(_IDENTIFIER), _DATA)
 _DICT = _dict_of(_TOKEN, _DATA)
 _DATA_ENTRY = Forward()
 _LIST_ENTRY = _DICT | _KEYWORD_ENTRY | _DATA_ENTRY
-_LIST = _list_of(_LIST_ENTRY)
+_LIST = (
+    _numeric_list(ignore=_COMMENT)
+    | _numeric_list(nested=3, ignore=_COMMENT)
+    | _numeric_list(nested=4, ignore=_COMMENT)
+    | _list_of(_LIST_ENTRY)
+)
 _NUMBER = (
     common.number
     | CaselessKeyword("nan").set_parse_action(lambda: np.nan)
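`_LIST` now tries the numeric fast path (flat scalars plus nested lists of 3 or 4 components) before falling back to the generic `_list_of` parser, which is why the updated tests below expect `np.ndarray` rather than nested Python lists. Another hedged sketch with the same internal helper (the integer-dtype detail is inferred from the new `int`/`float` branches in `_numeric_list`):

import numpy as np

from foamlib._files._parsing import Parsed  # internal; same caveat as above

pts = Parsed(b"points ((0 0 0) (1 0 0) (1 1 0));")[("points",)]
assert isinstance(pts, np.ndarray) and pts.shape == (3, 3)

sizes = Parsed(b"sizes (20 20 1);")[("sizes",)]
assert isinstance(sizes, np.ndarray)  # plain integer list -> integer array (inferred)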
{foamlib-0.8.10 → foamlib-0.9.0}/pyproject.toml

@@ -39,36 +39,34 @@ dependencies = [
 dynamic = ["version"]
 
 [dependency-groups]
-dev = ["foamlib[dev]"]
-lint = ["foamlib[lint]"]
-test = ["foamlib[test]"]
-typing = ["foamlib[typing]"]
-docs = ["foamlib[docs]"]
-
-[project.optional-dependencies]
 lint = ["ruff"]
-test = [
+typing = [
     "mypy>=1,<2",
+    "scipy-stubs; python_version>='3.10'",
+]
+test = [
     "pytest>=7,<9",
     "pytest-asyncio>=0.21,<0.27",
     "pytest-cov",
     "scipy>=1,<2",
 ]
-typing = [
-    "foamlib[test]",
-    "mypy>=1,<2",
-    "scipy-stubs; python_version>='3.10'",
-]
 docs = [
     "ruff",
     "sphinx>=5,<9",
     "sphinx_rtd_theme",
 ]
 dev = [
-    "foamlib[lint]",
-    "foamlib[test]",
-    "foamlib[typing]",
-    "foamlib[docs]",
+    {include-group = "lint"},
+    {include-group = "typing"},
+    {include-group = "test"},
+    {include-group = "docs"},
+]
+
+[project.optional-dependencies]
+docs = [
+    "ruff",
+    "sphinx>=5,<9",
+    "sphinx_rtd_theme",
 ]
 
 [project.urls]
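The tooling extras move out of `[project.optional-dependencies]` and into PEP 735 `[dependency-groups]`, with `dev` now composed via `include-group`; only the `docs` extra is still published, which matches the PKG-INFO hunk above. Contributor setups therefore need a group-aware installer rather than `pip install foamlib[dev]` — for example, a recent uv should pick the groups up with `uv sync` (the `dev` group) or `uv sync --group docs`.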
@@ -80,7 +78,10 @@ Documentation = "https://foamlib.readthedocs.io"
 path = "foamlib/__init__.py"
 
 [tool.mypy]
-packages = ["foamlib"]
+packages = [
+    "foamlib",
+    "tests",
+]
 strict = true
 
 [tool.ruff.lint]
{foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_files.py

@@ -3,9 +3,9 @@ import sys
 from pathlib import Path
 
 if sys.version_info >= (3, 9):
-    from collections.abc import Generator, Sequence
+    from collections.abc import Generator
 else:
-    from typing import Generator, Sequence
+    from typing import Generator
 
 import numpy as np
 import pytest
@@ -153,16 +153,16 @@ def test_mesh(cavity: FoamCase) -> None:
 
     points = file[None]
 
-    assert isinstance(points, Sequence)
-    assert isinstance(points[0], Sequence)
-    assert len(points[0]) == 3
+    assert isinstance(points, np.ndarray)
+    assert points.ndim == 2
+    assert points.shape[-1] == 3
 
 
 def test_internal_field(cavity: FoamCase) -> None:
     blocks = cavity.block_mesh_dict["blocks"]
     assert isinstance(blocks, list)
     sizes = blocks[2]
-    assert isinstance(sizes, list)
+    assert isinstance(sizes, np.ndarray)
     size = np.prod(sizes)
 
     p_arr = np.zeros(size)
{foamlib-0.8.10 → foamlib-0.9.0}/tests/test_files/test_parsing/…

@@ -17,10 +17,10 @@ def test_dict_strange_name() -> None:
     """
     )
     assert parsed[
-        (("div(phi,ft_b_ha_hau)", "Gauss", "multivariateSelection"), "ft")
+        (("div(phi,ft_b_ha_hau)", "Gauss", "multivariateSelection"), "ft")  # type: ignore[index]
     ] == ("limitedLinear01", 1)
     assert parsed[
-        (("div(phi,ft_b_ha_hau)", "Gauss", "multivariateSelection"), "b")
+        (("div(phi,ft_b_ha_hau)", "Gauss", "multivariateSelection"), "b")  # type: ignore[index]
     ] == ("limitedLinear01", 1)
 
 
@@ -263,7 +263,7 @@ def test_list_triple_named() -> None:
     """
     )
     assert parsed[("velocity-inlet-5", "type")] == "fixedValue"
-    assert np.allclose(parsed[("velocity-inlet-5", "value")], [1, 0, 0])
+    assert parsed[("velocity-inlet-5", "value")] == pytest.approx([1, 0, 0])
 
 
 def test_assignment_strange_name() -> None:
@@ -277,9 +277,10 @@ def test_assignment_strange_name() -> None:
     """
     )
     assert parsed[("equations", '"(U|e|k).*"')] == 0.7
-    assert parsed[("equations", '"(U|e|k|epsilon).*"')] == (
-        "table",
-        [[0, 0.4], [0.5, 0.7]],
+    assert isinstance(parsed[("equations", '"(U|e|k|epsilon).*"')], tuple)
+    assert parsed[("equations", '"(U|e|k|epsilon).*"')][0] == "table"
+    assert np.array_equal(
+        parsed[("equations", '"(U|e|k|epsilon).*"')][1], [[0, 0.4], [0.5, 0.7]]
     )
 
 
@@ -332,17 +333,23 @@ def test_list_u() -> None:
     )
     """
     )
-    assert parsed[()] == [
-        [4.50773, 1.79963, 0.0],
-        [6.06208, 2.40831, 0.0],
-        [6.874, 2.72079, 0.0],
-        [7.42929, 2.931, 0.0],
-        [7.85095, 3.08805, 0.0],
-        [8.19202, 3.21306, 0.0],
-        [17.5, 1.92559e-09, 0.0],
-        [17.5, 6.81045e-12, 0.0],
-        [17.5, 6.81045e-12, 0.0],
-    ]
+    data = parsed[()]
+    assert isinstance(data, np.ndarray)
+    assert data.shape == (9, 3)
+    assert np.array_equal(
+        data,
+        [
+            [4.507730000e00, 1.799630000e00, 0.000000000e00],
+            [6.062080000e00, 2.408310000e00, 0.000000000e00],
+            [6.874000000e00, 2.720790000e00, 0.000000000e00],
+            [7.429290000e00, 2.931000000e00, 0.000000000e00],
+            [7.850950000e00, 3.088050000e00, 0.000000000e00],
+            [8.192020000e00, 3.213060000e00, 0.000000000e00],
+            [1.750000000e01, 1.925590000e-09, 0.000000000e00],
+            [1.750000000e01, 6.810450000e-12, 0.000000000e00],
+            [1.750000000e01, 6.810450000e-12, 0.000000000e00],
+        ],
+    )
 
 
 def test_list_as_write_cell_centers() -> None:
@@ -356,9 +363,9 @@ def test_list_as_write_cell_centers() -> None:
     );
     """
     )
-    assert np.allclose(parsed[("value",)], [47.619, 142.857])
+    assert parsed[("value",)] == pytest.approx([47.619, 142.857])
 
 
 def test_list_as_write_cell_centers_short() -> None:
     parsed = Parsed(b"value nonuniform List<scalar> 4(250 750 1250 1750);")
-    assert np.allclose(parsed[("value",)], [250, 750, 1250, 1750])
+    assert parsed[("value",)] == pytest.approx([250, 750, 1250, 1750])
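The assertion changes in these tests follow from the parser now returning NumPy arrays: a bare `==` against a list gives an elementwise array, so the tests go through `pytest.approx` (which accepts arrays) or `np.array_equal`. A minimal illustration of the idiom, with hypothetical data:

import numpy as np
import pytest

parsed_value = np.array([250.0, 750.0, 1250.0, 1750.0])

# assert parsed_value == [250, 750, 1250, 1750]  # ambiguous truth value -> ValueError
assert parsed_value == pytest.approx([250, 750, 1250, 1750])
assert np.array_equal(parsed_value, [250, 750, 1250, 1750])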