foamlib-0.8.10-py3-none-any.whl → foamlib-0.9.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
foamlib/__init__.py CHANGED
@@ -1,6 +1,6 @@
  """A Python interface for interacting with OpenFOAM."""
 
- __version__ = "0.8.10"
+ __version__ = "0.9.0"
 
  from ._cases import (
  AsyncFoamCase,
foamlib/_files/_parsing.py CHANGED
@@ -25,6 +25,7 @@ from pyparsing import (
  LineEnd,
  Literal,
  Located,
+ NoMatch,
  Opt,
  ParserElement,
  ParseResults,
@@ -37,119 +38,194 @@ from pyparsing import (
  printables,
  )
 
- from ._types import Data, Dimensioned, DimensionSet, File, TensorKind
+ from ._types import Data, Dimensioned, DimensionSet, File
 
 
- def _tensor(tensor_kind: TensorKind, *, ignore: Regex | None = None) -> Regex:
+ def _numeric_list(
+ *, nested: int | None = None, ignore: Regex | None = None, force_float: bool = False
+ ) -> ParserElement:
+ if not force_float:
+ int_pattern = r"(?:-?\d+)"
  float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
+ spacing_pattern = (
+ rf"(?:(?:\s|{ignore.re.pattern})+)" if ignore is not None else r"\s+"
+ )
 
- if tensor_kind == TensorKind.SCALAR:
- ret = Regex(float_pattern)
- ret.add_parse_action(lambda tks: [float(tks[0])])
- return ret
+ if nested is None:
+ if not force_float:
+ int_element_pattern = int_pattern
+ int_element = common.integer
+ float_element_pattern = float_pattern
+ float_element = common.ieee_float
+ else:
+ if not force_float:
+ int_element_pattern = rf"(?:(?:{nested})?{spacing_pattern}?\({spacing_pattern}?(?:{int_pattern}{spacing_pattern}){{{nested - 1}}}{int_pattern}{spacing_pattern}?\))"
+ int_element = (
+ Opt(Literal(str(nested))).suppress()
+ + Literal("(").suppress()
+ + Group(common.integer[nested])
+ + Literal(")").suppress()
+ )
+ float_element_pattern = rf"(?:(?:{nested})?{spacing_pattern}?\({spacing_pattern}?(?:{float_pattern}{spacing_pattern}){{{nested - 1}}}{float_pattern}{spacing_pattern}?\))"
+ float_element = (
+ Opt(Literal(str(nested))).suppress()
+ + Literal("(").suppress()
+ + Group(common.ieee_float[nested])
+ + Literal(")").suppress()
+ )
 
- ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
+ if not force_float:
+ int_list = Forward()
+ float_list = Forward()
 
- ret = Regex(
- rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{tensor_kind.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
- )
- ret.add_parse_action(
- lambda tks: np.fromstring(
- re.sub(ignore.re, " ", tks[0][1:-1])
- if ignore is not None
- else tks[0][1:-1],
- sep=" ",
- )
- )
- return ret
+ def process_count(tks: ParseResults) -> None:
+ nonlocal int_list, float_list
 
+ if not tks:
+ count = None
+ else:
+ (count,) = tks
+ assert isinstance(count, int)
 
- def _list_of(entry: ParserElement) -> ParserElement:
- return Opt(
- Literal("List") + Literal("<") + _IDENTIFIER + Literal(">")
- ).suppress() + (
- (
- counted_array(entry, common.integer + Literal("(").suppress())
- + Literal(")").suppress()
- ).set_parse_action(lambda tks: [tks.as_list()])
- | (
- Literal("(").suppress()
- + Group((entry)[...], aslist=True)
- + Literal(")").suppress()
+ if count is None:
+ if not force_float:
+ int_list_pattern = rf"\({spacing_pattern}?(?:{int_element_pattern}{spacing_pattern})*{int_element_pattern}{spacing_pattern}?\)"
+ float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern})*{float_element_pattern}{spacing_pattern}?\)"
+ else:
+ float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern})*{float_element_pattern}?{spacing_pattern}?\)"
+
+ elif count == 0:
+ if not force_float:
+ int_list <<= NoMatch()
+ float_list <<= NoMatch()
+ else:
+ float_list <<= (Literal("(") + Literal(")")).add_parse_action(
+ lambda: np.empty((0, nested) if nested else 0, dtype=float)
+ )
+ return
+
+ else:
+ if not force_float:
+ int_list_pattern = rf"\({spacing_pattern}?(?:{int_element_pattern}{spacing_pattern}){{{count - 1}}}{int_element_pattern}{spacing_pattern}?\)"
+ float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern}){{{count - 1}}}{float_element_pattern}{spacing_pattern}?\)"
+
+ if not force_float:
+ int_list <<= Regex(int_list_pattern).add_parse_action(
+ lambda tks: to_array(tks, dtype=int)
+ )
+ float_list <<= Regex(float_list_pattern).add_parse_action(
+ lambda tks: to_array(tks, dtype=float)
  )
- | (
- common.integer + Literal("{").suppress() + entry + Literal("}").suppress()
- ).set_parse_action(lambda tks: [[tks[1]] * tks[0]])
- )
 
+ def to_array(
+ tks: ParseResults, *, dtype: type
+ ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]]:
+ (s,) = tks
+ s = s.replace("(", "").replace(")", "")
 
- def _parse_ascii_field(
- s: str, tensor_kind: TensorKind, *, ignore: Regex | None
- ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64]]:
- if ignore is not None:
- s = re.sub(ignore.re, " ", s)
- s = s.replace("(", " ").replace(")", " ")
+ if ignore is not None:
+ s = re.sub(ignore.re, " ", s)
 
- return np.fromstring(s, sep=" ").reshape(-1, *tensor_kind.shape) # type: ignore [return-value]
+ ret: np.ndarray[
+ tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]
+ ] = np.fromstring(s, sep=" ", dtype=dtype) # type: ignore[assignment]
 
+ if nested is not None:
+ ret = ret.reshape(-1, nested)
 
- def _unpack_binary_field(
- b: bytes, tensor_kind: TensorKind, *, length: int
- ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
- float_size = len(b) / tensor_kind.size / length
- assert float_size in (4, 8)
+ return ret
 
- dtype = np.float32 if float_size == 4 else float
- return np.frombuffer(b, dtype=dtype).reshape(-1, *tensor_kind.shape) # type: ignore [return-value]
+ def to_full_array(
+ tks: ParseResults, *, dtype: type
+ ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]]:
+ count, lst = tks
+ assert isinstance(count, int)
 
+ if nested is None:
+ return np.full(count, lst, dtype=dtype)
 
- def _tensor_list(
- tensor_kind: TensorKind, *, ignore: Regex | None = None
- ) -> ParserElement:
- tensor = _tensor(tensor_kind, ignore=ignore)
- ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
+ return np.full((count, nested), lst, dtype=dtype) # type: ignore[return-value]
 
- list_ = Forward()
+ count = Opt(common.integer).add_parse_action(process_count)
 
- list_ <<= Regex(
- rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern})*{tensor.re.pattern}(?:{ignore_pattern})?\)"
- ).add_parse_action(
- lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
+ ret = count.suppress() + (
+ (int_list | float_list) if not force_float else float_list
  )
 
- def count_parse_action(tks: ParseResults) -> None:
- nonlocal list_
- length = tks[0]
- assert isinstance(length, int)
+ if not force_float:
+ ret |= (
+ common.integer
+ + Literal("{").suppress()
+ + int_element
+ + Literal("}").suppress()
+ ).add_parse_action(lambda tks: to_full_array(tks, dtype=int))
+ ret |= (
+ common.integer
+ + Literal("{").suppress()
+ + float_element
+ + Literal("}").suppress()
+ ).add_parse_action(lambda tks: to_full_array(tks, dtype=float))
 
- list_ <<= (
- Regex(
- rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern}){{{length - 1}}}{tensor.re.pattern}(?:{ignore_pattern})?\)"
- ).add_parse_action(
- lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
- )
- | Regex(
- rf"\((?s:({'.' * 8 * tensor_kind.size}|{'.' * 4 * tensor_kind.size}){{{length}}})\)"
- ).add_parse_action(
- lambda tks: [
- _unpack_binary_field(
- tks[0][1:-1].encode("latin-1"), tensor_kind, length=length
- )
- ]
- )
- | (
- Literal("{").suppress() + tensor + Literal("}").suppress()
- ).add_parse_action(
- lambda tks: [np.full((length, *tensor_kind.shape), tks[0], dtype=float)]
- )
+ if ignore is not None:
+ ret.ignore(ignore)
+
+ return ret
+
+
+ def _binary_field(*, nested: int | None = None) -> ParserElement:
+ elsize = nested if nested is not None else 1
+
+ binary_field = Forward()
+
+ def process_count(tks: ParseResults) -> None:
+ nonlocal binary_field
+ (size,) = tks
+ assert isinstance(size, int)
+
+ binary_field <<= Regex(
+ rf"\((?s:({'.' * 8 * elsize}|{'.' * 4 * elsize}){{{size}}})\)"
  )
 
- count = common.integer.copy().add_parse_action(count_parse_action)
+ def to_array(
+ tks: ParseResults,
+ ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
+ size, s = tks
+ assert isinstance(size, int)
+ assert isinstance(s, str)
+ assert s[0] == "("
+ assert s[-1] == ")"
+ s = s[1:-1]
+
+ float_size = len(s) / elsize / size
+ assert float_size in (4, 8)
+
+ dtype = np.float32 if float_size == 4 else float
+ ret = np.frombuffer(s.encode("latin-1"), dtype=dtype)
+
+ if nested is not None:
+ ret = ret.reshape(-1, nested)
+
+ return ret # type: ignore[return-value]
 
+ count = common.integer.copy().add_parse_action(process_count)
+
+ return (count + binary_field).add_parse_action(to_array)
+
+
+ def _list_of(entry: ParserElement) -> ParserElement:
  return (
- Opt(Literal("List") + Literal("<") + str(tensor_kind) + Literal(">")).suppress()
- + Opt(count).suppress()
- + list_
+ (
+ counted_array(entry, common.integer + Literal("(").suppress())
+ + Literal(")").suppress()
+ ).set_parse_action(lambda tks: [tks.as_list()])
+ | (
+ Literal("(").suppress()
+ + Group((entry)[...], aslist=True)
+ + Literal(")").suppress()
+ )
+ | (
+ common.integer + Literal("{").suppress() + entry + Literal("}").suppress()
+ ).set_parse_action(lambda tks: [[tks[1]] * tks[0]])
  )
 
 
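Note: the core of the new _numeric_list() helper is to match an entire parenthesized numeric list with a single Regex and convert it to a NumPy array in one parse action, rather than parsing element by element as the old _tensor()/_tensor_list() pair did. A minimal standalone sketch of that pattern (illustrative only, not foamlib's internal API; the names below are made up for the example):

import numpy as np
from pyparsing import Regex

# One regex for the whole "(...)" list, then a bulk conversion with
# np.fromstring inside the parse action.
float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
list_pattern = rf"\(\s*(?:{float_pattern}\s+)*{float_pattern}\s*\)"

scalar_list = Regex(list_pattern).add_parse_action(
    lambda tks: [np.fromstring(tks[0][1:-1], sep=" ")]  # strip outer parentheses
)

print(scalar_list.parse_string("(1 2.5 3e-2)")[0])  # -> [1.   2.5  0.03]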
@@ -238,12 +314,11 @@ _SWITCH = (
  _DIMENSIONS = (
  Literal("[").suppress() + common.number[0, 7] + Literal("]").suppress()
  ).set_parse_action(lambda tks: DimensionSet(*tks))
- _TENSOR = (
- _tensor(TensorKind.SCALAR)
- | _tensor(TensorKind.VECTOR)
- | _tensor(TensorKind.SYMM_TENSOR)
- | _tensor(TensorKind.TENSOR)
- )
+ _TENSOR = common.ieee_float | (
+ Literal("(").suppress()
+ + Group(common.ieee_float[3] | common.ieee_float[6] | common.ieee_float[9])
+ + Literal(")").suppress()
+ ).add_parse_action(lambda tks: np.array(tks[0], dtype=float))
  _PARENTHESIZED = Forward()
  _IDENTIFIER = Combine(Word(_IDENTCHARS, _IDENTBODYCHARS) + Opt(_PARENTHESIZED))
  _PARENTHESIZED <<= Combine(
@@ -258,12 +333,46 @@ _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
  _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
  Keyword("nonuniform", _IDENTBODYCHARS).suppress()
  + (
- _tensor_list(TensorKind.SCALAR, ignore=_COMMENT)
- | _tensor_list(TensorKind.VECTOR, ignore=_COMMENT)
- | _tensor_list(TensorKind.SYMM_TENSOR, ignore=_COMMENT)
- | _tensor_list(TensorKind.TENSOR, ignore=_COMMENT)
+ (
+ Opt(
+ Literal("List") + Literal("<") + Literal("scalar") + Literal(">")
+ ).suppress()
+ + (_numeric_list(force_float=True, ignore=_COMMENT) | _binary_field())
+ )
+ | (
+ Opt(
+ Literal("List") + Literal("<") + Literal("vector") + Literal(">")
+ ).suppress()
+ + (
+ _numeric_list(nested=3, force_float=True, ignore=_COMMENT)
+ | _binary_field(nested=3)
+ )
+ )
+ | (
+ Opt(
+ Literal("List") + Literal("<") + Literal("symmTensor") + Literal(">")
+ ).suppress()
+ + (
+ _numeric_list(
+ nested=6,
+ force_float=True,
+ ignore=_COMMENT,
+ )
+ | _binary_field(nested=6)
+ )
+ )
+ | (
+ Opt(
+ Literal("List") + Literal("<") + Literal("tensor") + Literal(">")
+ ).suppress()
+ + (
+ _numeric_list(nested=9, force_float=True, ignore=_COMMENT)
+ | _binary_field(nested=9)
+ )
+ )
  )
  )
+
  _DIRECTIVE = Word("#", _IDENTBODYCHARS)
  _TOKEN = dbl_quoted_string | _DIRECTIVE | _IDENTIFIER
  _DATA = Forward()
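Note: for binary fields, the new _binary_field() element first reads the element count, then matches exactly count × width bytes (4 or 8 per component) between parentheses and reinterprets them with np.frombuffer. A rough sketch of that decoding step, assuming the payload arrives as a latin-1 decoded string the way the parser sees it (the helper name here is hypothetical):

import numpy as np

def decode_binary_field(payload: str, *, count: int, nested: int = 3) -> np.ndarray:
    """Hypothetical helper mirroring the decode step in _binary_field()."""
    data = payload[1:-1].encode("latin-1")      # drop "(" and ")", back to raw bytes
    float_size = len(data) // (count * nested)  # 4 (float32) or 8 (float64) per component
    dtype = np.float32 if float_size == 4 else np.float64
    return np.frombuffer(data, dtype=dtype).reshape(count, nested)

# Two float64 vectors, serialized the way the parser would see them:
raw = "(" + np.arange(6, dtype=np.float64).tobytes().decode("latin-1") + ")"
print(decode_binary_field(raw, count=2))  # [[0. 1. 2.] [3. 4. 5.]]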
@@ -271,7 +380,12 @@ _KEYWORD_ENTRY = _keyword_entry_of(_TOKEN | _list_of(_IDENTIFIER), _DATA)
  _DICT = _dict_of(_TOKEN, _DATA)
  _DATA_ENTRY = Forward()
  _LIST_ENTRY = _DICT | _KEYWORD_ENTRY | _DATA_ENTRY
- _LIST = _list_of(_LIST_ENTRY)
+ _LIST = (
+ _numeric_list(ignore=_COMMENT)
+ | _numeric_list(nested=3, ignore=_COMMENT)
+ | _numeric_list(nested=4, ignore=_COMMENT)
+ | _list_of(_LIST_ENTRY)
+ )
  _NUMBER = (
  common.number
  | CaselessKeyword("nan").set_parse_action(lambda: np.nan)
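Note: for reference, these are the kinds of ASCII inputs the rewired _FIELD and _LIST grammar is built to accept (standard OpenFOAM forms; the strings below are illustrative examples, not test data from the package):

field_examples = [
    "uniform (0 0 0)",                             # uniform vector value
    "nonuniform List<scalar> 3(1.0 2.0 3.0)",      # counted scalar list
    "nonuniform List<vector> 2((1 0 0) (0 1 0))",  # counted vector list (nested=3)
    "nonuniform List<scalar> 4{0}",                # count{value} shorthand
    "(1 2 3)",                                     # bare numeric list handled by _LIST
]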
foamlib-0.8.10.dist-info/METADATA → foamlib-0.9.0.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: foamlib
- Version: 0.8.10
+ Version: 0.9.0
  Summary: A Python interface for interacting with OpenFOAM
  Project-URL: Homepage, https://github.com/gerlero/foamlib
  Project-URL: Repository, https://github.com/gerlero/foamlib
@@ -30,35 +30,10 @@ Requires-Dist: numpy<3,>=1
  Requires-Dist: pyparsing<4,>=3.1.2
  Requires-Dist: rich<15,>=13
  Requires-Dist: typing-extensions<5,>=4; python_version < '3.11'
- Provides-Extra: dev
- Requires-Dist: mypy<2,>=1; extra == 'dev'
- Requires-Dist: pytest-asyncio<0.27,>=0.21; extra == 'dev'
- Requires-Dist: pytest-cov; extra == 'dev'
- Requires-Dist: pytest<9,>=7; extra == 'dev'
- Requires-Dist: ruff; extra == 'dev'
- Requires-Dist: scipy-stubs; (python_version >= '3.10') and extra == 'dev'
- Requires-Dist: scipy<2,>=1; extra == 'dev'
- Requires-Dist: sphinx-rtd-theme; extra == 'dev'
- Requires-Dist: sphinx<9,>=5; extra == 'dev'
  Provides-Extra: docs
  Requires-Dist: ruff; extra == 'docs'
  Requires-Dist: sphinx-rtd-theme; extra == 'docs'
  Requires-Dist: sphinx<9,>=5; extra == 'docs'
- Provides-Extra: lint
- Requires-Dist: ruff; extra == 'lint'
- Provides-Extra: test
- Requires-Dist: mypy<2,>=1; extra == 'test'
- Requires-Dist: pytest-asyncio<0.27,>=0.21; extra == 'test'
- Requires-Dist: pytest-cov; extra == 'test'
- Requires-Dist: pytest<9,>=7; extra == 'test'
- Requires-Dist: scipy<2,>=1; extra == 'test'
- Provides-Extra: typing
- Requires-Dist: mypy<2,>=1; extra == 'typing'
- Requires-Dist: pytest-asyncio<0.27,>=0.21; extra == 'typing'
- Requires-Dist: pytest-cov; extra == 'typing'
- Requires-Dist: pytest<9,>=7; extra == 'typing'
- Requires-Dist: scipy-stubs; (python_version >= '3.10') and extra == 'typing'
- Requires-Dist: scipy<2,>=1; extra == 'typing'
  Description-Content-Type: text/markdown
 
  [<img alt="foamlib" src="https://github.com/gerlero/foamlib/raw/main/logo.png" height="65">](https://github.com/gerlero/foamlib)
foamlib-0.8.10.dist-info/RECORD → foamlib-0.9.0.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- foamlib/__init__.py,sha256=CVxAhmVRk4kg59EjIjtiZRj8zM9g5pkxbYp0lSYiwB4,453
+ foamlib/__init__.py,sha256=gSk-v_a_STLCffHq4oHQOi00Pj2TrxcMBOThu5S7ATY,452
  foamlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  foamlib/_cases/__init__.py,sha256=_A1TTHuQfS9FH2_33lSEyLtOJZGFHZBco1tWJCVOHks,358
  foamlib/_cases/_async.py,sha256=e4lGTcQBbFGwfG6SmJks5aa5LWd_0dy01kgKZWAgTGQ,11655
@@ -11,10 +11,10 @@ foamlib/_cases/_util.py,sha256=QCizfbuJdOCeF9ogU2R-y-iWX5kfaOA4U2W68t6QlOM,2544
  foamlib/_files/__init__.py,sha256=q1vkjXnjnSZvo45jPAICpWeF2LZv5V6xfzAR6S8fS5A,96
  foamlib/_files/_files.py,sha256=gSJQjvB1f7N2yJtCTx9kpivKqSSNjDj37qNMpned5CM,19505
  foamlib/_files/_io.py,sha256=BGbbm6HKxL2ka0YMCmHqZQZ1R4PPQlkvWWb4FHMAS8k,2217
- foamlib/_files/_parsing.py,sha256=oTPQftA3DkcZI0A16TzXakwjDDqWpgq8DEasEYvaO1g,14109
+ foamlib/_files/_parsing.py,sha256=VLPyK11J57C02zJu1YfiLR0Xv1qRxw0sYYNsmmluFg0,17880
  foamlib/_files/_serialization.py,sha256=QJ-F6BKizVe0gpjnpIfPxNGTqWwalY4PQtCKdDY9D70,5502
  foamlib/_files/_types.py,sha256=PDhFW5hUzcoQsLx7M0Va1oaYV6km02jFgrvKJof0JKQ,3750
- foamlib-0.8.10.dist-info/METADATA,sha256=LCF_5y-idjrYW5gGzFdAITkkswHn2JD0MkhEA1OmwqA,14015
- foamlib-0.8.10.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- foamlib-0.8.10.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
- foamlib-0.8.10.dist-info/RECORD,,
+ foamlib-0.9.0.dist-info/METADATA,sha256=xyisPf6vPUB497hmKtJngw_Z2QiuX7DCSvzZI5T3gQg,12906
+ foamlib-0.9.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ foamlib-0.9.0.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
+ foamlib-0.9.0.dist-info/RECORD,,