foamlib 0.9.0-py3-none-any.whl → 0.9.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
foamlib/__init__.py CHANGED
@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.9.0"
+__version__ = "0.9.2"
 
 from ._cases import (
     AsyncFoamCase,
foamlib/_files/_files.py CHANGED
@@ -17,9 +17,11 @@ else:
 import numpy as np
 
 from ._io import FoamFileIO
-from ._serialization import Kind, dumps, normalize
+from ._parsing import loads
+from ._serialization import dumps, normalize_data, normalize_keyword
 from ._types import (
     Data,
+    DataLike,
     Dict_,
     Dimensioned,
     DimensionSet,
@@ -31,6 +33,32 @@ from ._types import (
 )
 
 
+def _tensor_kind_for_field(
+    field: FieldLike,
+) -> str:
+    shape = np.shape(field)  # type: ignore [arg-type]
+    if not shape:
+        return "scalar"
+    if shape == (3,):
+        return "vector"
+    if shape == (6,):
+        return "symmTensor"
+    if shape == (9,):
+        return "tensor"
+    if len(shape) == 1:
+        return "scalar"
+    if len(shape) == 2:
+        if shape[1] == 3:
+            return "vector"
+        if shape[1] == 6:
+            return "symmTensor"
+        if shape[1] == 9:
+            return "tensor"
+
+    msg = f"Invalid field shape: {shape}"
+    raise ValueError(msg)
+
+
 class FoamFile(
     MutableMapping[
         Optional[Union[str, Tuple[str, ...]]],
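Note: the new `_tensor_kind_for_field` helper centralizes the shape-to-kind mapping that `__setitem__` previously inlined (and that the now-removed `_types.TensorKind` duplicated; see below). A minimal sketch of the mapping in plain NumPy terms:

    import numpy as np

    # Shapes accepted by _tensor_kind_for_field and the kind each maps to:
    #   ()     -> "scalar"      (a single number)
    #   (3,)   -> "vector"      (checked before the generic 1-D case)
    #   (6,)   -> "symmTensor"
    #   (9,)   -> "tensor"
    #   (n,)   -> "scalar"      (a list of scalars)
    #   (n, 3) -> "vector"      (a list of vectors; likewise 6 and 9)
    assert np.shape(2.0) == ()
    assert np.shape([1.0, 2.0, 3.0]) == (3,)
    assert np.shape(np.zeros((10, 9))) == (10, 9)  # -> "tensor"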
@@ -261,7 +289,7 @@ class FoamFile(
         elif not isinstance(keywords, tuple):
             keywords = (keywords,)
 
-        if keywords and not isinstance(normalize(keywords[-1], kind=Kind.KEYWORD), str):
+        if keywords and not isinstance(normalize_keyword(keywords[-1]), str):
             msg = f"Invalid keyword: {keywords[-1]}"
             raise ValueError(msg)
 
@@ -283,52 +311,26 @@ class FoamFile(
                     self.path.stem if self.path.suffix == ".gz" else self.path.name
                 )
 
-            kind = Kind.DEFAULT
-            if keywords == ("internalField",) or (
-                len(keywords) == 3
-                and keywords[0] == "boundaryField"
-                and (
-                    keywords[2] in ("value", "gradient")
-                    or keywords[2].endswith("Value")
-                    or keywords[2].endswith("Gradient")
-                )
-            ):
-                kind = (
-                    Kind.BINARY_FIELD if self.format == "binary" else Kind.ASCII_FIELD
-                )
-            elif keywords == ("dimensions",):
-                kind = Kind.DIMENSIONS
-
             if (
-                kind in (Kind.ASCII_FIELD, Kind.BINARY_FIELD)
+                keywords == ("internalField",)
+                or (
+                    len(keywords) == 3
+                    and keywords[0] == "boundaryField"
+                    and (
+                        keywords[2] == "value"
+                        or keywords[2] == "gradient"
+                        or keywords[2].endswith(("Value", "Gradient"))
+                    )
+                )
             ) and self.class_ == "dictionary":
                 try:
-                    shape = np.shape(data)  # type: ignore [arg-type]
+                    tensor_kind = _tensor_kind_for_field(data)  # type: ignore [arg-type]
                 except ValueError:
                     pass
                 else:
-                    if not shape:
-                        self.class_ = "volScalarField"
-                    elif shape == (3,):
-                        self.class_ = "volVectorField"
-                    elif shape == (6,):
-                        self.class_ = "volSymmTensorField"
-                    elif shape == (9,):
-                        self.class_ = "volTensorField"
-                    elif len(shape) == 1:
-                        self.class_ = "volScalarField"
-                    elif len(shape) == 2:
-                        if shape[1] == 3:
-                            self.class_ = "volVectorField"
-                        elif shape[1] == 6:
-                            self.class_ = "volSymmTensorField"
-                        elif shape[1] == 9:
-                            self.class_ = "volTensorField"
-
-            if kind == Kind.ASCII_FIELD and self.class_.endswith("scalarField"):
-                kind = Kind.SCALAR_ASCII_FIELD
-            elif kind == Kind.BINARY_FIELD and self.class_.endswith("scalarField"):
-                kind = Kind.SCALAR_BINARY_FIELD
+                    self.class_ = (
+                        "vol" + tensor_kind[0].upper() + tensor_kind[1:] + "Field"
+                    )
 
         parsed = self._get_parsed(missing_ok=True)
 
@@ -360,7 +362,7 @@ class FoamFile(
                     ...,
                     before
                     + indentation
-                    + dumps(keywords[-1])
+                    + dumps(normalize_keyword(keywords[-1]))
                     + b"\n"
                     + indentation
                     + b"{\n"
@@ -373,23 +375,37 @@ class FoamFile(
                     self[(*keywords, k)] = v
 
             elif keywords:
-                val = dumps(data, kind=kind)
+                header = self.get("FoamFile", None)
+                assert header is None or isinstance(header, FoamFile.SubDict)
+                val = dumps(
+                    data,
+                    keywords=keywords,
+                    header=header,
+                )
                 parsed.put(
                     keywords,
-                    normalize(data, kind=kind),
+                    normalize_data(data, keywords=keywords),
                     before
                     + indentation
-                    + dumps(keywords[-1])
+                    + dumps(normalize_keyword(keywords[-1]))
                     + ((b" " + val) if val else b"")
                     + (b";" if not keywords[-1].startswith("#") else b"")
                     + after,
                 )
 
             else:
+                header = self.get("FoamFile", None)
+                assert header is None or isinstance(header, FoamFile.SubDict)
                 parsed.put(
                     (),
-                    normalize(data, kind=kind),
-                    before + dumps(data, kind=kind) + after,
+                    normalize_data(data, keywords=keywords),
+                    before
+                    + dumps(
+                        data,
+                        keywords=(),
+                        header=header,
+                    )
+                    + after,
                 )
 
     def __delitem__(self, keywords: str | tuple[str, ...] | None) -> None:
@@ -442,6 +458,85 @@ class FoamFile(
         d.pop("FoamFile", None)
         return deepcopy(d)
 
+    @staticmethod
+    def loads(
+        s: bytes | str,
+        *,
+        include_header: bool = False,
+    ) -> File | Data:
+        """
+        Standalone deserializing function.
+
+        Deserialize the OpenFOAM FoamFile format to Python objects.
+
+        :param s: The string to deserialize. This can be a dictionary, list, or any
+            other object that can be serialized to the OpenFOAM format.
+        :param include_header: Whether to include the "FoamFile" header in the output.
+            If `True`, the header will be included if it is present in the input object.
+        """
+        ret = loads(s)
+
+        if not include_header and isinstance(ret, Mapping) and "FoamFile" in ret:
+            del ret["FoamFile"]
+            if len(ret) == 1 and None in ret:
+                val = ret[None]
+                assert not isinstance(val, Mapping)
+                return val
+
+        return ret
+
+    @staticmethod
+    def dumps(file: File | DataLike, *, ensure_header: bool = True) -> bytes:
+        """
+        Standalone serializing function.
+
+        Serialize Python objects to the OpenFOAM FoamFile format.
+
+        :param file: The Python object to serialize. This can be a dictionary, list,
+            or any other object that can be serialized to the OpenFOAM format.
+        :param ensure_header: Whether to include the "FoamFile" header in the output.
+            If `True`, a header will be included if it is not already present in the
+            input object.
+        """
+        if isinstance(file, Mapping):
+            header = file.get("FoamFile", None)
+            assert isinstance(header, FoamFile.SubDict) or header is None
+            entries: list[bytes] = []
+            for k, v in file.items():
+                if k is not None:
+                    entries.append(
+                        dumps((k, v), keywords=(), header=header, tuple_is_entry=True)
+                    )
+                else:
+                    entries.append(dumps(v, keywords=(), header=header))
+            ret = b" ".join(entries)
+        else:
+            header = None
+            ret = dumps(file)
+
+        if header is None and ensure_header:
+            class_ = "dictionary"
+            if isinstance(file, Mapping) and "internalField" in file:
+                try:
+                    tensor_kind = _tensor_kind_for_field(file["internalField"])  # type: ignore [arg-type]
+                except (ValueError, TypeError):
+                    pass
+                else:
+                    class_ = "vol" + tensor_kind[0].upper() + tensor_kind[1:] + "Field"
+
+            header = {"version": 2.0, "format": "ascii", "class": class_}
+
+            ret = (
+                dumps(
+                    {"FoamFile": header},
+                    keywords=(),
+                )
+                + b" "
+                + ret
+            )
+
+        return ret
+
 
 class FoamFieldFile(FoamFile):
     """
foamlib/_files/_parsing.py CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import re
 import sys
-from typing import Tuple, Union, cast
+from typing import TYPE_CHECKING, Tuple, Union, cast
 
 if sys.version_info >= (3, 9):
     from collections.abc import Iterator, Mapping, MutableMapping, Sequence
@@ -40,46 +40,43 @@ from pyparsing import (
 
 from ._types import Data, Dimensioned, DimensionSet, File
 
+if TYPE_CHECKING:
+    from numpy.typing import DTypeLike
 
-def _numeric_list(
-    *, nested: int | None = None, ignore: Regex | None = None, force_float: bool = False
+
+def _ascii_numeric_list(
+    dtype: DTypeLike,
+    *,
+    nested: int | None = None,
+    ignore: Regex | None = None,
+    empty_ok: bool = False,
 ) -> ParserElement:
-    if not force_float:
-        int_pattern = r"(?:-?\d+)"
-    float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
+    dtype = np.dtype(dtype)
+
+    if np.issubdtype(dtype, np.floating):
+        element = common.ieee_float
+        element_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
+    elif np.issubdtype(dtype, np.integer):
+        element = common.integer
+        element_pattern = r"(?:-?\d+)"
+    else:
+        msg = f"Unsupported dtype: {dtype}"
+        raise TypeError(msg)
+
     spacing_pattern = (
-        rf"(?:(?:\s|{ignore.re.pattern})+)" if ignore is not None else r"\s+"
+        rf"(?:(?:\s|{ignore.re.pattern})+)" if ignore is not None else r"(?:\s+)"
     )
 
-    if nested is None:
-        if not force_float:
-            int_element_pattern = int_pattern
-            int_element = common.integer
-        float_element_pattern = float_pattern
-        float_element = common.ieee_float
-    else:
-        if not force_float:
-            int_element_pattern = rf"(?:(?:{nested})?{spacing_pattern}?\({spacing_pattern}?(?:{int_pattern}{spacing_pattern}){{{nested - 1}}}{int_pattern}{spacing_pattern}?\))"
-            int_element = (
-                Opt(Literal(str(nested))).suppress()
-                + Literal("(").suppress()
-                + Group(common.integer[nested])
-                + Literal(")").suppress()
-            )
-        float_element_pattern = rf"(?:(?:{nested})?{spacing_pattern}?\({spacing_pattern}?(?:{float_pattern}{spacing_pattern}){{{nested - 1}}}{float_pattern}{spacing_pattern}?\))"
-        float_element = (
-            Opt(Literal(str(nested))).suppress()
-            + Literal("(").suppress()
-            + Group(common.ieee_float[nested])
-            + Literal(")").suppress()
+    if nested is not None:
+        element = (
+            Literal("(").suppress() + Group(element[nested]) + Literal(")").suppress()
         )
+        element_pattern = rf"(?:{spacing_pattern}?\({element_pattern}?(?:{element_pattern}{spacing_pattern}){{{nested - 1}}}{element_pattern}{spacing_pattern}?\))"
 
-    if not force_float:
-        int_list = Forward()
-    float_list = Forward()
+    list_ = Forward()
 
     def process_count(tks: ParseResults) -> None:
-        nonlocal int_list, float_list
+        nonlocal list_
 
         if not tks:
            count = None
@@ -88,46 +85,41 @@ def _numeric_list(
         assert isinstance(count, int)
 
         if count is None:
-            if not force_float:
-                int_list_pattern = rf"\({spacing_pattern}?(?:{int_element_pattern}{spacing_pattern})*{int_element_pattern}{spacing_pattern}?\)"
-                float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern})*{float_element_pattern}{spacing_pattern}?\)"
+            if not empty_ok:
+                list_pattern = rf"\({spacing_pattern}?(?:{element_pattern}{spacing_pattern})*{element_pattern}{spacing_pattern}?\)"
             else:
-                float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern})*{float_element_pattern}?{spacing_pattern}?\)"
+                list_pattern = rf"\({spacing_pattern}?(?:{element_pattern}{spacing_pattern})*{element_pattern}?{spacing_pattern}?\)"
 
         elif count == 0:
-            if not force_float:
-                int_list <<= NoMatch()
-                float_list <<= NoMatch()
+            if not empty_ok:
+                list_ <<= NoMatch()
             else:
-                float_list <<= (Literal("(") + Literal(")")).add_parse_action(
-                    lambda: np.empty((0, nested) if nested else 0, dtype=float)
+                list_ <<= (Literal("(") + Literal(")")).add_parse_action(
+                    lambda: np.empty((0, nested) if nested else 0, dtype=dtype)
                 )
             return
 
         else:
-            if not force_float:
-                int_list_pattern = rf"\({spacing_pattern}?(?:{int_element_pattern}{spacing_pattern}){{{count - 1}}}{int_element_pattern}{spacing_pattern}?\)"
-                float_list_pattern = rf"\({spacing_pattern}?(?:{float_element_pattern}{spacing_pattern}){{{count - 1}}}{float_element_pattern}{spacing_pattern}?\)"
+            list_pattern = rf"\({spacing_pattern}?(?:{element_pattern}{spacing_pattern}){{{count - 1}}}{element_pattern}{spacing_pattern}?\)"
 
-        if not force_float:
-            int_list <<= Regex(int_list_pattern).add_parse_action(
-                lambda tks: to_array(tks, dtype=int)
-            )
-        float_list <<= Regex(float_list_pattern).add_parse_action(
-            lambda tks: to_array(tks, dtype=float)
+        list_ <<= Regex(list_pattern).add_parse_action(
+            lambda tks: to_array(tks, dtype=dtype)
         )
 
     def to_array(
-        tks: ParseResults, *, dtype: type
-    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]]:
+        tks: ParseResults, *, dtype: DTypeLike
+    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.integer | np.floating]]:
         (s,) = tks
-        s = s.replace("(", "").replace(")", "")
-
+        assert s.startswith("(")
+        assert s.endswith(")")
+        s = s[1:-1]
         if ignore is not None:
             s = re.sub(ignore.re, " ", s)
+        if nested is not None:
+            s = s.replace("(", " ").replace(")", " ")
 
         ret: np.ndarray[
-            tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]
+            tuple[int] | tuple[int, int], np.dtype[np.integer | np.floating]
         ] = np.fromstring(s, sep=" ", dtype=dtype)  # type: ignore[assignment]
 
         if nested is not None:
@@ -137,7 +129,7 @@ def _numeric_list(
 
     def to_full_array(
         tks: ParseResults, *, dtype: type
-    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.int64 | np.float64]]:
+    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.integer | np.floating]]:
         count, lst = tks
         assert isinstance(count, int)
 
@@ -146,24 +138,8 @@ def _numeric_list(
 
         return np.full((count, nested), lst, dtype=dtype)  # type: ignore[return-value]
 
-    count = Opt(common.integer).add_parse_action(process_count)
-
-    ret = count.suppress() + (
-        (int_list | float_list) if not force_float else float_list
-    )
-
-    if not force_float:
-        ret |= (
-            common.integer
-            + Literal("{").suppress()
-            + int_element
-            + Literal("}").suppress()
-        ).add_parse_action(lambda tks: to_full_array(tks, dtype=int))
-    ret |= (
-        common.integer
-        + Literal("{").suppress()
-        + float_element
-        + Literal("}").suppress()
+    ret = ((Opt(common.integer).add_parse_action(process_count)).suppress() + list_) | (
+        common.integer + Literal("{").suppress() + element + Literal("}").suppress()
    ).add_parse_action(lambda tks: to_full_array(tks, dtype=float))
 
    if ignore is not None:
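Note: `_ascii_numeric_list` accepts the usual OpenFOAM ASCII list spellings, e.g. `(1.0 2.0 3.0)`, the counted form `3(1.0 2.0 3.0)`, and the repeated-value form `3{1.0}`; the first two are decoded in bulk and the last is expanded by `to_full_array`. Roughly:

    import numpy as np

    # Bulk decoding of "(1 2 3 4)" boils down to a single call:
    assert np.fromstring("1 2 3 4", sep=" ", dtype=float).tolist() == [1.0, 2.0, 3.0, 4.0]

    # "4{1.0}" expands the way to_full_array does:
    assert np.full(4, 1.0).tolist() == [1.0, 1.0, 1.0, 1.0]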
@@ -172,23 +148,29 @@ def _numeric_list(
 
     return ret
 
 
-def _binary_field(*, nested: int | None = None) -> ParserElement:
+def _binary_numeric_list(
+    dtype: DTypeLike, *, nested: int | None = None, empty_ok: bool = False
+) -> ParserElement:
+    dtype = np.dtype(dtype)
+
     elsize = nested if nested is not None else 1
 
-    binary_field = Forward()
+    list_ = Forward()
 
     def process_count(tks: ParseResults) -> None:
-        nonlocal binary_field
+        nonlocal list_
         (size,) = tks
         assert isinstance(size, int)
 
-        binary_field <<= Regex(
-            rf"\((?s:({'.' * 8 * elsize}|{'.' * 4 * elsize}){{{size}}})\)"
-        )
+        if size == 0 and not empty_ok:
+            list_ <<= NoMatch()
+            return
+
+        list_ <<= Regex(rf"\((?s:{'.' * dtype.itemsize * elsize}){{{size}}}\)")
 
     def to_array(
         tks: ParseResults,
-    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
+    ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.integer | np.floating]]:
         size, s = tks
         assert isinstance(size, int)
         assert isinstance(s, str)
@@ -196,10 +178,6 @@ def _binary_field(*, nested: int | None = None) -> ParserElement:
         assert s[-1] == ")"
         s = s[1:-1]
 
-        float_size = len(s) / elsize / size
-        assert float_size in (4, 8)
-
-        dtype = np.float32 if float_size == 4 else float
         ret = np.frombuffer(s.encode("latin-1"), dtype=dtype)
 
         if nested is not None:
@@ -207,9 +185,9 @@ def _binary_field(*, nested: int | None = None) -> ParserElement:
 
         return ret  # type: ignore[return-value]
 
-    count = common.integer.copy().add_parse_action(process_count)
-
-    return (count + binary_field).add_parse_action(to_array)
+    return (
+        common.integer.copy().add_parse_action(process_count) + list_
+    ).add_parse_action(to_array)
 
 
 def _list_of(entry: ParserElement) -> ParserElement:
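Note: `_binary_numeric_list` now takes the element width from the requested dtype rather than guessing between 4- and 8-byte floats; callers compose a float64 parser with a float32 fallback (see the `_FIELD` changes below). The decode step is essentially a `frombuffer` over the bytes between the parentheses; a sketch:

    import numpy as np

    # A binary OpenFOAM list is "<count>(<raw bytes>)"; two float64 values
    # occupy 2 * 8 bytes between the parentheses.
    payload = np.array([1.0, 2.0]).tobytes()
    raw = b"(" + payload + b")"
    # The parser round-trips the bytes through latin-1 (lossless), then:
    values = np.frombuffer(raw[1:-1], dtype=np.float64)
    assert values.tolist() == [1.0, 2.0]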
@@ -337,15 +315,22 @@ _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
             Opt(
                 Literal("List") + Literal("<") + Literal("scalar") + Literal(">")
             ).suppress()
-            + (_numeric_list(force_float=True, ignore=_COMMENT) | _binary_field())
+            + (
+                _ascii_numeric_list(dtype=float, ignore=_COMMENT, empty_ok=True)
+                | _binary_numeric_list(dtype=np.float64, empty_ok=True)
+                | _binary_numeric_list(dtype=np.float32, empty_ok=True)
+            )
         )
         | (
             Opt(
                 Literal("List") + Literal("<") + Literal("vector") + Literal(">")
             ).suppress()
             + (
-                _numeric_list(nested=3, force_float=True, ignore=_COMMENT)
-                | _binary_field(nested=3)
+                _ascii_numeric_list(
+                    dtype=float, nested=3, ignore=_COMMENT, empty_ok=True
+                )
+                | _binary_numeric_list(np.float64, nested=3, empty_ok=True)
+                | _binary_numeric_list(np.float32, nested=3, empty_ok=True)
             )
         )
         | (
@@ -353,12 +338,11 @@ _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
             Literal("List") + Literal("<") + Literal("symmTensor") + Literal(">")
             ).suppress()
             + (
-                _numeric_list(
-                    nested=6,
-                    force_float=True,
-                    ignore=_COMMENT,
+                _ascii_numeric_list(
+                    dtype=float, nested=6, ignore=_COMMENT, empty_ok=True
                 )
-                | _binary_field(nested=6)
+                | _binary_numeric_list(np.float64, nested=6, empty_ok=True)
+                | _binary_numeric_list(np.float32, nested=6, empty_ok=True)
             )
         )
         | (
@@ -366,8 +350,11 @@ _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
             Literal("List") + Literal("<") + Literal("tensor") + Literal(">")
             ).suppress()
             + (
-                _numeric_list(nested=9, force_float=True, ignore=_COMMENT)
-                | _binary_field(nested=9)
+                _ascii_numeric_list(
+                    dtype=float, nested=9, ignore=_COMMENT, empty_ok=True
+                )
+                | _binary_numeric_list(np.float64, nested=9, empty_ok=True)
+                | _binary_numeric_list(np.float32, nested=9, empty_ok=True)
             )
         )
     )
@@ -380,12 +367,7 @@ _KEYWORD_ENTRY = _keyword_entry_of(_TOKEN | _list_of(_IDENTIFIER), _DATA)
 _DICT = _dict_of(_TOKEN, _DATA)
 _DATA_ENTRY = Forward()
 _LIST_ENTRY = _DICT | _KEYWORD_ENTRY | _DATA_ENTRY
-_LIST = (
-    _numeric_list(ignore=_COMMENT)
-    | _numeric_list(nested=3, ignore=_COMMENT)
-    | _numeric_list(nested=4, ignore=_COMMENT)
-    | _list_of(_LIST_ENTRY)
-)
+_LIST = _list_of(_LIST_ENTRY)
 _NUMBER = (
     common.number
     | CaselessKeyword("nan").set_parse_action(lambda: np.nan)
@@ -398,21 +380,41 @@ _NUMBER = (
 )
 _DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | _TOKEN
 
-_DATA <<= (
-    _DATA_ENTRY[1, ...]
-    .set_parse_action(lambda tks: [tuple(tks)] if len(tks) > 1 else [tks[0]])
+_DATA <<= _DATA_ENTRY[1, ...].set_parse_action(
+    lambda tks: [tuple(tks)] if len(tks) > 1 else [tks[0]]
+)
+
+_STANDALONE_DATA = (
+    _ascii_numeric_list(dtype=int, ignore=_COMMENT)
+    | _binary_numeric_list(dtype=np.int64)
+    | _binary_numeric_list(dtype=np.int32)
+    | _ascii_numeric_list(dtype=float, nested=3, ignore=_COMMENT)
+    | _binary_numeric_list(dtype=np.float64, nested=3)
+    | _binary_numeric_list(dtype=np.float32, nested=3)
+    | _DATA
+).add_parse_action(lambda tks: [None, tks[0]])
+
+
+_FILE = (
+    Dict(_KEYWORD_ENTRY[...] + Opt(Group(_STANDALONE_DATA)) + _KEYWORD_ENTRY[...])
     .ignore(_COMMENT)
     .parse_with_tabs()
 )
 
 
-def parse_data(s: str) -> Data:
-    if not s.strip():
-        return ""
-    return cast("Data", _DATA.parse_string(s, parse_all=True)[0])
+def loads(s: bytes | str) -> File | Data:
+    if isinstance(s, bytes):
+        s = s.decode("latin-1")
+
+    file = _FILE.parse_string(s, parse_all=True).as_dict()
 
+    if len(file) == 1 and None in file:
+        return file[None]  # type: ignore[no-any-return]
 
-_LOCATED_DICTIONARY = Group(
+    return file
+
+
+_LOCATED_KEYWORD_ENTRIES = Group(
     _keyword_entry_of(
         _TOKEN,
         Opt(_DATA, default=""),
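Note: `_STANDALONE_DATA` (and the new module-level `loads`) handle files whose body is a bare list rather than keyword entries, e.g. `polyMesh` connectivity files; ASCII/binary and int/float variants are tried in turn. A sketch via the public wrapper (exact dtypes may vary by platform):

    import numpy as np
    from foamlib import FoamFile

    # A body that is just an integer list (as in polyMesh "owner"):
    owner = FoamFile.loads(b"3(0 1 2)")
    assert isinstance(owner, np.ndarray) and owner.shape == (3,)

    # A bare list of 3-vectors (as in polyMesh "points"):
    points = FoamFile.loads(b"2((0 0 0) (1 0 0))")
    assert points.shape == (2, 3)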
@@ -421,10 +423,14 @@ _LOCATED_DICTIONARY = Group(
         located=True,
     )
 )[...]
-_LOCATED_DATA = Group(Located(_DATA.copy().add_parse_action(lambda tks: ["", tks[0]])))
+_LOCATED_STANDALONE_DATA = Group(Located(_STANDALONE_DATA))
 
-_FILE = (
-    Dict(_LOCATED_DICTIONARY + Opt(_LOCATED_DATA) + _LOCATED_DICTIONARY)
+_LOCATED_FILE = (
+    Dict(
+        _LOCATED_KEYWORD_ENTRIES
+        + Opt(_LOCATED_STANDALONE_DATA)
+        + _LOCATED_KEYWORD_ENTRIES
+    )
     .ignore(_COMMENT)
     .parse_with_tabs()
 )
@@ -436,7 +442,7 @@ class Parsed(Mapping[Tuple[str, ...], Union[Data, EllipsisType]]):
             tuple[str, ...],
             tuple[int, Data | EllipsisType, int],
         ] = {}
-        for parse_result in _FILE.parse_string(
+        for parse_result in _LOCATED_FILE.parse_string(
             contents.decode("latin-1"), parse_all=True
         ):
             self._parsed.update(self._flatten_result(parse_result))
@@ -459,8 +465,7 @@ class Parsed(Mapping[Tuple[str, ...], Union[Data, EllipsisType]]):
         end = parse_result.locn_end
         assert isinstance(end, int)
         keyword, *data = item
-        assert isinstance(keyword, str)
-        if not keyword:
+        if keyword is None:
             assert not _keywords
             assert len(data) == 1
             assert not isinstance(data[0], ParseResults)
foamlib/_files/_serialization.py CHANGED
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import sys
-from enum import Enum, auto
 from typing import overload
 
 if sys.version_info >= (3, 9):
@@ -11,7 +10,7 @@ else:
 
 import numpy as np
 
-from ._parsing import parse_data
+from ._parsing import loads
 from ._types import (
     Data,
     DataLike,
@@ -23,31 +22,32 @@ from ._types import (
 )
 
 
-class Kind(Enum):
-    DEFAULT = auto()
-    SINGLE_ENTRY = auto()
-    ASCII_FIELD = auto()
-    SCALAR_ASCII_FIELD = auto()
-    BINARY_FIELD = auto()
-    SCALAR_BINARY_FIELD = auto()
-    DIMENSIONS = auto()
-    KEYWORD = auto()
-
-
 @overload
-def normalize(data: DataLike, *, kind: Kind = Kind.DEFAULT) -> Data: ...
+def normalize_data(
+    data: DataLike, *, keywords: tuple[str, ...] | None = None
+) -> Data: ...
 
 
 @overload
-def normalize(data: EntryLike, *, kind: Kind = Kind.DEFAULT) -> Entry: ...
-
-
-def normalize(data: EntryLike, *, kind: Kind = Kind.DEFAULT) -> Entry:
-    if kind in (
-        Kind.ASCII_FIELD,
-        Kind.SCALAR_ASCII_FIELD,
-        Kind.BINARY_FIELD,
-        Kind.SCALAR_BINARY_FIELD,
+def normalize_data(
+    data: EntryLike, *, keywords: tuple[str, ...] | None = None
+) -> Entry: ...
+
+
+def normalize_data(
+    data: EntryLike, *, keywords: tuple[str, ...] | None = None
+) -> Entry:
+    if keywords is not None and (
+        keywords == ("internalField",)
+        or (
+            len(keywords) == 3
+            and keywords[0] == "boundaryField"
+            and (
+                keywords[2] == "value"
+                or keywords[2] == "gradient"
+                or keywords[2].endswith(("Value", "Gradient"))
+            )
+        )
     ):
         if is_sequence(data):
             try:
@@ -61,41 +61,50 @@ def normalize(data: EntryLike, *, kind: Kind = Kind.DEFAULT) -> Entry:
             if arr.ndim == 1 or (arr.ndim == 2 and arr.shape[1] in (3, 6, 9)):
                 return arr  # type: ignore [return-value]
 
-            return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
+            return [normalize_data(d) for d in data]
 
         if isinstance(data, int):
             return float(data)
 
-        return normalize(data)
+        return normalize_data(data)
+
+    if isinstance(data, Mapping):
+        return {normalize_keyword(k): normalize_data(v) for k, v in data.items()}  # type: ignore [misc]
 
     if isinstance(data, np.ndarray):
         ret = data.tolist()
         assert isinstance(ret, (int, float, list))
         return ret
 
-    if isinstance(data, Mapping):
-        return {k: normalize(v, kind=kind) for k, v in data.items()}
-
     if (
-        kind == Kind.DIMENSIONS
+        not isinstance(data, DimensionSet)
+        and keywords is not None
+        and keywords == ("dimensions",)
         and is_sequence(data)
         and len(data) <= 7
         and all(isinstance(d, (int, float)) for d in data)
     ):
         return DimensionSet(*data)
 
-    if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
+    if keywords is None and isinstance(data, tuple) and len(data) == 2:
         k, v = data
-        return (normalize(k), normalize(v))
+        assert not isinstance(k, Mapping)
+        return (
+            normalize_keyword(k),
+            normalize_data(v) if not isinstance(v, Mapping) else v,
+        )  # type: ignore [return-value]
 
-    if is_sequence(data) and (kind == Kind.SINGLE_ENTRY or not isinstance(data, tuple)):
-        return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
+    if (
+        is_sequence(data)
+        and not isinstance(data, DimensionSet)
+        and (keywords is None or not isinstance(data, tuple))
+    ):
+        return [normalize_data(d) for d in data]
 
     if isinstance(data, str):
-        parsed_data = parse_data(data)
-        if kind == Kind.KEYWORD and isinstance(parsed_data, bool):
-            return data
-        return parsed_data
+        s = loads(data)
+        if isinstance(s, (str, tuple, bool)):
+            return s
 
     if isinstance(
         data,
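Note: the `keywords` path argument now carries the information the old `Kind` enum encoded: values stored under `internalField` (or `boundaryField/<patch>/*Value`-style keywords) are normalized as numeric fields. Illustratively, with the private helper:

    import numpy as np
    from foamlib._files._serialization import normalize_data

    # Under a field keyword path, a numeric sequence becomes an array...
    arr = normalize_data([1, 2, 3], keywords=("internalField",))
    assert isinstance(arr, np.ndarray)

    # ...while the same data under an ordinary keyword stays a plain list.
    lst = normalize_data([1, 2, 3], keywords=("nonFieldKeyword",))
    assert isinstance(lst, list)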
@@ -107,52 +116,70 @@ def normalize(data: EntryLike, *, kind: Kind = Kind.DEFAULT) -> Entry:
     raise TypeError(msg)
 
 
+def normalize_keyword(data: DataLike) -> Data:
+    ret = normalize_data(data)
+
+    if isinstance(data, str) and isinstance(ret, bool):
+        return data
+
+    return ret
+
+
 def dumps(
     data: EntryLike,
     *,
-    kind: Kind = Kind.DEFAULT,
+    keywords: tuple[str, ...] | None = None,
+    header: Mapping[str, Entry] | None = None,
+    tuple_is_entry: bool = False,
 ) -> bytes:
-    data = normalize(data, kind=kind)
+    data = normalize_data(data, keywords=keywords)
 
     if isinstance(data, Mapping):
         return (
             b"{"
-            + b" ".join(dumps((k, v), kind=Kind.SINGLE_ENTRY) for k, v in data.items())
+            + b" ".join(
+                dumps(
+                    (k, v),
+                    keywords=keywords,
+                    tuple_is_entry=True,
+                )
+                for k, v in data.items()
+            )
             + b"}"
         )
 
-    if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
-        k, v = data
-        ret = dumps(k)
-        val = dumps(v)
-        if val:
-            ret += b" " + val
-        if not isinstance(v, Mapping):
-            ret += b";"
-        return ret
-
-    if isinstance(data, DimensionSet):
-        return b"[" + b" ".join(dumps(v) for v in data) + b"]"
+    if (
+        keywords is not None
+        and (
+            keywords == ("internalField",)
+            or (
+                len(keywords) == 3
+                and keywords[0] == "boundaryField"
+                and (
+                    keywords[2] == "value"
+                    or keywords[2] == "gradient"
+                    or keywords[2].endswith(("Value", "Gradient"))
+                )
+            )
+        )
+        and isinstance(data, (int, float, np.ndarray))
+    ):
+        data = np.asarray(data)  # type: ignore [assignment]
+        class_ = header.get("class", "") if header else ""
+        assert isinstance(class_, str)
+        scalar = "Scalar" in class_
 
-    if kind in (
-        Kind.ASCII_FIELD,
-        Kind.SCALAR_ASCII_FIELD,
-        Kind.BINARY_FIELD,
-        Kind.SCALAR_BINARY_FIELD,
-    ) and (isinstance(data, (int, float, np.ndarray))):
         shape = np.shape(data)
-        if not shape or (
-            kind not in (Kind.SCALAR_ASCII_FIELD, Kind.SCALAR_BINARY_FIELD)
-            and shape in ((3,), (6,), (9,))
-        ):
-            return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
+        if not shape or (not scalar and shape in ((3,), (6,), (9,))):
+            return b"uniform " + dumps(data)
 
         assert isinstance(data, np.ndarray)
-        ndim = len(shape)
+        ndim = np.ndim(data)
         if ndim == 1:
             tensor_kind = b"scalar"
 
         elif ndim == 2:
+            assert len(shape) == 2
             if shape[1] == 3:
                 tensor_kind = b"vector"
             elif shape[1] == 6:
@@ -165,34 +192,46 @@ def dumps(
         else:
             return dumps(data)
 
-        if kind in (Kind.BINARY_FIELD, Kind.SCALAR_BINARY_FIELD):
-            contents = b"(" + data.tobytes() + b")"
-        else:
-            assert kind in (Kind.ASCII_FIELD, Kind.SCALAR_ASCII_FIELD)
-            contents = dumps(data, kind=Kind.SINGLE_ENTRY)
+        binary = (header.get("format", "") if header else "") == "binary"
+
+        contents = b"(" + data.tobytes() + b")" if binary else dumps(data)
 
         return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
 
+    if isinstance(data, DimensionSet):
+        return b"[" + b" ".join(dumps(v) for v in data) + b"]"
+
     if isinstance(data, Dimensioned):
         if data.name is not None:
             return (
                 dumps(data.name)
                 + b" "
-                + dumps(data.dimensions, kind=Kind.DIMENSIONS)
+                + dumps(data.dimensions)
                 + b" "
-                + dumps(data.value, kind=Kind.SINGLE_ENTRY)
+                + dumps(data.value)
             )
-        return (
-            dumps(data.dimensions, kind=Kind.DIMENSIONS)
-            + b" "
-            + dumps(data.value, kind=Kind.SINGLE_ENTRY)
-        )
+        return dumps(data.dimensions) + b" " + dumps(data.value)
 
     if isinstance(data, tuple):
+        if tuple_is_entry:
+            k, v = data
+            ret = dumps(k)
+            val = dumps(
+                v,
+                keywords=(*keywords, k)
+                if keywords is not None and isinstance(k, str)
+                else None,
+            )
+            if val:
+                ret += b" " + val
+            if not isinstance(v, Mapping):
+                ret += b";"
+            return ret
+
         return b" ".join(dumps(v) for v in data)
 
-    if is_sequence(data) and not isinstance(data, tuple):
-        return b"(" + b" ".join(dumps(v, kind=Kind.SINGLE_ENTRY) for v in data) + b")"
+    if is_sequence(data):
+        return b"(" + b" ".join(dumps(v, tuple_is_entry=True) for v in data) + b")"
 
     if data is True:
         return b"yes"
foamlib/_files/_types.py CHANGED
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import sys
-from enum import Enum
 from typing import Dict, NamedTuple, Optional, Union
 
 import numpy as np
@@ -42,39 +41,6 @@ TensorLike = Union[
 ]
 
 
-class TensorKind(Enum):
-    SCALAR = ()
-    VECTOR = (3,)
-    SYMM_TENSOR = (6,)
-    TENSOR = (9,)
-
-    @property
-    def shape(self) -> tuple[()] | tuple[int]:
-        shape: tuple[()] | tuple[int] = self.value
-        return shape
-
-    @property
-    def size(self) -> int:
-        return int(np.prod(self.shape))
-
-    def __str__(self) -> str:
-        return {
-            TensorKind.SCALAR: "scalar",
-            TensorKind.VECTOR: "vector",
-            TensorKind.SYMM_TENSOR: "symmTensor",
-            TensorKind.TENSOR: "tensor",
-        }[self]
-
-    @staticmethod
-    def from_shape(shape: tuple[int, ...]) -> TensorKind:
-        for kind in TensorKind:
-            if kind.shape == shape:
-                return kind
-
-        msg = f"No tensor kind for shape {shape!r}"
-        raise ValueError(msg)
-
-
 class Dimensioned:
     def __init__(
         self,
foamlib-0.9.0.dist-info/METADATA → foamlib-0.9.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: foamlib
-Version: 0.9.0
+Version: 0.9.2
 Summary: A Python interface for interacting with OpenFOAM
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib
foamlib-0.9.0.dist-info/RECORD → foamlib-0.9.2.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-foamlib/__init__.py,sha256=gSk-v_a_STLCffHq4oHQOi00Pj2TrxcMBOThu5S7ATY,452
+foamlib/__init__.py,sha256=75bXvyciN5IbxNdsJ4XEBETcI5ojrnjWQhQco2Xmqy4,452
 foamlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 foamlib/_cases/__init__.py,sha256=_A1TTHuQfS9FH2_33lSEyLtOJZGFHZBco1tWJCVOHks,358
 foamlib/_cases/_async.py,sha256=e4lGTcQBbFGwfG6SmJks5aa5LWd_0dy01kgKZWAgTGQ,11655
@@ -9,12 +9,12 @@ foamlib/_cases/_subprocess.py,sha256=VHV2SuOLqa711an6kCuvN6UlIkeh4qqFfdrpNoKzQps
 foamlib/_cases/_sync.py,sha256=yhrkwStKri7u41YImYCGBH4REcKn8Ar-32VW_WPa40c,9641
 foamlib/_cases/_util.py,sha256=QCizfbuJdOCeF9ogU2R-y-iWX5kfaOA4U2W68t6QlOM,2544
 foamlib/_files/__init__.py,sha256=q1vkjXnjnSZvo45jPAICpWeF2LZv5V6xfzAR6S8fS5A,96
-foamlib/_files/_files.py,sha256=gSJQjvB1f7N2yJtCTx9kpivKqSSNjDj37qNMpned5CM,19505
+foamlib/_files/_files.py,sha256=7Si-C-2O0h27Ga4bC0W-i6_PKq7B4K2Gg1tDGh8AZFo,22416
 foamlib/_files/_io.py,sha256=BGbbm6HKxL2ka0YMCmHqZQZ1R4PPQlkvWWb4FHMAS8k,2217
-foamlib/_files/_parsing.py,sha256=VLPyK11J57C02zJu1YfiLR0Xv1qRxw0sYYNsmmluFg0,17880
-foamlib/_files/_serialization.py,sha256=QJ-F6BKizVe0gpjnpIfPxNGTqWwalY4PQtCKdDY9D70,5502
-foamlib/_files/_types.py,sha256=PDhFW5hUzcoQsLx7M0Va1oaYV6km02jFgrvKJof0JKQ,3750
-foamlib-0.9.0.dist-info/METADATA,sha256=xyisPf6vPUB497hmKtJngw_Z2QiuX7DCSvzZI5T3gQg,12906
-foamlib-0.9.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-foamlib-0.9.0.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
-foamlib-0.9.0.dist-info/RECORD,,
+foamlib/_files/_parsing.py,sha256=ftaAh8WJ816Y6ej9ntoUzBOn6hC22qHLgN00568Hc2w,17636
+foamlib/_files/_serialization.py,sha256=R9-oXYywZubevuPkYaTIAyM0_BfF9rC819j9AXOCcg8,6451
+foamlib/_files/_types.py,sha256=q5O_x680XhGfvPHCNF_3objK1imUG2kgQYNRF2Z4qJ0,2918
+foamlib-0.9.2.dist-info/METADATA,sha256=c1itaOTpgA5-3jV5ADxqg02_th-qPhry2mA4M7fR9rk,12906
+foamlib-0.9.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+foamlib-0.9.2.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
+foamlib-0.9.2.dist-info/RECORD,,