foamlib 0.9.1__tar.gz → 0.9.2__tar.gz

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in the public registry.
Files changed (65)
  1. {foamlib-0.9.1 → foamlib-0.9.2}/PKG-INFO +1 -1
  2. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/__init__.py +1 -1
  3. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_files/_files.py +143 -48
  4. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_files/_parsing.py +37 -26
  5. foamlib-0.9.2/foamlib/_files/_serialization.py +241 -0
  6. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_dumps.py +55 -15
  7. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_parsing/test_basic.py +1 -0
  8. foamlib-0.9.2/tests/test_files/test_parsing/test_loads.py +69 -0
  9. foamlib-0.9.1/foamlib/_files/_serialization.py +0 -202
  10. {foamlib-0.9.1 → foamlib-0.9.2}/.devcontainer.json +0 -0
  11. {foamlib-0.9.1 → foamlib-0.9.2}/.dockerignore +0 -0
  12. {foamlib-0.9.1 → foamlib-0.9.2}/.git-blame-ignore-revs +0 -0
  13. {foamlib-0.9.1 → foamlib-0.9.2}/.github/dependabot.yml +0 -0
  14. {foamlib-0.9.1 → foamlib-0.9.2}/.github/workflows/ci.yml +0 -0
  15. {foamlib-0.9.1 → foamlib-0.9.2}/.github/workflows/docker.yml +0 -0
  16. {foamlib-0.9.1 → foamlib-0.9.2}/.github/workflows/dockerhub-description.yml +0 -0
  17. {foamlib-0.9.1 → foamlib-0.9.2}/.github/workflows/pypi-publish.yml +0 -0
  18. {foamlib-0.9.1 → foamlib-0.9.2}/.gitignore +0 -0
  19. {foamlib-0.9.1 → foamlib-0.9.2}/.readthedocs.yaml +0 -0
  20. {foamlib-0.9.1 → foamlib-0.9.2}/CONTRIBUTING.md +0 -0
  21. {foamlib-0.9.1 → foamlib-0.9.2}/Dockerfile +0 -0
  22. {foamlib-0.9.1 → foamlib-0.9.2}/LICENSE.txt +0 -0
  23. {foamlib-0.9.1 → foamlib-0.9.2}/README.md +0 -0
  24. {foamlib-0.9.1 → foamlib-0.9.2}/benchmark/benchmark.png +0 -0
  25. {foamlib-0.9.1 → foamlib-0.9.2}/benchmark/benchmark.py +0 -0
  26. {foamlib-0.9.1 → foamlib-0.9.2}/benchmark/requirements.txt +0 -0
  27. {foamlib-0.9.1 → foamlib-0.9.2}/benchmark/ruff.toml +0 -0
  28. {foamlib-0.9.1 → foamlib-0.9.2}/docs/Makefile +0 -0
  29. {foamlib-0.9.1 → foamlib-0.9.2}/docs/cases.rst +0 -0
  30. {foamlib-0.9.1 → foamlib-0.9.2}/docs/conf.py +0 -0
  31. {foamlib-0.9.1 → foamlib-0.9.2}/docs/files.rst +0 -0
  32. {foamlib-0.9.1 → foamlib-0.9.2}/docs/index.rst +0 -0
  33. {foamlib-0.9.1 → foamlib-0.9.2}/docs/make.bat +0 -0
  34. {foamlib-0.9.1 → foamlib-0.9.2}/docs/ruff.toml +0 -0
  35. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_cases/__init__.py +0 -0
  36. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_cases/_async.py +0 -0
  37. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_cases/_base.py +0 -0
  38. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_cases/_run.py +0 -0
  39. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_cases/_slurm.py +0 -0
  40. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_cases/_subprocess.py +0 -0
  41. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_cases/_sync.py +0 -0
  42. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_cases/_util.py +0 -0
  43. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_files/__init__.py +0 -0
  44. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_files/_io.py +0 -0
  45. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/_files/_types.py +0 -0
  46. {foamlib-0.9.1 → foamlib-0.9.2}/foamlib/py.typed +0 -0
  47. {foamlib-0.9.1 → foamlib-0.9.2}/logo.png +0 -0
  48. {foamlib-0.9.1 → foamlib-0.9.2}/pyproject.toml +0 -0
  49. {foamlib-0.9.1 → foamlib-0.9.2}/tests/__init__.py +0 -0
  50. {foamlib-0.9.1 → foamlib-0.9.2}/tests/ruff.toml +0 -0
  51. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_cases/__init__.py +0 -0
  52. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_cases/test_cavity.py +0 -0
  53. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_cases/test_cavity_async.py +0 -0
  54. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_cases/test_flange.py +0 -0
  55. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_cases/test_flange_async.py +0 -0
  56. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_example.py +0 -0
  57. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/__init__.py +0 -0
  58. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_files.py +0 -0
  59. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_parsing/__init__.py +0 -0
  60. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_parsing/test_advanced.py +0 -0
  61. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_parsing/test_decompose_par.py +0 -0
  62. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_parsing/test_fields.py +0 -0
  63. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_parsing/test_fv_schemes.py +0 -0
  64. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_parsing/test_intermediate.py +0 -0
  65. {foamlib-0.9.1 → foamlib-0.9.2}/tests/test_files/test_parsing/test_poly_mesh.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: foamlib
-Version: 0.9.1
+Version: 0.9.2
 Summary: A Python interface for interacting with OpenFOAM
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib
@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.9.1"
+__version__ = "0.9.2"
 
 from ._cases import (
     AsyncFoamCase,
@@ -17,9 +17,11 @@ else:
     import numpy as np
 
 from ._io import FoamFileIO
-from ._serialization import Kind, dumps, normalize
+from ._parsing import loads
+from ._serialization import dumps, normalize_data, normalize_keyword
 from ._types import (
     Data,
+    DataLike,
     Dict_,
     Dimensioned,
     DimensionSet,
@@ -31,6 +33,32 @@ from ._types import (
 )
 
 
+def _tensor_kind_for_field(
+    field: FieldLike,
+) -> str:
+    shape = np.shape(field)  # type: ignore [arg-type]
+    if not shape:
+        return "scalar"
+    if shape == (3,):
+        return "vector"
+    if shape == (6,):
+        return "symmTensor"
+    if shape == (9,):
+        return "tensor"
+    if len(shape) == 1:
+        return "scalar"
+    if len(shape) == 2:
+        if shape[1] == 3:
+            return "vector"
+        if shape[1] == 6:
+            return "symmTensor"
+        if shape[1] == 9:
+            return "tensor"
+
+    msg = f"Invalid field shape: {shape}"
+    raise ValueError(msg)
+
+
 class FoamFile(
     MutableMapping[
         Optional[Union[str, Tuple[str, ...]]],
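
A quick illustration (not part of the package diff) of the shape classification the new helper performs, assuming it stays importable from its private module:

    import numpy as np
    from foamlib._files._files import _tensor_kind_for_field

    assert _tensor_kind_for_field(1.0) == "scalar"                    # shape ()     -> uniform scalar
    assert _tensor_kind_for_field([0.0, 0.0, -9.81]) == "vector"      # shape (3,)   -> uniform vector
    assert _tensor_kind_for_field(np.zeros(10)) == "scalar"           # shape (10,)  -> nonuniform scalar
    assert _tensor_kind_for_field(np.zeros((10, 6))) == "symmTensor"  # shape (10, 6)
    # Any other shape, e.g. (10, 4), raises ValueError("Invalid field shape: ...").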
@@ -261,7 +289,7 @@ class FoamFile(
         elif not isinstance(keywords, tuple):
             keywords = (keywords,)
 
-        if keywords and not isinstance(normalize(keywords[-1], kind=Kind.KEYWORD), str):
+        if keywords and not isinstance(normalize_keyword(keywords[-1]), str):
             msg = f"Invalid keyword: {keywords[-1]}"
             raise ValueError(msg)
 
@@ -283,52 +311,26 @@ class FoamFile(
                 self.path.stem if self.path.suffix == ".gz" else self.path.name
             )
 
-        kind = Kind.DEFAULT
-        if keywords == ("internalField",) or (
-            len(keywords) == 3
-            and keywords[0] == "boundaryField"
-            and (
-                keywords[2] in ("value", "gradient")
-                or keywords[2].endswith("Value")
-                or keywords[2].endswith("Gradient")
-            )
-        ):
-            kind = (
-                Kind.BINARY_FIELD if self.format == "binary" else Kind.ASCII_FIELD
-            )
-        elif keywords == ("dimensions",):
-            kind = Kind.DIMENSIONS
-
         if (
-            kind in (Kind.ASCII_FIELD, Kind.BINARY_FIELD)
+            keywords == ("internalField",)
+            or (
+                len(keywords) == 3
+                and keywords[0] == "boundaryField"
+                and (
+                    keywords[2] == "value"
+                    or keywords[2] == "gradient"
+                    or keywords[2].endswith(("Value", "Gradient"))
+                )
+            )
         ) and self.class_ == "dictionary":
             try:
-                shape = np.shape(data)  # type: ignore [arg-type]
+                tensor_kind = _tensor_kind_for_field(data)  # type: ignore [arg-type]
             except ValueError:
                 pass
             else:
-                if not shape:
-                    self.class_ = "volScalarField"
-                elif shape == (3,):
-                    self.class_ = "volVectorField"
-                elif shape == (6,):
-                    self.class_ = "volSymmTensorField"
-                elif shape == (9,):
-                    self.class_ = "volTensorField"
-                elif len(shape) == 1:
-                    self.class_ = "volScalarField"
-                elif len(shape) == 2:
-                    if shape[1] == 3:
-                        self.class_ = "volVectorField"
-                    elif shape[1] == 6:
-                        self.class_ = "volSymmTensorField"
-                    elif shape[1] == 9:
-                        self.class_ = "volTensorField"
-
-                if kind == Kind.ASCII_FIELD and self.class_.endswith("scalarField"):
-                    kind = Kind.SCALAR_ASCII_FIELD
-                elif kind == Kind.BINARY_FIELD and self.class_.endswith("scalarField"):
-                    kind = Kind.SCALAR_BINARY_FIELD
+                self.class_ = (
+                    "vol" + tensor_kind[0].upper() + tensor_kind[1:] + "Field"
+                )
 
         parsed = self._get_parsed(missing_ok=True)
 
@@ -360,7 +362,7 @@ class FoamFile(
                 ...,
                 before
                 + indentation
-                + dumps(keywords[-1])
+                + dumps(normalize_keyword(keywords[-1]))
                 + b"\n"
                 + indentation
                 + b"{\n"
@@ -373,23 +375,37 @@ class FoamFile(
                 self[(*keywords, k)] = v
 
         elif keywords:
-            val = dumps(data, kind=kind)
+            header = self.get("FoamFile", None)
+            assert header is None or isinstance(header, FoamFile.SubDict)
+            val = dumps(
+                data,
+                keywords=keywords,
+                header=header,
+            )
             parsed.put(
                 keywords,
-                normalize(data, kind=kind),
+                normalize_data(data, keywords=keywords),
                 before
                 + indentation
-                + dumps(keywords[-1])
+                + dumps(normalize_keyword(keywords[-1]))
                 + ((b" " + val) if val else b"")
                 + (b";" if not keywords[-1].startswith("#") else b"")
                 + after,
             )
 
         else:
+            header = self.get("FoamFile", None)
+            assert header is None or isinstance(header, FoamFile.SubDict)
             parsed.put(
                 (),
-                normalize(data, kind=kind),
-                before + dumps(data, kind=kind) + after,
+                normalize_data(data, keywords=keywords),
+                before
+                + dumps(
+                    data,
+                    keywords=(),
+                    header=header,
+                )
+                + after,
             )
 
     def __delitem__(self, keywords: str | tuple[str, ...] | None) -> None:
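
The net effect of the `__setitem__` hunks above: assigning a field entry now derives the file's `class_` from the shared helper and serializes against the file's own `FoamFile` header. A hedged sketch (the path `0/U` is hypothetical; the class inference matches the new `FoamFile.dumps` test further down):

    from foamlib import FoamFile

    f = FoamFile("0/U")                          # hypothetical field file whose header still says "dictionary"
    f["internalField"] = [[0.0, 0.0, 0.0]] * 4   # shape (4, 3) -> tensor kind "vector"
    assert f.class_ == "volVectorField"          # "vol" + "Vector" + "Field"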
@@ -442,6 +458,85 @@ class FoamFile(
         d.pop("FoamFile", None)
         return deepcopy(d)
 
+    @staticmethod
+    def loads(
+        s: bytes | str,
+        *,
+        include_header: bool = False,
+    ) -> File | Data:
+        """
+        Standalone deserializing function.
+
+        Deserialize the OpenFOAM FoamFile format to Python objects.
+
+        :param s: The string to deserialize. This can be a dictionary, list, or any
+            other object that can be serialized to the OpenFOAM format.
+        :param include_header: Whether to include the "FoamFile" header in the output.
+            If `True`, the header will be included if it is present in the input object.
+        """
+        ret = loads(s)
+
+        if not include_header and isinstance(ret, Mapping) and "FoamFile" in ret:
+            del ret["FoamFile"]
+            if len(ret) == 1 and None in ret:
+                val = ret[None]
+                assert not isinstance(val, Mapping)
+                return val
+
+        return ret
+
+    @staticmethod
+    def dumps(file: File | DataLike, *, ensure_header: bool = True) -> bytes:
+        """
+        Standalone serializing function.
+
+        Serialize Python objects to the OpenFOAM FoamFile format.
+
+        :param file: The Python object to serialize. This can be a dictionary, list,
+            or any other object that can be serialized to the OpenFOAM format.
+        :param ensure_header: Whether to include the "FoamFile" header in the output.
+            If `True`, a header will be included if it is not already present in the
+            input object.
+        """
+        if isinstance(file, Mapping):
+            header = file.get("FoamFile", None)
+            assert isinstance(header, FoamFile.SubDict) or header is None
+            entries: list[bytes] = []
+            for k, v in file.items():
+                if k is not None:
+                    entries.append(
+                        dumps((k, v), keywords=(), header=header, tuple_is_entry=True)
+                    )
+                else:
+                    entries.append(dumps(v, keywords=(), header=header))
+            ret = b" ".join(entries)
+        else:
+            header = None
+            ret = dumps(file)
+
+        if header is None and ensure_header:
+            class_ = "dictionary"
+            if isinstance(file, Mapping) and "internalField" in file:
+                try:
+                    tensor_kind = _tensor_kind_for_field(file["internalField"])  # type: ignore [arg-type]
+                except (ValueError, TypeError):
+                    pass
+                else:
+                    class_ = "vol" + tensor_kind[0].upper() + tensor_kind[1:] + "Field"
+
+            header = {"version": 2.0, "format": "ascii", "class": class_}
+
+            ret = (
+                dumps(
+                    {"FoamFile": header},
+                    keywords=(),
+                )
+                + b" "
+                + ret
+            )
+
+        return ret
+
 
 class FoamFieldFile(FoamFile):
     """
@@ -380,21 +380,41 @@ _NUMBER = (
 )
 _DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | _TOKEN
 
-_DATA <<= (
-    _DATA_ENTRY[1, ...]
-    .set_parse_action(lambda tks: [tuple(tks)] if len(tks) > 1 else [tks[0]])
+_DATA <<= _DATA_ENTRY[1, ...].set_parse_action(
+    lambda tks: [tuple(tks)] if len(tks) > 1 else [tks[0]]
+)
+
+_STANDALONE_DATA = (
+    _ascii_numeric_list(dtype=int, ignore=_COMMENT)
+    | _binary_numeric_list(dtype=np.int64)
+    | _binary_numeric_list(dtype=np.int32)
+    | _ascii_numeric_list(dtype=float, nested=3, ignore=_COMMENT)
+    | _binary_numeric_list(dtype=np.float64, nested=3)
+    | _binary_numeric_list(dtype=np.float32, nested=3)
+    | _DATA
+).add_parse_action(lambda tks: [None, tks[0]])
+
+
+_FILE = (
+    Dict(_KEYWORD_ENTRY[...] + Opt(Group(_STANDALONE_DATA)) + _KEYWORD_ENTRY[...])
     .ignore(_COMMENT)
     .parse_with_tabs()
 )
 
 
-def parse_data(s: str) -> Data:
-    if not s.strip():
-        return ""
-    return cast("Data", _DATA.parse_string(s, parse_all=True)[0])
+def loads(s: bytes | str) -> File | Data:
+    if isinstance(s, bytes):
+        s = s.decode("latin-1")
+
+    file = _FILE.parse_string(s, parse_all=True).as_dict()
+
+    if len(file) == 1 and None in file:
+        return file[None]  # type: ignore[no-any-return]
 
+    return file
 
-_LOCATED_DICTIONARY = Group(
+
+_LOCATED_KEYWORD_ENTRIES = Group(
     _keyword_entry_of(
         _TOKEN,
         Opt(_DATA, default=""),
@@ -403,22 +423,14 @@ _LOCATED_DICTIONARY = Group(
         located=True,
     )
 )[...]
-_LOCATED_DATA = Group(
-    Located(
-        (
-            _ascii_numeric_list(dtype=int, ignore=_COMMENT)
-            | _binary_numeric_list(dtype=np.int64)
-            | _binary_numeric_list(dtype=np.int32)
-            | _ascii_numeric_list(dtype=float, nested=3, ignore=_COMMENT)
-            | _binary_numeric_list(dtype=np.float64, nested=3)
-            | _binary_numeric_list(dtype=np.float32, nested=3)
-            | _DATA
-        ).add_parse_action(lambda tks: ["", tks[0]])
-    )
-)
+_LOCATED_STANDALONE_DATA = Group(Located(_STANDALONE_DATA))
 
-_FILE = (
-    Dict(_LOCATED_DICTIONARY + Opt(_LOCATED_DATA) + _LOCATED_DICTIONARY)
+_LOCATED_FILE = (
+    Dict(
+        _LOCATED_KEYWORD_ENTRIES
+        + Opt(_LOCATED_STANDALONE_DATA)
+        + _LOCATED_KEYWORD_ENTRIES
+    )
     .ignore(_COMMENT)
     .parse_with_tabs()
 )
@@ -430,7 +442,7 @@ class Parsed(Mapping[Tuple[str, ...], Union[Data, EllipsisType]]):
             tuple[str, ...],
             tuple[int, Data | EllipsisType, int],
         ] = {}
-        for parse_result in _FILE.parse_string(
+        for parse_result in _LOCATED_FILE.parse_string(
             contents.decode("latin-1"), parse_all=True
         ):
             self._parsed.update(self._flatten_result(parse_result))
@@ -453,8 +465,7 @@ class Parsed(Mapping[Tuple[str, ...], Union[Data, EllipsisType]]):
         end = parse_result.locn_end
         assert isinstance(end, int)
         keyword, *data = item
-        assert isinstance(keyword, str)
-        if not keyword:
+        if keyword is None:
             assert not _keywords
             assert len(data) == 1
             assert not isinstance(data[0], ParseResults)
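
`_STANDALONE_DATA` tags keyword-less data with a `None` key (the old parser used the empty string), so standalone values can sit alongside keyword entries in a single parse. The new tests pin this down, e.g.:

    import numpy as np
    from foamlib import FoamFile

    assert FoamFile.loads("FoamFile {} 1", include_header=True) == {"FoamFile": {}, None: 1}
    assert np.array_equal(FoamFile.loads("3(1 2 3)"), [1, 2, 3])
    assert FoamFile.loads("0()") == []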
@@ -0,0 +1,241 @@
+from __future__ import annotations
+
+import sys
+from typing import overload
+
+if sys.version_info >= (3, 9):
+    from collections.abc import Mapping
+else:
+    from typing import Mapping
+
+import numpy as np
+
+from ._parsing import loads
+from ._types import (
+    Data,
+    DataLike,
+    Dimensioned,
+    DimensionSet,
+    Entry,
+    EntryLike,
+    is_sequence,
+)
+
+
+@overload
+def normalize_data(
+    data: DataLike, *, keywords: tuple[str, ...] | None = None
+) -> Data: ...
+
+
+@overload
+def normalize_data(
+    data: EntryLike, *, keywords: tuple[str, ...] | None = None
+) -> Entry: ...
+
+
+def normalize_data(
+    data: EntryLike, *, keywords: tuple[str, ...] | None = None
+) -> Entry:
+    if keywords is not None and (
+        keywords == ("internalField",)
+        or (
+            len(keywords) == 3
+            and keywords[0] == "boundaryField"
+            and (
+                keywords[2] == "value"
+                or keywords[2] == "gradient"
+                or keywords[2].endswith(("Value", "Gradient"))
+            )
+        )
+    ):
+        if is_sequence(data):
+            try:
+                arr = np.asarray(data)
+            except ValueError:
+                pass
+            else:
+                if not np.issubdtype(arr.dtype, np.floating):
+                    arr = arr.astype(float)
+
+                if arr.ndim == 1 or (arr.ndim == 2 and arr.shape[1] in (3, 6, 9)):
+                    return arr  # type: ignore [return-value]
+
+            return [normalize_data(d) for d in data]
+
+        if isinstance(data, int):
+            return float(data)
+
+        return normalize_data(data)
+
+    if isinstance(data, Mapping):
+        return {normalize_keyword(k): normalize_data(v) for k, v in data.items()}  # type: ignore [misc]
+
+    if isinstance(data, np.ndarray):
+        ret = data.tolist()
+        assert isinstance(ret, (int, float, list))
+        return ret
+
+    if (
+        not isinstance(data, DimensionSet)
+        and keywords is not None
+        and keywords == ("dimensions",)
+        and is_sequence(data)
+        and len(data) <= 7
+        and all(isinstance(d, (int, float)) for d in data)
+    ):
+        return DimensionSet(*data)
+
+    if keywords is None and isinstance(data, tuple) and len(data) == 2:
+        k, v = data
+        assert not isinstance(k, Mapping)
+        return (
+            normalize_keyword(k),
+            normalize_data(v) if not isinstance(v, Mapping) else v,
+        )  # type: ignore [return-value]
+
+    if (
+        is_sequence(data)
+        and not isinstance(data, DimensionSet)
+        and (keywords is None or not isinstance(data, tuple))
+    ):
+        return [normalize_data(d) for d in data]
+
+    if isinstance(data, str):
+        s = loads(data)
+        if isinstance(s, (str, tuple, bool)):
+            return s
+
+    if isinstance(
+        data,
+        (int, float, bool, tuple, DimensionSet, Dimensioned),
+    ):
+        return data
+
+    msg = f"Unsupported data type: {type(data)}"
+    raise TypeError(msg)
+
+
+def normalize_keyword(data: DataLike) -> Data:
+    ret = normalize_data(data)
+
+    if isinstance(data, str) and isinstance(ret, bool):
+        return data
+
+    return ret
+
+
+def dumps(
+    data: EntryLike,
+    *,
+    keywords: tuple[str, ...] | None = None,
+    header: Mapping[str, Entry] | None = None,
+    tuple_is_entry: bool = False,
+) -> bytes:
+    data = normalize_data(data, keywords=keywords)
+
+    if isinstance(data, Mapping):
+        return (
+            b"{"
+            + b" ".join(
+                dumps(
+                    (k, v),
+                    keywords=keywords,
+                    tuple_is_entry=True,
+                )
+                for k, v in data.items()
+            )
+            + b"}"
+        )
+
+    if (
+        keywords is not None
+        and (
+            keywords == ("internalField",)
+            or (
+                len(keywords) == 3
+                and keywords[0] == "boundaryField"
+                and (
+                    keywords[2] == "value"
+                    or keywords[2] == "gradient"
+                    or keywords[2].endswith(("Value", "Gradient"))
+                )
+            )
+        )
+        and isinstance(data, (int, float, np.ndarray))
+    ):
+        data = np.asarray(data)  # type: ignore [assignment]
+        class_ = header.get("class", "") if header else ""
+        assert isinstance(class_, str)
+        scalar = "Scalar" in class_
+
+        shape = np.shape(data)
+        if not shape or (not scalar and shape in ((3,), (6,), (9,))):
+            return b"uniform " + dumps(data)
+
+        assert isinstance(data, np.ndarray)
+        ndim = np.ndim(data)
+        if ndim == 1:
+            tensor_kind = b"scalar"
+
+        elif ndim == 2:
+            assert len(shape) == 2
+            if shape[1] == 3:
+                tensor_kind = b"vector"
+            elif shape[1] == 6:
+                tensor_kind = b"symmTensor"
+            elif shape[1] == 9:
+                tensor_kind = b"tensor"
+            else:
+                return dumps(data)
+
+        else:
+            return dumps(data)
+
+        binary = (header.get("format", "") if header else "") == "binary"
+
+        contents = b"(" + data.tobytes() + b")" if binary else dumps(data)
+
+        return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
+
+    if isinstance(data, DimensionSet):
+        return b"[" + b" ".join(dumps(v) for v in data) + b"]"
+
+    if isinstance(data, Dimensioned):
+        if data.name is not None:
+            return (
+                dumps(data.name)
+                + b" "
+                + dumps(data.dimensions)
+                + b" "
+                + dumps(data.value)
+            )
+        return dumps(data.dimensions) + b" " + dumps(data.value)
+
+    if isinstance(data, tuple):
+        if tuple_is_entry:
+            k, v = data
+            ret = dumps(k)
+            val = dumps(
+                v,
+                keywords=(*keywords, k)
+                if keywords is not None and isinstance(k, str)
+                else None,
+            )
+            if val:
+                ret += b" " + val
+            if not isinstance(v, Mapping):
+                ret += b";"
+            return ret
+
+        return b" ".join(dumps(v) for v in data)
+
+    if is_sequence(data):
+        return b"(" + b" ".join(dumps(v, tuple_is_entry=True) for v in data) + b")"
+
+    if data is True:
+        return b"yes"
+    if data is False:
+        return b"no"
+
+    return str(data).encode("latin-1")
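
A short sketch of the new normalization entry points, derived from the code above (the `DimensionSet` kwargs are those used in the package's own tests):

    from foamlib import FoamFile
    from foamlib._files._serialization import normalize_data, normalize_keyword

    # In field context, ints are promoted to floats:
    assert normalize_data(1, keywords=("internalField",)) == 1.0
    # Under the "dimensions" keyword, a 7-element sequence becomes a DimensionSet:
    assert normalize_data([0, 2, -1, 0, 0, 0, 0], keywords=("dimensions",)) == FoamFile.DimensionSet(length=2, time=-1)
    # A keyword that would otherwise parse as a switch stays a plain string:
    assert normalize_keyword("yes") == "yes"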
@@ -1,6 +1,6 @@
 import numpy as np
 from foamlib import FoamFile
-from foamlib._files._serialization import Kind, dumps
+from foamlib._files._serialization import dumps
 
 
 def test_serialize_data() -> None:
@@ -10,34 +10,55 @@ def test_serialize_data() -> None:
     assert dumps(True) == b"yes"
     assert dumps(False) == b"no"
     assert dumps("word") == b"word"
-    assert dumps(("word", "word")) == b"word word"
+    assert dumps(("word", "word"), keywords=()) == b"word word"
     assert dumps('"a string"') == b'"a string"'
-    assert dumps(1, kind=Kind.ASCII_FIELD) == b"uniform 1.0"
-    assert dumps(1.0, kind=Kind.ASCII_FIELD) == b"uniform 1.0"
-    assert dumps(1.0e-3, kind=Kind.ASCII_FIELD) == b"uniform 0.001"
+    assert dumps(1, keywords=("internalField",)) == b"uniform 1.0"
+    assert dumps(1.0, keywords=("internalField",)) == b"uniform 1.0"
+    assert dumps(1.0e-3, keywords=("internalField",)) == b"uniform 0.001"
     assert dumps([1.0, 2.0, 3.0]) == b"(1.0 2.0 3.0)"
-    assert dumps([1, 2, 3], kind=Kind.ASCII_FIELD) == b"uniform (1.0 2.0 3.0)"
+    assert dumps([1, 2, 3], keywords=("internalField",)) == b"uniform (1.0 2.0 3.0)"
     assert (
-        dumps([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], kind=Kind.ASCII_FIELD)
+        dumps([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], keywords=("internalField",))
         == b"nonuniform List<scalar> 10(1.0 2.0 3.0 4.0 5.0 6.0 7.0 8.0 9.0 10.0)"
     )
     assert (
-        dumps([[1, 2, 3], [4, 5, 6]], kind=Kind.ASCII_FIELD)
+        dumps([[1, 2, 3], [4, 5, 6]], keywords=("internalField",))
         == b"nonuniform List<vector> 2((1.0 2.0 3.0) (4.0 5.0 6.0))"
     )
-    assert dumps(1, kind=Kind.BINARY_FIELD) == b"uniform 1.0"
-    assert dumps(1.0, kind=Kind.BINARY_FIELD) == b"uniform 1.0"
-    assert dumps([1, 2, 3], kind=Kind.BINARY_FIELD) == b"uniform (1.0 2.0 3.0)"
     assert (
-        dumps([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], kind=Kind.BINARY_FIELD)
+        dumps(1, keywords=("internalField",), header={"format": "binary"})
+        == b"uniform 1.0"
+    )
+    assert (
+        dumps(1.0, keywords=("internalField",), header={"format": "binary"})
+        == b"uniform 1.0"
+    )
+    assert (
+        dumps([1, 2, 3], keywords=("internalField",), header={"format": "binary"})
+        == b"uniform (1.0 2.0 3.0)"
+    )
+    assert (
+        dumps(
+            [1, 2, 3, 4, 5, 6, 7, 8, 9, 10],
+            keywords=("internalField",),
+            header={"format": "binary"},
+        )
         == b'nonuniform List<scalar> 10(\x00\x00\x00\x00\x00\x00\xf0?\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x08@\x00\x00\x00\x00\x00\x00\x10@\x00\x00\x00\x00\x00\x00\x14@\x00\x00\x00\x00\x00\x00\x18@\x00\x00\x00\x00\x00\x00\x1c@\x00\x00\x00\x00\x00\x00 @\x00\x00\x00\x00\x00\x00"@\x00\x00\x00\x00\x00\x00$@)'
     )
     assert (
-        dumps([[1, 2, 3], [4, 5, 6]], kind=Kind.BINARY_FIELD)
+        dumps(
+            [[1, 2, 3], [4, 5, 6]],
+            keywords=("internalField",),
+            header={"format": "binary"},
+        )
         == b"nonuniform List<vector> 2(\x00\x00\x00\x00\x00\x00\xf0?\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x08@\x00\x00\x00\x00\x00\x00\x10@\x00\x00\x00\x00\x00\x00\x14@\x00\x00\x00\x00\x00\x00\x18@)"
     )
     assert (
-        dumps(np.array([1, 2], dtype=np.float32), kind=Kind.BINARY_FIELD)  # type: ignore [arg-type]
+        dumps(
+            np.array([1, 2], dtype=np.float32),  # type: ignore[arg-type]
+            keywords=("internalField",),
+            header={"format": "binary"},
+        )
         == b"nonuniform List<scalar> 2(\x00\x00\x80?\x00\x00\x00@)"
     )
     assert (
@@ -62,7 +83,10 @@ def test_serialize_data() -> None:
         == b"[1 1 -2 0 0 0 0] 9.81"
     )
     assert (
-        dumps(("hex", [0, 1, 2, 3, 4, 5, 6, 7], [1, 1, 1], "simpleGrading", [1, 1, 1]))
+        dumps(
+            ("hex", [0, 1, 2, 3, 4, 5, 6, 7], [1, 1, 1], "simpleGrading", [1, 1, 1]),
+            keywords=(),
+        )
         == b"hex (0 1 2 3 4 5 6 7) (1 1 1) simpleGrading (1 1 1)"
     )
     assert dumps([("a", "b"), ("c", "d")]) == b"(a b; c d;)"
@@ -71,3 +95,19 @@ def test_serialize_data() -> None:
     assert dumps([{"a": "b", "c": "d"}, {"e": "g"}]) == b"({a b; c d;} {e g;})"
     assert dumps(["water", "oil", "mercury", "air"]) == b"(water oil mercury air)"
     assert dumps("div(phi,U)") == b"div(phi,U)"
+
+
+def test_serialize_file() -> None:
+    assert FoamFile.dumps(1.0, ensure_header=False) == b"1.0"
+    assert (
+        FoamFile.dumps(1.0)
+        == b"{FoamFile {version 2.0; format ascii; class dictionary;}} 1.0"
+    )
+    assert (
+        FoamFile.dumps([{"a": "b", "c": "d"}, {"e": "g"}])
+        == b"{FoamFile {version 2.0; format ascii; class dictionary;}} ({a b; c d;} {e g;})"
+    )
+    assert (
+        FoamFile.dumps({"internalField": [[1, 2, 3], [4, 5, 6]]})
+        == b"{FoamFile {version 2.0; format ascii; class volVectorField;}} internalField nonuniform List<vector> 2((1.0 2.0 3.0) (4.0 5.0 6.0));"
+    )
@@ -5,6 +5,7 @@ from foamlib._files._types import is_sequence
 
 
 def test_parse_value() -> None:
+    assert Parsed(b"1")[()] == 1
     assert Parsed(b"1")[()] == 1
     assert Parsed(b"1.0")[()] == 1.0
     assert Parsed(b"1.0e-3")[()] == 1.0e-3
@@ -0,0 +1,69 @@
+import numpy as np
+from foamlib import FoamFile
+
+
+def test_loads() -> None:
+    assert FoamFile.loads("") == {}
+    assert FoamFile.loads("1") == 1
+    assert FoamFile.loads("FoamFile {} 1") == 1
+    assert FoamFile.loads("FoamFile {} 1", include_header=True) == {
+        "FoamFile": {},
+        None: 1,
+    }
+    assert FoamFile.loads("1.0") == 1.0
+    assert FoamFile.loads("1.0e-3") == 1.0e-3
+    assert FoamFile.loads("yes") is True
+    assert FoamFile.loads("no") is False
+    assert FoamFile.loads("word") == "word"
+    assert FoamFile.loads("word word") == ("word", "word")
+    assert FoamFile.loads('"a string"') == '"a string"'
+    assert FoamFile.loads("(word word)") == ["word", "word"]
+    assert FoamFile.loads("uniform 1") == 1
+    assert FoamFile.loads("uniform 1.0") == 1.0
+    assert FoamFile.loads("uniform 1.0e-3") == 1.0e-3
+    assert np.array_equal(FoamFile.loads("(1 2 3)"), [1, 2, 3])  # type: ignore[arg-type]
+    assert np.array_equal(FoamFile.loads("3(1 2 3)"), [1, 2, 3])  # type: ignore[arg-type]
+    assert np.array_equal(
+        FoamFile.loads("2((1 2 3) (4 5 6))"),  # type: ignore[arg-type]
+        [
+            [1, 2, 3],
+            [4, 5, 6],
+        ],
+    )
+    assert np.array_equal(
+        FoamFile.loads("2{(1 2 3)}"),  # type: ignore[arg-type]
+        [
+            [1, 2, 3],
+            [1, 2, 3],
+        ],
+    )
+    assert FoamFile.loads("0()") == []
+    assert np.array_equal(
+        FoamFile.loads(
+            b"nonuniform List<scalar> 2(\x00\x00\x00\x00\x00\x00\xf0?\x00\x00\x00\x00\x00\x00\x00@)"
+        ),  # type: ignore[arg-type]
+        [1, 2],
+    )
+    assert np.array_equal(
+        FoamFile.loads(
+            b"nonuniform List<vector> 2(\x00\x00\x00\x00\x00\x00\xf0?\x00\x00\x00\x00\x00\x00\x00@\x00\x00\x00\x00\x00\x00\x08@\x00\x00\x00\x00\x00\x00\x10@\x00\x00\x00\x00\x00\x00\x14@\x00\x00\x00\x00\x00\x00\x18@)"
        ),  # type: ignore[arg-type]
+        [[1, 2, 3], [4, 5, 6]],
+    )
+    assert np.array_equal(
+        FoamFile.loads(b"nonuniform List<scalar> 2(\x00\x00\x80?\x00\x00\x00@)"),  # type: ignore[arg-type]
+        [1, 2],
+    )
+    assert FoamFile.loads("[1 1 -2 0 0 0 0]") == FoamFile.DimensionSet(
+        mass=1, length=1, time=-2
+    )
+    assert FoamFile.loads("g [1 1 -2 0 0 0 0] (0 0 -9.81)") == FoamFile.Dimensioned(
+        name="g",
+        dimensions=FoamFile.DimensionSet(mass=1, length=1, time=-2),
+        value=[0, 0, -9.81],
+    )
+    assert FoamFile.loads("[1 1 -2 0 0 0 0] 9.81") == FoamFile.Dimensioned(
+        dimensions=FoamFile.DimensionSet(mass=1, length=1, time=-2), value=9.81
+    )
+    assert FoamFile.loads("a {b c; d e;}") == {"a": {"b": "c", "d": "e"}}
+    assert FoamFile.loads("(a b; c d;)") == [("a", "b"), ("c", "d")]
@@ -1,202 +0,0 @@
-from __future__ import annotations
-
-import sys
-from enum import Enum, auto
-from typing import overload
-
-if sys.version_info >= (3, 9):
-    from collections.abc import Mapping
-else:
-    from typing import Mapping
-
-import numpy as np
-
-from ._parsing import parse_data
-from ._types import (
-    Data,
-    DataLike,
-    Dimensioned,
-    DimensionSet,
-    Entry,
-    EntryLike,
-    is_sequence,
-)
-
-
-class Kind(Enum):
-    DEFAULT = auto()
-    SINGLE_ENTRY = auto()
-    ASCII_FIELD = auto()
-    SCALAR_ASCII_FIELD = auto()
-    BINARY_FIELD = auto()
-    SCALAR_BINARY_FIELD = auto()
-    DIMENSIONS = auto()
-    KEYWORD = auto()
-
-
-@overload
-def normalize(data: DataLike, *, kind: Kind = Kind.DEFAULT) -> Data: ...
-
-
-@overload
-def normalize(data: EntryLike, *, kind: Kind = Kind.DEFAULT) -> Entry: ...
-
-
-def normalize(data: EntryLike, *, kind: Kind = Kind.DEFAULT) -> Entry:
-    if kind in (
-        Kind.ASCII_FIELD,
-        Kind.SCALAR_ASCII_FIELD,
-        Kind.BINARY_FIELD,
-        Kind.SCALAR_BINARY_FIELD,
-    ):
-        if is_sequence(data):
-            try:
-                arr = np.asarray(data)
-            except ValueError:
-                pass
-            else:
-                if not np.issubdtype(arr.dtype, np.floating):
-                    arr = arr.astype(float)
-
-                if arr.ndim == 1 or (arr.ndim == 2 and arr.shape[1] in (3, 6, 9)):
-                    return arr  # type: ignore [return-value]
-
-            return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
-
-        if isinstance(data, int):
-            return float(data)
-
-        return normalize(data)
-
-    if isinstance(data, np.ndarray):
-        ret = data.tolist()
-        assert isinstance(ret, (int, float, list))
-        return ret
-
-    if isinstance(data, Mapping):
-        return {k: normalize(v, kind=kind) for k, v in data.items()}
-
-    if (
-        kind == Kind.DIMENSIONS
-        and is_sequence(data)
-        and len(data) <= 7
-        and all(isinstance(d, (int, float)) for d in data)
-    ):
-        return DimensionSet(*data)
-
-    if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
-        k, v = data
-        return (normalize(k), normalize(v))
-
-    if is_sequence(data) and (kind == Kind.SINGLE_ENTRY or not isinstance(data, tuple)):
-        return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
-
-    if isinstance(data, str):
-        parsed_data = parse_data(data)
-        if kind == Kind.KEYWORD and isinstance(parsed_data, bool):
-            return data
-        return parsed_data
-
-    if isinstance(
-        data,
-        (int, float, bool, tuple, DimensionSet, Dimensioned),
-    ):
-        return data
-
-    msg = f"Unsupported data type: {type(data)}"
-    raise TypeError(msg)
-
-
-def dumps(
-    data: EntryLike,
-    *,
-    kind: Kind = Kind.DEFAULT,
-) -> bytes:
-    data = normalize(data, kind=kind)
-
-    if isinstance(data, Mapping):
-        return (
-            b"{"
-            + b" ".join(dumps((k, v), kind=Kind.SINGLE_ENTRY) for k, v in data.items())
-            + b"}"
-        )
-
-    if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
-        k, v = data
-        ret = dumps(k)
-        val = dumps(v)
-        if val:
-            ret += b" " + val
-        if not isinstance(v, Mapping):
-            ret += b";"
-        return ret
-
-    if isinstance(data, DimensionSet):
-        return b"[" + b" ".join(dumps(v) for v in data) + b"]"
-
-    if kind in (
-        Kind.ASCII_FIELD,
-        Kind.SCALAR_ASCII_FIELD,
-        Kind.BINARY_FIELD,
-        Kind.SCALAR_BINARY_FIELD,
-    ) and (isinstance(data, (int, float, np.ndarray))):
-        shape = np.shape(data)
-        if not shape or (
-            kind not in (Kind.SCALAR_ASCII_FIELD, Kind.SCALAR_BINARY_FIELD)
-            and shape in ((3,), (6,), (9,))
-        ):
-            return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
-
-        assert isinstance(data, np.ndarray)
-        ndim = len(shape)
-        if ndim == 1:
-            tensor_kind = b"scalar"
-
-        elif ndim == 2:
-            if shape[1] == 3:
-                tensor_kind = b"vector"
-            elif shape[1] == 6:
-                tensor_kind = b"symmTensor"
-            elif shape[1] == 9:
-                tensor_kind = b"tensor"
-            else:
-                return dumps(data)
-
-        else:
-            return dumps(data)
-
-        if kind in (Kind.BINARY_FIELD, Kind.SCALAR_BINARY_FIELD):
-            contents = b"(" + data.tobytes() + b")"
-        else:
-            assert kind in (Kind.ASCII_FIELD, Kind.SCALAR_ASCII_FIELD)
-            contents = dumps(data, kind=Kind.SINGLE_ENTRY)
-
-        return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
-
-    if isinstance(data, Dimensioned):
-        if data.name is not None:
-            return (
-                dumps(data.name)
-                + b" "
-                + dumps(data.dimensions, kind=Kind.DIMENSIONS)
-                + b" "
-                + dumps(data.value, kind=Kind.SINGLE_ENTRY)
-            )
-        return (
-            dumps(data.dimensions, kind=Kind.DIMENSIONS)
-            + b" "
-            + dumps(data.value, kind=Kind.SINGLE_ENTRY)
-        )
-
-    if isinstance(data, tuple):
-        return b" ".join(dumps(v) for v in data)
-
-    if is_sequence(data) and not isinstance(data, tuple):
-        return b"(" + b" ".join(dumps(v, kind=Kind.SINGLE_ENTRY) for v in data) + b")"
-
-    if data is True:
-        return b"yes"
-    if data is False:
-        return b"no"
-
-    return str(data).encode("latin-1")
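
For code that reached into this private module, the rough migration away from the removed `Kind` enum (both sides taken from the old and new tests) is:

    from foamlib._files._serialization import dumps

    # 0.9.1: dumps([1, 2, 3], kind=Kind.ASCII_FIELD)
    assert dumps([1, 2, 3], keywords=("internalField",)) == b"uniform (1.0 2.0 3.0)"

    # 0.9.1: dumps([1, 2, 3], kind=Kind.BINARY_FIELD)
    assert (
        dumps([1, 2, 3], keywords=("internalField",), header={"format": "binary"})
        == b"uniform (1.0 2.0 3.0)"
    )

    # 0.9.1: dumps(dims, kind=Kind.DIMENSIONS) -- now inferred from keywords=("dimensions",)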