foamlib 0.9.1.tar.gz → 0.9.3.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. {foamlib-0.9.1 → foamlib-0.9.3}/PKG-INFO +1 -1
  2. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/__init__.py +1 -1
  3. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_files/_files.py +143 -48
  4. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_files/_parsing.py +40 -27
  5. foamlib-0.9.3/foamlib/_files/_serialization.py +241 -0
  6. foamlib-0.9.3/foamlib/_files/_types.py +252 -0
  7. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_dumps.py +55 -15
  8. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_files.py +5 -6
  9. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_parsing/test_basic.py +11 -8
  10. foamlib-0.9.3/tests/test_files/test_parsing/test_loads.py +73 -0
  11. foamlib-0.9.3/tests/test_files/test_types.py +62 -0
  12. foamlib-0.9.1/foamlib/_files/_serialization.py +0 -202
  13. foamlib-0.9.1/foamlib/_files/_types.py +0 -136
  14. {foamlib-0.9.1 → foamlib-0.9.3}/.devcontainer.json +0 -0
  15. {foamlib-0.9.1 → foamlib-0.9.3}/.dockerignore +0 -0
  16. {foamlib-0.9.1 → foamlib-0.9.3}/.git-blame-ignore-revs +0 -0
  17. {foamlib-0.9.1 → foamlib-0.9.3}/.github/dependabot.yml +0 -0
  18. {foamlib-0.9.1 → foamlib-0.9.3}/.github/workflows/ci.yml +0 -0
  19. {foamlib-0.9.1 → foamlib-0.9.3}/.github/workflows/docker.yml +0 -0
  20. {foamlib-0.9.1 → foamlib-0.9.3}/.github/workflows/dockerhub-description.yml +0 -0
  21. {foamlib-0.9.1 → foamlib-0.9.3}/.github/workflows/pypi-publish.yml +0 -0
  22. {foamlib-0.9.1 → foamlib-0.9.3}/.gitignore +0 -0
  23. {foamlib-0.9.1 → foamlib-0.9.3}/.readthedocs.yaml +0 -0
  24. {foamlib-0.9.1 → foamlib-0.9.3}/CONTRIBUTING.md +0 -0
  25. {foamlib-0.9.1 → foamlib-0.9.3}/Dockerfile +0 -0
  26. {foamlib-0.9.1 → foamlib-0.9.3}/LICENSE.txt +0 -0
  27. {foamlib-0.9.1 → foamlib-0.9.3}/README.md +0 -0
  28. {foamlib-0.9.1 → foamlib-0.9.3}/benchmark/benchmark.png +0 -0
  29. {foamlib-0.9.1 → foamlib-0.9.3}/benchmark/benchmark.py +0 -0
  30. {foamlib-0.9.1 → foamlib-0.9.3}/benchmark/requirements.txt +0 -0
  31. {foamlib-0.9.1 → foamlib-0.9.3}/benchmark/ruff.toml +0 -0
  32. {foamlib-0.9.1 → foamlib-0.9.3}/docs/Makefile +0 -0
  33. {foamlib-0.9.1 → foamlib-0.9.3}/docs/cases.rst +0 -0
  34. {foamlib-0.9.1 → foamlib-0.9.3}/docs/conf.py +0 -0
  35. {foamlib-0.9.1 → foamlib-0.9.3}/docs/files.rst +0 -0
  36. {foamlib-0.9.1 → foamlib-0.9.3}/docs/index.rst +0 -0
  37. {foamlib-0.9.1 → foamlib-0.9.3}/docs/make.bat +0 -0
  38. {foamlib-0.9.1 → foamlib-0.9.3}/docs/ruff.toml +0 -0
  39. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_cases/__init__.py +0 -0
  40. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_cases/_async.py +0 -0
  41. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_cases/_base.py +0 -0
  42. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_cases/_run.py +0 -0
  43. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_cases/_slurm.py +0 -0
  44. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_cases/_subprocess.py +0 -0
  45. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_cases/_sync.py +0 -0
  46. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_cases/_util.py +0 -0
  47. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_files/__init__.py +0 -0
  48. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_files/_io.py +0 -0
  49. {foamlib-0.9.1 → foamlib-0.9.3}/foamlib/py.typed +0 -0
  50. {foamlib-0.9.1 → foamlib-0.9.3}/logo.png +0 -0
  51. {foamlib-0.9.1 → foamlib-0.9.3}/pyproject.toml +0 -0
  52. {foamlib-0.9.1 → foamlib-0.9.3}/tests/__init__.py +0 -0
  53. {foamlib-0.9.1 → foamlib-0.9.3}/tests/ruff.toml +0 -0
  54. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_cases/__init__.py +0 -0
  55. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_cases/test_cavity.py +0 -0
  56. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_cases/test_cavity_async.py +0 -0
  57. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_cases/test_flange.py +0 -0
  58. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_cases/test_flange_async.py +0 -0
  59. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_example.py +0 -0
  60. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/__init__.py +0 -0
  61. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_parsing/__init__.py +0 -0
  62. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_parsing/test_advanced.py +0 -0
  63. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_parsing/test_decompose_par.py +0 -0
  64. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_parsing/test_fields.py +0 -0
  65. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_parsing/test_fv_schemes.py +0 -0
  66. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_parsing/test_intermediate.py +0 -0
  67. {foamlib-0.9.1 → foamlib-0.9.3}/tests/test_files/test_parsing/test_poly_mesh.py +0 -0

{foamlib-0.9.1 → foamlib-0.9.3}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: foamlib
-Version: 0.9.1
+Version: 0.9.3
 Summary: A Python interface for interacting with OpenFOAM
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib

{foamlib-0.9.1 → foamlib-0.9.3}/foamlib/__init__.py
@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.9.1"
+__version__ = "0.9.3"
 
 from ._cases import (
     AsyncFoamCase,

{foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_files/_files.py
@@ -17,9 +17,11 @@ else:
     import numpy as np
 
 from ._io import FoamFileIO
-from ._serialization import Kind, dumps, normalize
+from ._parsing import loads
+from ._serialization import dumps, normalize_data, normalize_keyword
 from ._types import (
     Data,
+    DataLike,
     Dict_,
     Dimensioned,
     DimensionSet,
@@ -31,6 +33,32 @@ from ._types import (
 )
 
 
+def _tensor_kind_for_field(
+    field: FieldLike,
+) -> str:
+    shape = np.shape(field)  # type: ignore [arg-type]
+    if not shape:
+        return "scalar"
+    if shape == (3,):
+        return "vector"
+    if shape == (6,):
+        return "symmTensor"
+    if shape == (9,):
+        return "tensor"
+    if len(shape) == 1:
+        return "scalar"
+    if len(shape) == 2:
+        if shape[1] == 3:
+            return "vector"
+        if shape[1] == 6:
+            return "symmTensor"
+        if shape[1] == 9:
+            return "tensor"
+
+    msg = f"Invalid field shape: {shape}"
+    raise ValueError(msg)
+
+
 class FoamFile(
     MutableMapping[
         Optional[Union[str, Tuple[str, ...]]],
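
Note: the new `_tensor_kind_for_field` helper decides the tensor kind purely from the value's array shape. For reference, the rules it encodes, spelled out in a plain illustration (not code from the package):

```python
import numpy as np

# Shape -> kind, as implemented by _tensor_kind_for_field above:
#   ()     -> "scalar"      (a single number)
#   (3,)   -> "vector"      (checked before the generic 1-D case)
#   (6,)   -> "symmTensor"
#   (9,)   -> "tensor"
#   (n,)   -> "scalar"      (a list of n scalars, n not in {3, 6, 9})
#   (n, 3) -> "vector"      (a list of n vectors; likewise 6 and 9)
print(np.shape(1.0))              # ()
print(np.shape([0.0, 0.0, 0.0]))  # (3,)
print(np.shape([[0.0] * 6] * 4))  # (4, 6)
```
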
@@ -261,7 +289,7 @@
         elif not isinstance(keywords, tuple):
             keywords = (keywords,)
 
-        if keywords and not isinstance(normalize(keywords[-1], kind=Kind.KEYWORD), str):
+        if keywords and not isinstance(normalize_keyword(keywords[-1]), str):
             msg = f"Invalid keyword: {keywords[-1]}"
             raise ValueError(msg)
 
@@ -283,52 +311,26 @@
                     self.path.stem if self.path.suffix == ".gz" else self.path.name
                 )
 
-            kind = Kind.DEFAULT
-            if keywords == ("internalField",) or (
-                len(keywords) == 3
-                and keywords[0] == "boundaryField"
-                and (
-                    keywords[2] in ("value", "gradient")
-                    or keywords[2].endswith("Value")
-                    or keywords[2].endswith("Gradient")
-                )
-            ):
-                kind = (
-                    Kind.BINARY_FIELD if self.format == "binary" else Kind.ASCII_FIELD
-                )
-            elif keywords == ("dimensions",):
-                kind = Kind.DIMENSIONS
-
             if (
-                kind in (Kind.ASCII_FIELD, Kind.BINARY_FIELD)
+                keywords == ("internalField",)
+                or (
+                    len(keywords) == 3
+                    and keywords[0] == "boundaryField"
+                    and (
+                        keywords[2] == "value"
+                        or keywords[2] == "gradient"
+                        or keywords[2].endswith(("Value", "Gradient"))
+                    )
+                )
             ) and self.class_ == "dictionary":
                 try:
-                    shape = np.shape(data)  # type: ignore [arg-type]
+                    tensor_kind = _tensor_kind_for_field(data)  # type: ignore [arg-type]
                 except ValueError:
                     pass
                 else:
-                    if not shape:
-                        self.class_ = "volScalarField"
-                    elif shape == (3,):
-                        self.class_ = "volVectorField"
-                    elif shape == (6,):
-                        self.class_ = "volSymmTensorField"
-                    elif shape == (9,):
-                        self.class_ = "volTensorField"
-                    elif len(shape) == 1:
-                        self.class_ = "volScalarField"
-                    elif len(shape) == 2:
-                        if shape[1] == 3:
-                            self.class_ = "volVectorField"
-                        elif shape[1] == 6:
-                            self.class_ = "volSymmTensorField"
-                        elif shape[1] == 9:
-                            self.class_ = "volTensorField"
-
-                if kind == Kind.ASCII_FIELD and self.class_.endswith("scalarField"):
-                    kind = Kind.SCALAR_ASCII_FIELD
-                elif kind == Kind.BINARY_FIELD and self.class_.endswith("scalarField"):
-                    kind = Kind.SCALAR_BINARY_FIELD
+                    self.class_ = (
+                        "vol" + tensor_kind[0].upper() + tensor_kind[1:] + "Field"
+                    )
 
             parsed = self._get_parsed(missing_ok=True)
 
@@ -360,7 +362,7 @@
                     ...,
                     before
                     + indentation
-                    + dumps(keywords[-1])
+                    + dumps(normalize_keyword(keywords[-1]))
                     + b"\n"
                     + indentation
                     + b"{\n"
@@ -373,23 +375,37 @@
                     self[(*keywords, k)] = v
 
             elif keywords:
-                val = dumps(data, kind=kind)
+                header = self.get("FoamFile", None)
+                assert header is None or isinstance(header, FoamFile.SubDict)
+                val = dumps(
+                    data,
+                    keywords=keywords,
+                    header=header,
+                )
                 parsed.put(
                     keywords,
-                    normalize(data, kind=kind),
+                    normalize_data(data, keywords=keywords),
                     before
                     + indentation
-                    + dumps(keywords[-1])
+                    + dumps(normalize_keyword(keywords[-1]))
                     + ((b" " + val) if val else b"")
                     + (b";" if not keywords[-1].startswith("#") else b"")
                     + after,
                 )
 
             else:
+                header = self.get("FoamFile", None)
+                assert header is None or isinstance(header, FoamFile.SubDict)
                 parsed.put(
                     (),
-                    normalize(data, kind=kind),
-                    before + dumps(data, kind=kind) + after,
+                    normalize_data(data, keywords=keywords),
+                    before
+                    + dumps(
+                        data,
+                        keywords=(),
+                        header=header,
+                    )
+                    + after,
                 )
 
     def __delitem__(self, keywords: str | tuple[str, ...] | None) -> None:
@@ -442,6 +458,85 @@
         d.pop("FoamFile", None)
         return deepcopy(d)
 
+    @staticmethod
+    def loads(
+        s: bytes | str,
+        *,
+        include_header: bool = False,
+    ) -> File | Data:
+        """
+        Standalone deserializing function.
+
+        Deserialize the OpenFOAM FoamFile format to Python objects.
+
+        :param s: The string (or bytes) to deserialize, in the OpenFOAM
+            FoamFile format.
+        :param include_header: Whether to include the "FoamFile" header in the output.
+            If `True`, the header will be included if it is present in the input.
+        """
+        ret = loads(s)
+
+        if not include_header and isinstance(ret, Mapping) and "FoamFile" in ret:
+            del ret["FoamFile"]
+            if len(ret) == 1 and None in ret:
+                val = ret[None]
+                assert not isinstance(val, Mapping)
+                return val
+
+        return ret
+
+    @staticmethod
+    def dumps(file: File | DataLike, *, ensure_header: bool = True) -> bytes:
+        """
+        Standalone serializing function.
+
+        Serialize Python objects to the OpenFOAM FoamFile format.
+
+        :param file: The Python object to serialize. This can be a dictionary, list,
+            or any other object that can be serialized to the OpenFOAM format.
+        :param ensure_header: Whether to include the "FoamFile" header in the output.
+            If `True`, a header will be included if it is not already present in the
+            input object.
+        """
+        if isinstance(file, Mapping):
+            header = file.get("FoamFile", None)
+            assert isinstance(header, FoamFile.SubDict) or header is None
+            entries: list[bytes] = []
+            for k, v in file.items():
+                if k is not None:
+                    entries.append(
+                        dumps((k, v), keywords=(), header=header, tuple_is_entry=True)
+                    )
+                else:
+                    entries.append(dumps(v, keywords=(), header=header))
+            ret = b" ".join(entries)
+        else:
+            header = None
+            ret = dumps(file)
+
+        if header is None and ensure_header:
+            class_ = "dictionary"
+            if isinstance(file, Mapping) and "internalField" in file:
+                try:
+                    tensor_kind = _tensor_kind_for_field(file["internalField"])  # type: ignore [arg-type]
+                except (ValueError, TypeError):
+                    pass
+                else:
+                    class_ = "vol" + tensor_kind[0].upper() + tensor_kind[1:] + "Field"
+
+            header = {"version": 2.0, "format": "ascii", "class": class_}
+
+            ret = (
+                dumps(
+                    {"FoamFile": header},
+                    keywords=(),
+                )
+                + b" "
+                + ret
+            )
+
+        return ret
+
 
 class FoamFieldFile(FoamFile):
     """

{foamlib-0.9.1 → foamlib-0.9.3}/foamlib/_files/_parsing.py
@@ -363,7 +363,9 @@ _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
 _DIRECTIVE = Word("#", _IDENTBODYCHARS)
 _TOKEN = dbl_quoted_string | _DIRECTIVE | _IDENTIFIER
 _DATA = Forward()
-_KEYWORD_ENTRY = _keyword_entry_of(_TOKEN | _list_of(_IDENTIFIER), _DATA)
+_KEYWORD_ENTRY = _keyword_entry_of(
+    _TOKEN | _list_of(_IDENTIFIER), Opt(_DATA, default="")
+)
 _DICT = _dict_of(_TOKEN, _DATA)
 _DATA_ENTRY = Forward()
 _LIST_ENTRY = _DICT | _KEYWORD_ENTRY | _DATA_ENTRY
@@ -380,21 +382,41 @@ _NUMBER = (
 )
 _DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | _TOKEN
 
-_DATA <<= (
-    _DATA_ENTRY[1, ...]
-    .set_parse_action(lambda tks: [tuple(tks)] if len(tks) > 1 else [tks[0]])
+_DATA <<= _DATA_ENTRY[1, ...].set_parse_action(
+    lambda tks: [tuple(tks)] if len(tks) > 1 else [tks[0]]
+)
+
+_STANDALONE_DATA = (
+    _ascii_numeric_list(dtype=int, ignore=_COMMENT)
+    | _binary_numeric_list(dtype=np.int64)
+    | _binary_numeric_list(dtype=np.int32)
+    | _ascii_numeric_list(dtype=float, nested=3, ignore=_COMMENT)
+    | _binary_numeric_list(dtype=np.float64, nested=3)
+    | _binary_numeric_list(dtype=np.float32, nested=3)
+    | _DATA
+).add_parse_action(lambda tks: [None, tks[0]])
+
+
+_FILE = (
+    Dict(_KEYWORD_ENTRY[...] + Opt(Group(_STANDALONE_DATA)) + _KEYWORD_ENTRY[...])
     .ignore(_COMMENT)
     .parse_with_tabs()
 )
 
 
-def parse_data(s: str) -> Data:
-    if not s.strip():
-        return ""
-    return cast("Data", _DATA.parse_string(s, parse_all=True)[0])
+def loads(s: bytes | str) -> File | Data:
+    if isinstance(s, bytes):
+        s = s.decode("latin-1")
+
+    file = _FILE.parse_string(s, parse_all=True).as_dict()
 
+    if len(file) == 1 and None in file:
+        return file[None]  # type: ignore[no-any-return]
 
-_LOCATED_DICTIONARY = Group(
+    return file
+
+
+_LOCATED_KEYWORD_ENTRIES = Group(
     _keyword_entry_of(
         _TOKEN,
         Opt(_DATA, default=""),
@@ -403,22 +425,14 @@ _LOCATED_DICTIONARY = Group(
         located=True,
     )
 )[...]
-_LOCATED_DATA = Group(
-    Located(
-        (
-            _ascii_numeric_list(dtype=int, ignore=_COMMENT)
-            | _binary_numeric_list(dtype=np.int64)
-            | _binary_numeric_list(dtype=np.int32)
-            | _ascii_numeric_list(dtype=float, nested=3, ignore=_COMMENT)
-            | _binary_numeric_list(dtype=np.float64, nested=3)
-            | _binary_numeric_list(dtype=np.float32, nested=3)
-            | _DATA
-        ).add_parse_action(lambda tks: ["", tks[0]])
-    )
-)
+_LOCATED_STANDALONE_DATA = Group(Located(_STANDALONE_DATA))
 
-_FILE = (
-    Dict(_LOCATED_DICTIONARY + Opt(_LOCATED_DATA) + _LOCATED_DICTIONARY)
+_LOCATED_FILE = (
+    Dict(
+        _LOCATED_KEYWORD_ENTRIES
+        + Opt(_LOCATED_STANDALONE_DATA)
+        + _LOCATED_KEYWORD_ENTRIES
+    )
     .ignore(_COMMENT)
     .parse_with_tabs()
 )
@@ -430,7 +444,7 @@ class Parsed(Mapping[Tuple[str, ...], Union[Data, EllipsisType]]):
             tuple[str, ...],
             tuple[int, Data | EllipsisType, int],
         ] = {}
-        for parse_result in _FILE.parse_string(
+        for parse_result in _LOCATED_FILE.parse_string(
             contents.decode("latin-1"), parse_all=True
         ):
            self._parsed.update(self._flatten_result(parse_result))
@@ -453,8 +467,7 @@
         end = parse_result.locn_end
         assert isinstance(end, int)
         keyword, *data = item
-        assert isinstance(keyword, str)
-        if not keyword:
+        if keyword is None:
             assert not _keywords
             assert len(data) == 1
             assert not isinstance(data[0], ParseResults)
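
Taken together, the parser changes promote keyword-less file bodies (as in `polyMesh` files) to a shared `_STANDALONE_DATA` rule used by both the new standalone `loads` path and the located `Parsed` path, keyed under `None` instead of the old empty-string sentinel. An illustration via the public wrapper; the exact return values are expectations based on the grammar above:

```python
from foamlib import FoamFile

# Keyword entries parse to a dict...
print(FoamFile.loads(b"nu 1e-05; model Newtonian;"))
# expected: {'nu': 1e-05, 'model': 'Newtonian'}

# ...while a body that is just standalone data is returned directly
# (numeric lists come back as numpy arrays).
print(FoamFile.loads(b"(1 2 3)"))
# expected: [1 2 3]
```
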

foamlib-0.9.3/foamlib/_files/_serialization.py (new file)
@@ -0,0 +1,241 @@
+from __future__ import annotations
+
+import sys
+from typing import overload
+
+if sys.version_info >= (3, 9):
+    from collections.abc import Mapping
+else:
+    from typing import Mapping
+
+import numpy as np
+
+from ._parsing import loads
+from ._types import (
+    Data,
+    DataLike,
+    Dimensioned,
+    DimensionSet,
+    Entry,
+    EntryLike,
+    is_sequence,
+)
+
+
+@overload
+def normalize_data(
+    data: DataLike, *, keywords: tuple[str, ...] | None = None
+) -> Data: ...
+
+
+@overload
+def normalize_data(
+    data: EntryLike, *, keywords: tuple[str, ...] | None = None
+) -> Entry: ...
+
+
+def normalize_data(
+    data: EntryLike, *, keywords: tuple[str, ...] | None = None
+) -> Entry:
+    if keywords is not None and (
+        keywords == ("internalField",)
+        or (
+            len(keywords) == 3
+            and keywords[0] == "boundaryField"
+            and (
+                keywords[2] == "value"
+                or keywords[2] == "gradient"
+                or keywords[2].endswith(("Value", "Gradient"))
+            )
+        )
+    ):
+        if is_sequence(data):
+            try:
+                arr = np.asarray(data)
+            except ValueError:
+                pass
+            else:
+                if not np.issubdtype(arr.dtype, np.floating):
+                    arr = arr.astype(float)
+
+                if arr.ndim == 1 or (arr.ndim == 2 and arr.shape[1] in (3, 6, 9)):
+                    return arr  # type: ignore [return-value]
+
+            return [normalize_data(d) for d in data]
+
+        if isinstance(data, int):
+            return float(data)
+
+        return normalize_data(data)
+
+    if isinstance(data, Mapping):
+        return {normalize_keyword(k): normalize_data(v) for k, v in data.items()}  # type: ignore [misc]
+
+    if isinstance(data, np.ndarray):
+        ret = data.tolist()
+        assert isinstance(ret, (int, float, list))
+        return ret
+
+    if (
+        not isinstance(data, DimensionSet)
+        and keywords is not None
+        and keywords == ("dimensions",)
+        and is_sequence(data)
+        and len(data) <= 7
+        and all(isinstance(d, (int, float)) for d in data)
+    ):
+        return DimensionSet(*data)
+
+    if keywords is None and isinstance(data, tuple) and len(data) == 2:
+        k, v = data
+        assert not isinstance(k, Mapping)
+        return (
+            normalize_keyword(k),
+            normalize_data(v) if not isinstance(v, Mapping) else v,
+        )  # type: ignore [return-value]
+
+    if (
+        is_sequence(data)
+        and not isinstance(data, DimensionSet)
+        and (keywords is None or not isinstance(data, tuple))
+    ):
+        return [normalize_data(d) for d in data]
+
+    if isinstance(data, str):
+        s = loads(data)
+        if isinstance(s, (str, tuple, bool)):
+            return s
+
+    if isinstance(
+        data,
+        (int, float, bool, tuple, DimensionSet, Dimensioned),
+    ):
+        return data
+
+    msg = f"Unsupported data type: {type(data)}"
+    raise TypeError(msg)
+
+
+def normalize_keyword(data: DataLike) -> Data:
+    ret = normalize_data(data)
+
+    if isinstance(data, str) and isinstance(ret, bool):
+        return data
+
+    return ret
+
+
+def dumps(
+    data: EntryLike,
+    *,
+    keywords: tuple[str, ...] | None = None,
+    header: Mapping[str, Entry] | None = None,
+    tuple_is_entry: bool = False,
+) -> bytes:
+    data = normalize_data(data, keywords=keywords)
+
+    if isinstance(data, Mapping):
+        return (
+            b"{"
+            + b" ".join(
+                dumps(
+                    (k, v),
+                    keywords=keywords,
+                    tuple_is_entry=True,
+                )
+                for k, v in data.items()
+            )
+            + b"}"
+        )
+
+    if (
+        keywords is not None
+        and (
+            keywords == ("internalField",)
+            or (
+                len(keywords) == 3
+                and keywords[0] == "boundaryField"
+                and (
+                    keywords[2] == "value"
+                    or keywords[2] == "gradient"
+                    or keywords[2].endswith(("Value", "Gradient"))
+                )
+            )
+        )
+        and isinstance(data, (int, float, np.ndarray))
+    ):
+        data = np.asarray(data)  # type: ignore [assignment]
+        class_ = header.get("class", "") if header else ""
+        assert isinstance(class_, str)
+        scalar = "Scalar" in class_
+
+        shape = np.shape(data)
+        if not shape or (not scalar and shape in ((3,), (6,), (9,))):
+            return b"uniform " + dumps(data)
+
+        assert isinstance(data, np.ndarray)
+        ndim = np.ndim(data)
+        if ndim == 1:
+            tensor_kind = b"scalar"
+
+        elif ndim == 2:
+            assert len(shape) == 2
+            if shape[1] == 3:
+                tensor_kind = b"vector"
+            elif shape[1] == 6:
+                tensor_kind = b"symmTensor"
+            elif shape[1] == 9:
+                tensor_kind = b"tensor"
+            else:
+                return dumps(data)
+
+        else:
+            return dumps(data)
+
+        binary = (header.get("format", "") if header else "") == "binary"
+
+        contents = b"(" + data.tobytes() + b")" if binary else dumps(data)
+
+        return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
+
+    if isinstance(data, DimensionSet):
+        return b"[" + b" ".join(dumps(v) for v in data) + b"]"
+
+    if isinstance(data, Dimensioned):
+        if data.name is not None:
+            return (
+                dumps(data.name)
+                + b" "
+                + dumps(data.dimensions)
+                + b" "
+                + dumps(data.value)
+            )
+        return dumps(data.dimensions) + b" " + dumps(data.value)
+
+    if isinstance(data, tuple):
+        if tuple_is_entry:
+            k, v = data
+            ret = dumps(k)
+            val = dumps(
+                v,
+                keywords=(*keywords, k)
+                if keywords is not None and isinstance(k, str)
+                else None,
+            )
+            if val:
+                ret += b" " + val
+            if not isinstance(v, Mapping):
+                ret += b";"
+            return ret
+
+        return b" ".join(dumps(v) for v in data)
+
+    if is_sequence(data):
+        return b"(" + b" ".join(dumps(v, tuple_is_entry=True) for v in data) + b")"
+
+    if data is True:
+        return b"yes"
+    if data is False:
+        return b"no"
+
+    return str(data).encode("latin-1")
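
The rewritten module drops 0.9.1's `Kind` enum entirely: `dumps` now decides field formatting from the `keywords` path and the `FoamFile` header passed in by the caller. A sketch of the resulting behavior (private module, imported here only for illustration; the byte strings are expectations derived from the code above):

```python
import numpy as np

from foamlib._files._serialization import dumps  # private API

print(dumps(0.0, keywords=("internalField",)))
# expected: b'uniform 0.0'

print(dumps(np.zeros((2, 3)), keywords=("internalField",)))
# expected: b'nonuniform List<vector> 2((0.0 0.0 0.0) (0.0 0.0 0.0))'

print(dumps([1, 2, 3]))
# expected: b'(1 2 3)'
```
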