foamlib 0.8.0__tar.gz → 0.8.2__tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
Files changed (54)
  1. {foamlib-0.8.0 → foamlib-0.8.2}/PKG-INFO +4 -13
  2. foamlib-0.8.2/benchmark.png +0 -0
  3. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/__init__.py +1 -1
  4. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_cases/_async.py +1 -1
  5. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_files/_files.py +33 -42
  6. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_files/_parsing.py +65 -110
  7. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_files/_serialization.py +62 -73
  8. foamlib-0.8.2/foamlib/_files/_types.py +144 -0
  9. {foamlib-0.8.0 → foamlib-0.8.2}/pyproject.toml +3 -8
  10. {foamlib-0.8.0 → foamlib-0.8.2}/tests/test_cases/test_cavity.py +3 -3
  11. {foamlib-0.8.0 → foamlib-0.8.2}/tests/test_cases/test_cavity_async.py +3 -2
  12. {foamlib-0.8.0 → foamlib-0.8.2}/tests/test_files/test_dumps.py +11 -12
  13. {foamlib-0.8.0 → foamlib-0.8.2}/tests/test_files/test_files.py +21 -29
  14. {foamlib-0.8.0 → foamlib-0.8.2}/tests/test_files/test_parsing.py +39 -19
  15. foamlib-0.8.0/benchmark.png +0 -0
  16. foamlib-0.8.0/foamlib/_files/_types.py +0 -79
  17. foamlib-0.8.0/foamlib/_files/_util.py +0 -23
  18. {foamlib-0.8.0 → foamlib-0.8.2}/.devcontainer.json +0 -0
  19. {foamlib-0.8.0 → foamlib-0.8.2}/.dockerignore +0 -0
  20. {foamlib-0.8.0 → foamlib-0.8.2}/.git-blame-ignore-revs +0 -0
  21. {foamlib-0.8.0 → foamlib-0.8.2}/.github/dependabot.yml +0 -0
  22. {foamlib-0.8.0 → foamlib-0.8.2}/.github/workflows/ci.yml +0 -0
  23. {foamlib-0.8.0 → foamlib-0.8.2}/.github/workflows/docker.yml +0 -0
  24. {foamlib-0.8.0 → foamlib-0.8.2}/.github/workflows/dockerhub-description.yml +0 -0
  25. {foamlib-0.8.0 → foamlib-0.8.2}/.github/workflows/pypi-publish.yml +0 -0
  26. {foamlib-0.8.0 → foamlib-0.8.2}/.gitignore +0 -0
  27. {foamlib-0.8.0 → foamlib-0.8.2}/.readthedocs.yaml +0 -0
  28. {foamlib-0.8.0 → foamlib-0.8.2}/Dockerfile +0 -0
  29. {foamlib-0.8.0 → foamlib-0.8.2}/LICENSE.txt +0 -0
  30. {foamlib-0.8.0 → foamlib-0.8.2}/README.md +0 -0
  31. {foamlib-0.8.0 → foamlib-0.8.2}/docs/Makefile +0 -0
  32. {foamlib-0.8.0 → foamlib-0.8.2}/docs/cases.rst +0 -0
  33. {foamlib-0.8.0 → foamlib-0.8.2}/docs/conf.py +0 -0
  34. {foamlib-0.8.0 → foamlib-0.8.2}/docs/files.rst +0 -0
  35. {foamlib-0.8.0 → foamlib-0.8.2}/docs/index.rst +0 -0
  36. {foamlib-0.8.0 → foamlib-0.8.2}/docs/make.bat +0 -0
  37. {foamlib-0.8.0 → foamlib-0.8.2}/docs/ruff.toml +0 -0
  38. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_cases/__init__.py +0 -0
  39. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_cases/_base.py +0 -0
  40. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_cases/_run.py +0 -0
  41. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_cases/_slurm.py +0 -0
  42. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_cases/_subprocess.py +0 -0
  43. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_cases/_sync.py +0 -0
  44. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_cases/_util.py +0 -0
  45. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_files/__init__.py +0 -0
  46. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/_files/_io.py +0 -0
  47. {foamlib-0.8.0 → foamlib-0.8.2}/foamlib/py.typed +0 -0
  48. {foamlib-0.8.0 → foamlib-0.8.2}/logo.png +0 -0
  49. {foamlib-0.8.0 → foamlib-0.8.2}/tests/__init__.py +0 -0
  50. {foamlib-0.8.0 → foamlib-0.8.2}/tests/ruff.toml +0 -0
  51. {foamlib-0.8.0 → foamlib-0.8.2}/tests/test_cases/__init__.py +0 -0
  52. {foamlib-0.8.0 → foamlib-0.8.2}/tests/test_cases/test_flange.py +0 -0
  53. {foamlib-0.8.0 → foamlib-0.8.2}/tests/test_cases/test_flange_async.py +0 -0
  54. {foamlib-0.8.0 → foamlib-0.8.2}/tests/test_files/__init__.py +0 -0
--- foamlib-0.8.0/PKG-INFO
+++ foamlib-0.8.2/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: foamlib
-Version: 0.8.0
+Version: 0.8.2
 Summary: A Python interface for interacting with OpenFOAM
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib
@@ -25,12 +25,12 @@ Classifier: Topic :: Software Development
 Classifier: Typing :: Typed
 Requires-Python: >=3.7
 Requires-Dist: aioshutil<2,>=1
-Requires-Dist: pyparsing<4,>=3
+Requires-Dist: numpy<3,>=1
+Requires-Dist: numpy<3,>=1.25.0; python_version >= '3.10'
+Requires-Dist: pyparsing<4,>=3.1.2
 Requires-Dist: typing-extensions<5,>=4; python_version < '3.11'
 Provides-Extra: dev
 Requires-Dist: mypy<2,>=1; extra == 'dev'
-Requires-Dist: numpy<3,>=1; extra == 'dev'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'dev'
 Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'dev'
 Requires-Dist: pytest-cov; extra == 'dev'
 Requires-Dist: pytest<9,>=7; extra == 'dev'
@@ -38,25 +38,16 @@ Requires-Dist: ruff; extra == 'dev'
 Requires-Dist: sphinx-rtd-theme; extra == 'dev'
 Requires-Dist: sphinx<9,>=5; extra == 'dev'
 Provides-Extra: docs
-Requires-Dist: numpy<3,>=1; extra == 'docs'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'docs'
 Requires-Dist: sphinx-rtd-theme; extra == 'docs'
 Requires-Dist: sphinx<9,>=5; extra == 'docs'
 Provides-Extra: lint
 Requires-Dist: ruff; extra == 'lint'
-Provides-Extra: numpy
-Requires-Dist: numpy<3,>=1; extra == 'numpy'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'numpy'
 Provides-Extra: test
-Requires-Dist: numpy<3,>=1; extra == 'test'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'test'
 Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'test'
 Requires-Dist: pytest-cov; extra == 'test'
 Requires-Dist: pytest<9,>=7; extra == 'test'
 Provides-Extra: typing
 Requires-Dist: mypy<2,>=1; extra == 'typing'
-Requires-Dist: numpy<3,>=1; extra == 'typing'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'typing'
 Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'typing'
 Requires-Dist: pytest-cov; extra == 'typing'
 Requires-Dist: pytest<9,>=7; extra == 'typing'
foamlib-0.8.2/benchmark.png: Binary file
--- foamlib-0.8.0/foamlib/__init__.py
+++ foamlib-0.8.2/foamlib/__init__.py
@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.8.0"
+__version__ = "0.8.2"
 
 from ._cases import (
     AsyncFoamCase,
--- foamlib-0.8.0/foamlib/_cases/_async.py
+++ foamlib-0.8.2/foamlib/_cases/_async.py
@@ -106,7 +106,7 @@ class AsyncFoamCase(FoamCaseRunBase):
     async def _rmtree(
         path: os.PathLike[str] | str, *, ignore_errors: bool = False
     ) -> None:
-        await aioshutil.rmtree(path, ignore_errors=ignore_errors)
+        await aioshutil.rmtree(path, ignore_errors=ignore_errors)  # type: ignore [call-arg]
 
     @staticmethod
     async def _copytree(
--- foamlib-0.8.0/foamlib/_files/_files.py
+++ foamlib-0.8.2/foamlib/_files/_files.py
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import os
 import sys
 from copy import deepcopy
 from typing import Any, Optional, Tuple, Union, cast
@@ -15,6 +14,8 @@ if sys.version_info >= (3, 9):
 else:
     from typing import Iterator, Mapping, MutableMapping, Sequence
 
+import numpy as np
+
 from ._io import FoamFileIO
 from ._serialization import Kind, dumps, normalize
 from ._types import (
@@ -27,7 +28,6 @@ from ._types import (
     File,
     MutableEntry,
 )
-from ._util import is_sequence
 
 
 class FoamFile(
@@ -196,7 +196,7 @@ class FoamFile(
         elif not isinstance(keywords, tuple):
             keywords = (keywords,)
 
-        if keywords and not isinstance(normalize(keywords[-1], kind=Kind.KEYWORD), str):
+        if keywords and not isinstance(normalize(keywords[-1]), str):
             msg = f"Invalid keyword: {keywords[-1]}"
             raise ValueError(msg)
 
@@ -228,52 +228,43 @@ class FoamFile(
                 or keywords[2].endswith("Gradient")
             )
         ):
-            if self.format == "binary":
-                arch = self.get(("FoamFile", "arch"), default=None)
-                assert arch is None or isinstance(arch, str)
-                if (arch is not None and "scalar=32" in arch) or (
-                    arch is None
-                    and os.environ.get("WM_PRECISION_OPTION", default="DP") == "SP"
-                ):
-                    kind = Kind.SINGLE_PRECISION_BINARY_FIELD
-                else:
-                    kind = Kind.DOUBLE_PRECISION_BINARY_FIELD
-            else:
-                kind = Kind.ASCII_FIELD
+            kind = (
+                Kind.BINARY_FIELD if self.format == "binary" else Kind.ASCII_FIELD
+            )
         elif keywords == ("dimensions",):
             kind = Kind.DIMENSIONS
 
         if (
-            kind
-            in (
-                Kind.ASCII_FIELD,
-                Kind.DOUBLE_PRECISION_BINARY_FIELD,
-                Kind.SINGLE_PRECISION_BINARY_FIELD,
-            )
+            kind in (Kind.ASCII_FIELD, Kind.BINARY_FIELD)
         ) and self.class_ == "dictionary":
-            if isinstance(data, (int, float)):
-                self.class_ = "volScalarField"
-
-            elif is_sequence(data) and data:
-                if isinstance(data[0], (int, float)):
-                    if len(data) == 3:
-                        self.class_ = "volVectorField"
-                    elif len(data) == 6:
-                        self.class_ = "volSymmTensorField"
-                    elif len(data) == 9:
-                        self.class_ = "volTensorField"
-                elif (
-                    is_sequence(data[0])
-                    and data[0]
-                    and isinstance(data[0][0], (int, float))
-                ):
-                    if len(data[0]) == 3:
+            try:
+                shape = np.shape(data)  # type: ignore [arg-type]
+            except ValueError:
+                pass
+            else:
+                if not shape:
+                    self.class_ = "volScalarField"
+                elif shape == (3,):
+                    self.class_ = "volVectorField"
+                elif shape == (6,):
+                    self.class_ = "volSymmTensorField"
+                elif shape == (9,):
+                    self.class_ = "volTensorField"
+                elif len(shape) == 1:
+                    self.class_ = "volScalarField"
+                elif len(shape) == 2:
+                    if shape[1] == 3:
                         self.class_ = "volVectorField"
-                    elif len(data[0]) == 6:
+                    elif shape[1] == 6:
                         self.class_ = "volSymmTensorField"
-                    elif len(data[0]) == 9:
+                    elif shape[1] == 9:
                         self.class_ = "volTensorField"
 
+        if kind == Kind.ASCII_FIELD and self.class_.endswith("scalarField"):
+            kind = Kind.SCALAR_ASCII_FIELD
+        elif kind == Kind.BINARY_FIELD and self.class_.endswith("scalarField"):
+            kind = Kind.SCALAR_BINARY_FIELD
+
         parsed = self._get_parsed(missing_ok=True)
 
         start, end = parsed.entry_location(keywords, missing_ok=True)
@@ -304,7 +295,7 @@ class FoamFile(
                     ...,
                     before
                     + indentation
-                    + dumps(keywords[-1], kind=Kind.KEYWORD)
+                    + dumps(keywords[-1])
                     + b"\n"
                     + indentation
                     + b"{\n"
@@ -322,7 +313,7 @@ class FoamFile(
                 normalize(data, kind=kind),
                 before
                 + indentation
-                + dumps(keywords[-1], kind=Kind.KEYWORD)
+                + dumps(keywords[-1])
                 + b" "
                 + dumps(data, kind=kind)
                 + b";"
--- foamlib-0.8.0/foamlib/_files/_parsing.py
+++ foamlib-0.8.2/foamlib/_files/_parsing.py
@@ -1,9 +1,7 @@
 from __future__ import annotations
 
-import array
 import re
 import sys
-from enum import Enum, auto
 from typing import Tuple, Union, cast
 
 if sys.version_info >= (3, 9):
@@ -16,6 +14,7 @@ if sys.version_info >= (3, 10):
 else:
     EllipsisType = type(...)
 
+import numpy as np
 from pyparsing import (
     Combine,
     Dict,
@@ -37,62 +36,31 @@ from pyparsing import (
     printables,
 )
 
-from ._types import Data, Dimensioned, DimensionSet, File
-
-
-class Tensor(Enum):
-    SCALAR = auto()
-    VECTOR = auto()
-    SYMM_TENSOR = auto()
-    TENSOR = auto()
-
-    @property
-    def shape(self) -> tuple[int, ...]:
-        return {
-            Tensor.SCALAR: (),
-            Tensor.VECTOR: (3,),
-            Tensor.SYMM_TENSOR: (6,),
-            Tensor.TENSOR: (9,),
-        }[self]
-
-    @property
-    def size(self) -> int:
-        return {
-            Tensor.SCALAR: 1,
-            Tensor.VECTOR: 3,
-            Tensor.SYMM_TENSOR: 6,
-            Tensor.TENSOR: 9,
-        }[self]
-
-    def pattern(self, *, ignore: Regex | None = None) -> str:
-        float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
-
-        if self == Tensor.SCALAR:
-            return float_pattern
-
-        ignore_pattern = (
-            rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
-        )
+from ._types import Data, Dimensioned, DimensionSet, File, TensorKind
 
-        return rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{self.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
 
-    def parser(self) -> ParserElement:
-        if self == Tensor.SCALAR:
-            return common.ieee_float
+def _tensor(tensor_kind: TensorKind, *, ignore: Regex | None = None) -> Regex:
+    float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
 
-        return (
-            Literal("(").suppress()
-            + Group(common.ieee_float[self.size], aslist=True)
-            + Literal(")").suppress()
-        )
+    if tensor_kind == TensorKind.SCALAR:
+        ret = Regex(float_pattern)
+        ret.add_parse_action(lambda tks: [float(tks[0])])
+        return ret
 
-    def __str__(self) -> str:
-        return {
-            Tensor.SCALAR: "scalar",
-            Tensor.VECTOR: "vector",
-            Tensor.SYMM_TENSOR: "symmTensor",
-            Tensor.TENSOR: "tensor",
-        }[self]
+    ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
+
+    ret = Regex(
+        rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{tensor_kind.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
+    )
+    ret.add_parse_action(
+        lambda tks: np.fromstring(
+            re.sub(ignore.re, " ", tks[0][1:-1])
+            if ignore is not None
+            else tks[0][1:-1],
+            sep=" ",
+        )
+    )
+    return ret
 
 
 def _list_of(entry: ParserElement) -> ParserElement:
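The Tensor enum's pattern()/parser() methods collapse into the module-level _tensor() factory above, whose parse action strips the outer parentheses and lets np.fromstring do the float conversion. That conversion step in isolation:

import numpy as np

# A vector literal as matched by the _tensor regex above:
text = "(1 2 3)"

# The parse action drops the parentheses and parses the remainder as
# whitespace-separated floats, yielding a float64 array rather than a list.
arr = np.fromstring(text[1:-1], sep=" ")
print(arr)  # [1. 2. 3.]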
@@ -115,61 +83,35 @@ def _list_of(entry: ParserElement) -> ParserElement:
 
 
 def _parse_ascii_field(
-    s: str, tensor_kind: Tensor, *, ignore: Regex | None
-) -> list[float] | list[list[float]]:
-    values = [
-        float(v)
-        for v in (re.sub(ignore.re, " ", s) if ignore is not None else s)
-        .replace("(", " ")
-        .replace(")", " ")
-        .split()
-    ]
-
-    if tensor_kind == Tensor.SCALAR:
-        return values
+    s: str, tensor_kind: TensorKind, *, ignore: Regex | None
+) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64]]:
+    if ignore is not None:
+        s = re.sub(ignore.re, " ", s)
+    s = s.replace("(", " ").replace(")", " ")
 
-    return [
-        values[i : i + tensor_kind.size]
-        for i in range(0, len(values), tensor_kind.size)
-    ]
+    return np.fromstring(s, sep=" ").reshape(-1, *tensor_kind.shape)
 
 
 def _unpack_binary_field(
-    b: bytes, tensor_kind: Tensor, *, length: int
-) -> list[float] | list[list[float]]:
+    b: bytes, tensor_kind: TensorKind, *, length: int
+) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
     float_size = len(b) / tensor_kind.size / length
     assert float_size in (4, 8)
 
-    arr = array.array("f" if float_size == 4 else "d", b)
-    values = arr.tolist()
-
-    if tensor_kind == Tensor.SCALAR:
-        return values
-
-    return [
-        values[i : i + tensor_kind.size]
-        for i in range(0, len(values), tensor_kind.size)
-    ]
+    dtype = np.float32 if float_size == 4 else float
+    return np.frombuffer(b, dtype=dtype).reshape(-1, *tensor_kind.shape)
 
 
 def _tensor_list(
-    tensor_kind: Tensor | None = None, *, ignore: Regex | None = None
+    tensor_kind: TensorKind, *, ignore: Regex | None = None
 ) -> ParserElement:
-    if tensor_kind is None:
-        return (
-            _tensor_list(Tensor.SCALAR, ignore=ignore)
-            | _tensor_list(Tensor.VECTOR, ignore=ignore)
-            | _tensor_list(Tensor.SYMM_TENSOR, ignore=ignore)
-            | _tensor_list(Tensor.TENSOR, ignore=ignore)
-        )
-
-    tensor_pattern = tensor_kind.pattern(ignore=ignore)
+    tensor = _tensor(tensor_kind, ignore=ignore)
     ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
 
     list_ = Forward()
 
     list_ <<= Regex(
-        rf"\((?:{ignore_pattern})?(?:{tensor_pattern}{ignore_pattern})*{tensor_pattern}(?:{ignore_pattern})?\)"
+        rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern})*{tensor.re.pattern}(?:{ignore_pattern})?\)"
    ).add_parse_action(
         lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
     )
@@ -181,13 +123,13 @@ def _tensor_list(
 
     list_ <<= (
         Regex(
-            rf"\((?:{ignore_pattern})?(?:{tensor_pattern}{ignore_pattern}){{{length - 1}}}{tensor_pattern}(?:{ignore_pattern})?\)"
+            rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern}){{{length - 1}}}{tensor.re.pattern}(?:{ignore_pattern})?\)"
         ).add_parse_action(
             lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
         )
         | Regex(
             rf"\((?s:.{{{length * tensor_kind.size * 8}}}|.{{{length * tensor_kind.size * 4}}})\)"
-        ).set_parse_action(
+        ).add_parse_action(
             lambda tks: [
                 _unpack_binary_field(
                     tks[0][1:-1].encode("latin-1"), tensor_kind, length=length
@@ -195,8 +137,10 @@
             ]
         )
         | (
-            Literal("{").suppress() + tensor_kind.parser() + Literal("}").suppress()
-        ).set_parse_action(lambda tks: [[tks[0]] * length])
+            Literal("{").suppress() + tensor + Literal("}").suppress()
+        ).add_parse_action(
+            lambda tks: [np.full((length, *tensor_kind.shape), tks[0], dtype=float)]
+        )
     )
 
     count = common.integer.copy().add_parse_action(count_parse_action)
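_parse_ascii_field and _unpack_binary_field likewise now return numpy arrays, with the binary path inferring single vs. double precision from the byte count per value. A small sketch of that unpacking logic, with assumed example values:

import numpy as np

length, size = 2, 3  # two 3-component vectors
b = np.arange(6, dtype=float).tobytes()  # 48 bytes of float64 data

float_size = len(b) / size / length  # 8.0, so double precision
dtype = np.float32 if float_size == 4 else float
field = np.frombuffer(b, dtype=dtype).reshape(-1, size)
print(field.shape)  # (2, 3)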
@@ -274,10 +218,10 @@ _DIMENSIONS = (
     Literal("[").suppress() + common.number[0, 7] + Literal("]").suppress()
 ).set_parse_action(lambda tks: DimensionSet(*tks))
 _TENSOR = (
-    Tensor.SCALAR.parser()
-    | Tensor.VECTOR.parser()
-    | Tensor.SYMM_TENSOR.parser()
-    | Tensor.TENSOR.parser()
+    _tensor(TensorKind.SCALAR)
+    | _tensor(TensorKind.VECTOR)
+    | _tensor(TensorKind.SYMM_TENSOR)
+    | _tensor(TensorKind.TENSOR)
 )
 _IDENTIFIER = Combine(
     Word(_IDENTCHARS, _IDENTBODYCHARS, exclude_chars="()")
@@ -287,29 +231,40 @@ _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
     lambda tks: Dimensioned(*reversed(tks.as_list()))
 )
 _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
-    Keyword("nonuniform", _IDENTBODYCHARS).suppress() + _tensor_list(ignore=_COMMENT)
+    Keyword("nonuniform", _IDENTBODYCHARS).suppress()
+    + (
+        _tensor_list(TensorKind.SCALAR, ignore=_COMMENT)
+        | _tensor_list(TensorKind.VECTOR, ignore=_COMMENT)
+        | _tensor_list(TensorKind.SYMM_TENSOR, ignore=_COMMENT)
+        | _tensor_list(TensorKind.TENSOR, ignore=_COMMENT)
+    )
 )
-TOKEN = dbl_quoted_string | _IDENTIFIER
-DATA = Forward()
-_KEYWORD_ENTRY = _keyword_entry_of(TOKEN | _list_of(_IDENTIFIER), DATA)
-_DICT = _dict_of(TOKEN, DATA)
+_TOKEN = dbl_quoted_string | _IDENTIFIER
+_DATA = Forward()
+_KEYWORD_ENTRY = _keyword_entry_of(_TOKEN | _list_of(_IDENTIFIER), _DATA)
+_DICT = _dict_of(_TOKEN, _DATA)
 _DATA_ENTRY = Forward()
 _LIST_ENTRY = _DICT | _KEYWORD_ENTRY | _DATA_ENTRY
 _LIST = _list_of(_LIST_ENTRY)
 _NUMBER = common.signed_integer ^ common.ieee_float
-_DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | TOKEN
+_DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | _TOKEN
 
-DATA <<= (
+_DATA <<= (
     _DATA_ENTRY[1, ...]
     .set_parse_action(lambda tks: [tuple(tks)] if len(tks) > 1 else [tks[0]])
     .ignore(_COMMENT)
     .parse_with_tabs()
 )
 
+
+def parse_data(s: str) -> Data:
+    return cast(Data, _DATA.parse_string(s, parse_all=True)[0])
+
+
 _LOCATED_DICTIONARY = Group(
-    _keyword_entry_of(TOKEN, Opt(DATA, default=""), located=True)
+    _keyword_entry_of(_TOKEN, Opt(_DATA, default=""), located=True)
 )[...]
-_LOCATED_DATA = Group(Located(DATA.copy().add_parse_action(lambda tks: ["", tks[0]])))
+_LOCATED_DATA = Group(Located(_DATA.copy().add_parse_action(lambda tks: ["", tks[0]])))
 
 _FILE = (
     Dict(_LOCATED_DICTIONARY + Opt(_LOCATED_DATA) + _LOCATED_DICTIONARY)
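The grammar names (TOKEN, DATA) become private, and the new parse_data function is the single entry point the serializer imports. Usage would look roughly like this (an internal module; the return values are inferred from the grammar above):

from foamlib._files._parsing import parse_data  # internal API

print(parse_data("uniform (1 2 3)"))  # expected: array([1., 2., 3.])
print(parse_data("[0 2 -1 0 0 0 0]"))  # expected: a DimensionSet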
--- foamlib-0.8.0/foamlib/_files/_serialization.py
+++ foamlib-0.8.2/foamlib/_files/_serialization.py
@@ -1,35 +1,27 @@
 from __future__ import annotations
 
-import array
-import itertools
 import sys
 from enum import Enum, auto
-from typing import cast, overload
+from typing import overload
 
 if sys.version_info >= (3, 9):
-    from collections.abc import Mapping, Sequence
+    from collections.abc import Mapping
 else:
-    from typing import Mapping, Sequence
+    from typing import Mapping
 
-from ._parsing import DATA, TOKEN
-from ._types import Data, Dimensioned, DimensionSet, Entry
-from ._util import is_sequence
+import numpy as np
 
-try:
-    import numpy as np
-
-    numpy = True
-except ModuleNotFoundError:
-    numpy = False
+from ._parsing import parse_data
+from ._types import Data, Dimensioned, DimensionSet, Entry, is_sequence
 
 
 class Kind(Enum):
     DEFAULT = auto()
-    KEYWORD = auto()
     SINGLE_ENTRY = auto()
     ASCII_FIELD = auto()
-    DOUBLE_PRECISION_BINARY_FIELD = auto()
-    SINGLE_PRECISION_BINARY_FIELD = auto()
+    SCALAR_ASCII_FIELD = auto()
+    BINARY_FIELD = auto()
+    SCALAR_BINARY_FIELD = auto()
     DIMENSIONS = auto()
 
 
@@ -42,9 +34,34 @@ def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry: ...
 
 
 def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry:
-    if numpy and isinstance(data, np.ndarray):
+    if kind in (
+        Kind.ASCII_FIELD,
+        Kind.SCALAR_ASCII_FIELD,
+        Kind.BINARY_FIELD,
+        Kind.SCALAR_BINARY_FIELD,
+    ):
+        if is_sequence(data):
+            try:
+                arr = np.asarray(data)
+            except ValueError:
+                pass
+            else:
+                if not np.issubdtype(arr.dtype, np.floating):
+                    arr = arr.astype(float)
+
+                if arr.ndim == 1 or (arr.ndim == 2 and arr.shape[1] in (3, 6, 9)):
+                    return arr
+
+            return data
+
+        if isinstance(data, int):
+            return float(data)
+
+        return data
+
+    if isinstance(data, np.ndarray):
         ret = data.tolist()
-        assert isinstance(ret, list)
+        assert isinstance(ret, (int, float, list))
         return ret
 
     if isinstance(data, Mapping):
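(The normalize diff continues below.) The new branch above means that for the field kinds, plain Python sequences are promoted to float arrays before serialization; a hedged check of that behavior against the internal API:

import numpy as np

from foamlib._files._serialization import Kind, normalize

out = normalize([[1, 2, 3], [4, 5, 6]], kind=Kind.ASCII_FIELD)
# Expected per the branch above: a float64 array of shape (2, 3).
assert isinstance(out, np.ndarray)
assert out.dtype == np.float64 and out.shape == (2, 3)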
@@ -56,32 +73,21 @@ def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry:
         and len(data) <= 7
         and all(isinstance(d, (int, float)) for d in data)
     ):
-        data = cast(Sequence[float], data)
         return DimensionSet(*data)
 
     if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
         k, v = data
-        return (normalize(k, kind=Kind.KEYWORD), normalize(v))
+        return (normalize(k), normalize(v))
 
     if is_sequence(data) and (kind == Kind.SINGLE_ENTRY or not isinstance(data, tuple)):
         return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
 
-    if isinstance(data, Dimensioned):
-        value = normalize(data.value, kind=Kind.SINGLE_ENTRY)
-        assert isinstance(value, (int, float, list))
-        return Dimensioned(value, data.dimensions, data.name)
-
     if isinstance(data, str):
-        if kind == Kind.KEYWORD:
-            data = TOKEN.parse_string(data, parse_all=True)[0]
-            assert isinstance(data, str)
-            return data
-
-        return cast(Data, DATA.parse_string(data, parse_all=True)[0])
+        return parse_data(data)
 
     if isinstance(
         data,
-        (int, float, bool, tuple, DimensionSet),
+        (int, float, bool, tuple, DimensionSet, Dimensioned),
     ):
         return data
 
@@ -105,7 +111,7 @@ def dumps(
 
     if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
         k, v = data
-        ret = dumps(k, kind=Kind.KEYWORD) + b" " + dumps(v)
+        ret = dumps(k) + b" " + dumps(v)
         if not isinstance(v, Mapping):
             ret += b";"
         return ret
@@ -115,56 +121,39 @@ def dumps(
 
     if kind in (
         Kind.ASCII_FIELD,
-        Kind.DOUBLE_PRECISION_BINARY_FIELD,
-        Kind.SINGLE_PRECISION_BINARY_FIELD,
-    ) and (
-        isinstance(data, (int, float))
-        or (
-            is_sequence(data)
-            and data
-            and isinstance(data[0], (int, float))
-            and len(data) in (3, 6, 9)
-        )
-    ):
-        return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
+        Kind.SCALAR_ASCII_FIELD,
+        Kind.BINARY_FIELD,
+        Kind.SCALAR_BINARY_FIELD,
+    ) and (isinstance(data, (int, float, np.ndarray))):
+        shape = np.shape(data)
+        if not shape or (
+            kind not in (Kind.SCALAR_ASCII_FIELD, Kind.SCALAR_BINARY_FIELD)
+            and shape in ((3,), (6,), (9,))
+        ):
+            return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
 
-    if kind in (
-        Kind.ASCII_FIELD,
-        Kind.DOUBLE_PRECISION_BINARY_FIELD,
-        Kind.SINGLE_PRECISION_BINARY_FIELD,
-    ) and is_sequence(data):
-        if data and isinstance(data[0], (int, float)):
+        assert isinstance(data, np.ndarray)
+        ndim = len(shape)
+        if ndim == 1:
             tensor_kind = b"scalar"
-        elif is_sequence(data[0]) and data[0] and isinstance(data[0][0], (int, float)):
-            if len(data[0]) == 3:
+
+        elif ndim == 2:
+            if shape[1] == 3:
                 tensor_kind = b"vector"
-            elif len(data[0]) == 6:
+            elif shape[1] == 6:
                 tensor_kind = b"symmTensor"
-            elif len(data[0]) == 9:
+            elif shape[1] == 9:
                 tensor_kind = b"tensor"
             else:
                 return dumps(data)
+
         else:
             return dumps(data)
 
-        if kind in (
-            Kind.DOUBLE_PRECISION_BINARY_FIELD,
-            Kind.SINGLE_PRECISION_BINARY_FIELD,
-        ):
-            typecode = "f" if kind == Kind.SINGLE_PRECISION_BINARY_FIELD else "d"
-            if tensor_kind == b"scalar":
-                data = cast(Sequence[float], data)
-                contents = b"(" + array.array(typecode, data).tobytes() + b")"
-            else:
-                data = cast(Sequence[Sequence[float]], data)
-                contents = (
-                    b"("
-                    + array.array(
-                        typecode, itertools.chain.from_iterable(data)
-                    ).tobytes()
-                    + b")"
-                )
+        if kind in (Kind.BINARY_FIELD, Kind.SCALAR_BINARY_FIELD):
+            contents = b"(" + data.tobytes() + b")"
         else:
+            assert kind in (Kind.ASCII_FIELD, Kind.SCALAR_ASCII_FIELD)
             contents = dumps(data, kind=Kind.SINGLE_ENTRY)
 
         return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents