foamlib 0.8.1__tar.gz → 0.8.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {foamlib-0.8.1 → foamlib-0.8.2}/PKG-INFO +2 -2
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/__init__.py +1 -1
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_cases/_async.py +1 -1
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/_files.py +5 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/_parsing.py +40 -77
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/_serialization.py +20 -7
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/_types.py +36 -2
- {foamlib-0.8.1 → foamlib-0.8.2}/pyproject.toml +1 -1
- {foamlib-0.8.1 → foamlib-0.8.2}/.devcontainer.json +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/.dockerignore +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/.git-blame-ignore-revs +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/.github/dependabot.yml +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/.github/workflows/ci.yml +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/.github/workflows/docker.yml +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/.github/workflows/dockerhub-description.yml +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/.github/workflows/pypi-publish.yml +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/.gitignore +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/.readthedocs.yaml +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/Dockerfile +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/LICENSE.txt +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/README.md +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/benchmark.png +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/docs/Makefile +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/docs/cases.rst +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/docs/conf.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/docs/files.rst +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/docs/index.rst +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/docs/make.bat +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/docs/ruff.toml +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_cases/__init__.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_cases/_base.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_cases/_run.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_cases/_slurm.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_cases/_subprocess.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_cases/_sync.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_cases/_util.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/__init__.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/_io.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/foamlib/py.typed +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/logo.png +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/__init__.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/ruff.toml +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/test_cases/__init__.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/test_cases/test_cavity.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/test_cases/test_cavity_async.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/test_cases/test_flange.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/test_cases/test_flange_async.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/test_files/__init__.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/test_files/test_dumps.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/test_files/test_files.py +0 -0
- {foamlib-0.8.1 → foamlib-0.8.2}/tests/test_files/test_parsing.py +0 -0
{foamlib-0.8.1 → foamlib-0.8.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: foamlib
-Version: 0.8.1
+Version: 0.8.2
 Summary: A Python interface for interacting with OpenFOAM
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib
@@ -27,7 +27,7 @@ Requires-Python: >=3.7
 Requires-Dist: aioshutil<2,>=1
 Requires-Dist: numpy<3,>=1
 Requires-Dist: numpy<3,>=1.25.0; python_version >= '3.10'
-Requires-Dist: pyparsing<4,>=3
+Requires-Dist: pyparsing<4,>=3.1.2
 Requires-Dist: typing-extensions<5,>=4; python_version < '3.11'
 Provides-Extra: dev
 Requires-Dist: mypy<2,>=1; extra == 'dev'
{foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_cases/_async.py

@@ -106,7 +106,7 @@ class AsyncFoamCase(FoamCaseRunBase):
     async def _rmtree(
         path: os.PathLike[str] | str, *, ignore_errors: bool = False
    ) -> None:
-        await aioshutil.rmtree(path, ignore_errors=ignore_errors)
+        await aioshutil.rmtree(path, ignore_errors=ignore_errors)  # type: ignore [call-arg]

     @staticmethod
     async def _copytree(
{foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/_files.py

@@ -260,6 +260,11 @@ class FoamFile(
                 elif shape[1] == 9:
                     self.class_ = "volTensorField"

+        if kind == Kind.ASCII_FIELD and self.class_.endswith("scalarField"):
+            kind = Kind.SCALAR_ASCII_FIELD
+        elif kind == Kind.BINARY_FIELD and self.class_.endswith("scalarField"):
+            kind = Kind.SCALAR_BINARY_FIELD
+
         parsed = self._get_parsed(missing_ok=True)

         start, end = parsed.entry_location(keywords, missing_ok=True)
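The hunk above routes scalar fields to the new scalar-specific kinds, so that a short list of scalar values is not written as if it were a single uniform vector or tensor. Below is a minimal sketch of that selection rule as a hypothetical standalone helper (not foamlib's own code); `Kind` and its new members come from `foamlib._files._serialization`, shown later in this diff.

```python
# Hypothetical helper illustrating the kind-selection rule added to FoamFile.
from foamlib._files._serialization import Kind


def select_kind(class_: str, *, binary: bool) -> Kind:
    """Pick a field serialization kind from the file's 'class' header entry."""
    kind = Kind.BINARY_FIELD if binary else Kind.ASCII_FIELD
    if class_.endswith("scalarField"):
        # A scalar field with 3, 6 or 9 values would otherwise be ambiguous
        # with a uniform vector/symmTensor/tensor, so use the scalar kinds.
        kind = Kind.SCALAR_BINARY_FIELD if binary else Kind.SCALAR_ASCII_FIELD
    return kind


assert select_kind("volScalarField", binary=False) is Kind.SCALAR_ASCII_FIELD
assert select_kind("volVectorField", binary=True) is Kind.BINARY_FIELD
```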
{foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/_parsing.py

@@ -2,7 +2,6 @@ from __future__ import annotations

 import re
 import sys
-from enum import Enum, auto
 from typing import Tuple, Union, cast

 if sys.version_info >= (3, 9):
@@ -37,65 +36,31 @@ from pyparsing import (
     printables,
 )

-from ._types import Data, Dimensioned, DimensionSet, File
-
-
-class _Tensor(Enum):
-    SCALAR = auto()
-    VECTOR = auto()
-    SYMM_TENSOR = auto()
-    TENSOR = auto()
-
-    @property
-    def shape(self) -> tuple[()] | tuple[int]:
-        if self == _Tensor.SCALAR:
-            return ()
-        if self == _Tensor.VECTOR:
-            return (3,)
-        if self == _Tensor.SYMM_TENSOR:
-            return (6,)
-        if self == _Tensor.TENSOR:
-            return (9,)
-        raise NotImplementedError
-
-    @property
-    def size(self) -> int:
-        return {
-            _Tensor.SCALAR: 1,
-            _Tensor.VECTOR: 3,
-            _Tensor.SYMM_TENSOR: 6,
-            _Tensor.TENSOR: 9,
-        }[self]
-
-    def pattern(self, *, ignore: Regex | None = None) -> str:
-        float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
-
-        if self == _Tensor.SCALAR:
-            return float_pattern
-
-        ignore_pattern = (
-            rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
-        )
+from ._types import Data, Dimensioned, DimensionSet, File, TensorKind

-        return rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{self.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"

-
-
-            return common.ieee_float
+def _tensor(tensor_kind: TensorKind, *, ignore: Regex | None = None) -> Regex:
+    float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"

-
-
-
-
-
+    if tensor_kind == TensorKind.SCALAR:
+        ret = Regex(float_pattern)
+        ret.add_parse_action(lambda tks: [float(tks[0])])
+        return ret
+
+    ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"

-
-
-
-
-
-
-
+    ret = Regex(
+        rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{tensor_kind.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
+    )
+    ret.add_parse_action(
+        lambda tks: np.fromstring(
+            re.sub(ignore.re, " ", tks[0][1:-1])
+            if ignore is not None
+            else tks[0][1:-1],
+            sep=" ",
+        )
+    )
+    return ret


 def _list_of(entry: ParserElement) -> ParserElement:
@@ -118,17 +83,17 @@ def _list_of(entry: ParserElement) -> ParserElement:


 def _parse_ascii_field(
-    s: str, tensor_kind:
+    s: str, tensor_kind: TensorKind, *, ignore: Regex | None
 ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64]]:
     if ignore is not None:
         s = re.sub(ignore.re, " ", s)
     s = s.replace("(", " ").replace(")", " ")

-    return np.fromstring(s,
+    return np.fromstring(s, sep=" ").reshape(-1, *tensor_kind.shape)


 def _unpack_binary_field(
-    b: bytes, tensor_kind:
+    b: bytes, tensor_kind: TensorKind, *, length: int
 ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
     float_size = len(b) / tensor_kind.size / length
     assert float_size in (4, 8)
@@ -138,23 +103,15 @@ def _unpack_binary_field(


 def _tensor_list(
-    tensor_kind:
+    tensor_kind: TensorKind, *, ignore: Regex | None = None
 ) -> ParserElement:
-
-    return (
-        _tensor_list(_Tensor.SCALAR, ignore=ignore)
-        | _tensor_list(_Tensor.VECTOR, ignore=ignore)
-        | _tensor_list(_Tensor.SYMM_TENSOR, ignore=ignore)
-        | _tensor_list(_Tensor.TENSOR, ignore=ignore)
-    )
-
-    tensor_pattern = tensor_kind.pattern(ignore=ignore)
+    tensor = _tensor(tensor_kind, ignore=ignore)
     ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"

     list_ = Forward()

     list_ <<= Regex(
-        rf"\((?:{ignore_pattern})?(?:{
+        rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern})*{tensor.re.pattern}(?:{ignore_pattern})?\)"
     ).add_parse_action(
         lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
     )
@@ -166,7 +123,7 @@ def _tensor_list(

     list_ <<= (
         Regex(
-            rf"\((?:{ignore_pattern})?(?:{
+            rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern}){{{length - 1}}}{tensor.re.pattern}(?:{ignore_pattern})?\)"
         ).add_parse_action(
             lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
         )
@@ -180,7 +137,7 @@ def _tensor_list(
             ]
         )
         | (
-            Literal("{").suppress() +
+            Literal("{").suppress() + tensor + Literal("}").suppress()
         ).add_parse_action(
             lambda tks: [np.full((length, *tensor_kind.shape), tks[0], dtype=float)]
         )
@@ -261,10 +218,10 @@ _DIMENSIONS = (
     Literal("[").suppress() + common.number[0, 7] + Literal("]").suppress()
 ).set_parse_action(lambda tks: DimensionSet(*tks))
 _TENSOR = (
-
-
-
-
+    _tensor(TensorKind.SCALAR)
+    | _tensor(TensorKind.VECTOR)
+    | _tensor(TensorKind.SYMM_TENSOR)
+    | _tensor(TensorKind.TENSOR)
 )
 _IDENTIFIER = Combine(
     Word(_IDENTCHARS, _IDENTBODYCHARS, exclude_chars="()")
@@ -274,7 +231,13 @@ _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
     lambda tks: Dimensioned(*reversed(tks.as_list()))
 )
 _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
-    Keyword("nonuniform", _IDENTBODYCHARS).suppress()
+    Keyword("nonuniform", _IDENTBODYCHARS).suppress()
+    + (
+        _tensor_list(TensorKind.SCALAR, ignore=_COMMENT)
+        | _tensor_list(TensorKind.VECTOR, ignore=_COMMENT)
+        | _tensor_list(TensorKind.SYMM_TENSOR, ignore=_COMMENT)
+        | _tensor_list(TensorKind.TENSOR, ignore=_COMMENT)
+    )
 )
 _TOKEN = dbl_quoted_string | _IDENTIFIER
 _DATA = Forward()
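In the rewrite above, the removed `_Tensor` enum's pattern helpers are replaced by a module-level `_tensor()` function that returns a pyparsing `Regex` with a parse action attached, so scalars come back as `float` and vectors/tensors as NumPy arrays. The following is a self-contained sketch of the same technique; the vector pattern is simplified and assumes there are no comments to ignore, and it is not foamlib's exact parser element.

```python
# Simplified illustration of the Regex-plus-parse-action approach used by _tensor().
import numpy as np
from pyparsing import Regex

float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"

# An OpenFOAM vector: three whitespace-separated floats inside parentheses.
vector = Regex(rf"\(\s*(?:{float_pattern}\s+){{2}}{float_pattern}\s*\)")
vector.add_parse_action(lambda tks: [np.fromstring(tks[0][1:-1], sep=" ")])

arr = vector.parse_string("(1.0 2.5e-3 -4)")[0]
print(arr, arr.shape)  # [ 1.      0.0025 -4.    ] (3,)
```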
{foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/_serialization.py

@@ -19,7 +19,9 @@ class Kind(Enum):
     DEFAULT = auto()
     SINGLE_ENTRY = auto()
     ASCII_FIELD = auto()
+    SCALAR_ASCII_FIELD = auto()
     BINARY_FIELD = auto()
+    SCALAR_BINARY_FIELD = auto()
     DIMENSIONS = auto()


@@ -32,7 +34,12 @@ def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry: ...


 def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry:
-    if kind in (
+    if kind in (
+        Kind.ASCII_FIELD,
+        Kind.SCALAR_ASCII_FIELD,
+        Kind.BINARY_FIELD,
+        Kind.SCALAR_BINARY_FIELD,
+    ):
         if is_sequence(data):
             try:
                 arr = np.asarray(data)
@@ -112,11 +119,17 @@ def dumps(
     if isinstance(data, DimensionSet):
         return b"[" + b" ".join(dumps(v) for v in data) + b"]"

-    if kind in (
-
-
+    if kind in (
+        Kind.ASCII_FIELD,
+        Kind.SCALAR_ASCII_FIELD,
+        Kind.BINARY_FIELD,
+        Kind.SCALAR_BINARY_FIELD,
+    ) and (isinstance(data, (int, float, np.ndarray))):
         shape = np.shape(data)
-        if shape
+        if not shape or (
+            kind not in (Kind.SCALAR_ASCII_FIELD, Kind.SCALAR_BINARY_FIELD)
+            and shape in ((3,), (6,), (9,))
+        ):
             return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)

     assert isinstance(data, np.ndarray)
@@ -137,10 +150,10 @@ def dumps(
     else:
         return dumps(data)

-    if kind
+    if kind in (Kind.BINARY_FIELD, Kind.SCALAR_BINARY_FIELD):
         contents = b"(" + data.tobytes() + b")"
     else:
-        assert kind
+        assert kind in (Kind.ASCII_FIELD, Kind.SCALAR_ASCII_FIELD)
         contents = dumps(data, kind=Kind.SINGLE_ENTRY)

     return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
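With the two new kinds, `dumps()` only emits a `uniform` value for a 3-, 6- or 9-element array when the field is not a scalar field; under the scalar kinds the same data stays a nonuniform scalar list. A hedged usage sketch against the private helpers changed above follows; the exact byte strings are an expectation based on this diff, not a documented guarantee.

```python
# Illustrative only: dumps() and Kind are private to foamlib._files._serialization.
import numpy as np
from foamlib._files._serialization import Kind, dumps

values = np.array([1.0, 2.0, 3.0])

# Generic ASCII field kind: three numbers look like one uniform vector,
# so this should serialize roughly as b"uniform (1.0 2.0 3.0)".
print(dumps(values, kind=Kind.ASCII_FIELD))

# Scalar-specific kind: the same data is kept as a list of three scalars,
# roughly b"nonuniform List<scalar> 3(1.0 2.0 3.0)".
print(dumps(values, kind=Kind.SCALAR_ASCII_FIELD))
```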
{foamlib-0.8.1 → foamlib-0.8.2}/foamlib/_files/_types.py

@@ -2,7 +2,8 @@ from __future__ import annotations

 import sys
 from dataclasses import dataclass
-from
+from enum import Enum
+from typing import Dict, NamedTuple, Optional, Union

 import numpy as np

@@ -33,10 +34,43 @@ class DimensionSet(NamedTuple):
 Tensor = Union[
     float,
     Sequence[float],
-    "np.ndarray[tuple[()] |
+    "np.ndarray[tuple[()] | tuple[int], np.dtype[np.float64]]",
 ]


+class TensorKind(Enum):
+    SCALAR = ()
+    VECTOR = (3,)
+    SYMM_TENSOR = (6,)
+    TENSOR = (9,)
+
+    @property
+    def shape(self) -> tuple[()] | tuple[int]:
+        shape: tuple[()] | tuple[int] = self.value
+        return shape
+
+    @property
+    def size(self) -> int:
+        return int(np.prod(self.shape))
+
+    def __str__(self) -> str:
+        return {
+            TensorKind.SCALAR: "scalar",
+            TensorKind.VECTOR: "vector",
+            TensorKind.SYMM_TENSOR: "symmTensor",
+            TensorKind.TENSOR: "tensor",
+        }[self]
+
+    @staticmethod
+    def from_shape(shape: tuple[int, ...]) -> TensorKind:
+        for kind in TensorKind:
+            if kind.shape == shape:
+                return kind
+
+        msg = f"No tensor kind for shape {shape!r}"
+        raise ValueError(msg)
+
+
 @dataclass
 class Dimensioned:
     value: Tensor = 0
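The new `TensorKind` enum stores each kind's shape directly as its value, so `shape`, `size`, `__str__`, and `from_shape` all derive from it. A short usage sketch based on the hunk above:

```python
from foamlib._files._types import TensorKind

assert TensorKind.SCALAR.shape == ()
assert TensorKind.VECTOR.size == 3
assert str(TensorKind.SYMM_TENSOR) == "symmTensor"
assert TensorKind.from_shape((9,)) is TensorKind.TENSOR

try:
    TensorKind.from_shape((2,))
except ValueError as err:
    print(err)  # No tensor kind for shape (2,)
```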
|