foamlib 0.6.14__tar.gz → 0.7.0__tar.gz
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- {foamlib-0.6.14 → foamlib-0.7.0}/PKG-INFO +6 -7
- {foamlib-0.6.14 → foamlib-0.7.0}/README.md +5 -6
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/__init__.py +2 -3
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_files/__init__.py +0 -2
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_files/_files.py +40 -29
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_files/_parsing.py +18 -22
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_files/_serialization.py +33 -47
- foamlib-0.7.0/foamlib/_files/_types.py +78 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_files/_util.py +3 -3
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib.egg-info/PKG-INFO +6 -7
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib.egg-info/SOURCES.txt +1 -1
- foamlib-0.6.14/foamlib/_files/_base.py +0 -76
- {foamlib-0.6.14 → foamlib-0.7.0}/LICENSE.txt +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_cases/__init__.py +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_cases/_async.py +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_cases/_base.py +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_cases/_run.py +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_cases/_slurm.py +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_cases/_subprocess.py +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_cases/_sync.py +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_cases/_util.py +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/_files/_io.py +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib/py.typed +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib.egg-info/dependency_links.txt +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib.egg-info/requires.txt +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/foamlib.egg-info/top_level.txt +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/pyproject.toml +0 -0
- {foamlib-0.6.14 → foamlib-0.7.0}/setup.cfg +0 -0
--- foamlib-0.6.14/PKG-INFO
+++ foamlib-0.7.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: foamlib
-Version: 0.6.14
+Version: 0.7.0
 Summary: A Python interface for interacting with OpenFOAM
 Author-email: "Gabriel S. Gerlero" <ggerlero@cimec.unl.edu.ar>
 Project-URL: Homepage, https://github.com/gerlero/foamlib
@@ -169,24 +169,23 @@ U = FoamFieldFile(Path(my_pitz) / "0/U")
 print(U.internal_field)
 ```
 
-### 🔁 Run an optimization loop
+### 🔁 Run an optimization loop on a Slurm-based cluster
 
 ```python
 import os
 from pathlib import Path
-from foamlib import
+from foamlib import AsyncSlurmFoamCase
 from scipy.optimize import differential_evolution
 
-base =
-# Replace with `AsyncSlurmFoamCase` if on a cluster and you want cases to be run as Slurm jobs
+base = AsyncSlurmFoamCase(Path(os.environ["FOAM_TUTORIALS"]) / "incompressible/simpleFoam/pitzDaily")
 
 async def cost(x):
     async with base.clone() as clone:
         clone[0]["U"].boundary_field["inlet"].value = [x[0], 0, 0]
-        await clone.run()
+        await clone.run(fallback=True) # Run locally if Slurm is not available
         return abs(clone[-1]["U"].internal_field[0][0])
 
-result = differential_evolution(cost, bounds=[(-1, 1)], workers=
+result = differential_evolution(cost, bounds=[(-1, 1)], workers=AsyncSlurmFoamCase.map, polish=False)
 ```
 
 ### 📄 Use it to create a `run` (or `clean`) script
--- foamlib-0.6.14/README.md
+++ foamlib-0.7.0/README.md
@@ -114,24 +114,23 @@ U = FoamFieldFile(Path(my_pitz) / "0/U")
 print(U.internal_field)
 ```
 
-### 🔁 Run an optimization loop
+### 🔁 Run an optimization loop on a Slurm-based cluster
 
 ```python
 import os
 from pathlib import Path
-from foamlib import
+from foamlib import AsyncSlurmFoamCase
 from scipy.optimize import differential_evolution
 
-base =
-# Replace with `AsyncSlurmFoamCase` if on a cluster and you want cases to be run as Slurm jobs
+base = AsyncSlurmFoamCase(Path(os.environ["FOAM_TUTORIALS"]) / "incompressible/simpleFoam/pitzDaily")
 
 async def cost(x):
     async with base.clone() as clone:
         clone[0]["U"].boundary_field["inlet"].value = [x[0], 0, 0]
-        await clone.run()
+        await clone.run(fallback=True) # Run locally if Slurm is not available
         return abs(clone[-1]["U"].internal_field[0][0])
 
-result = differential_evolution(cost, bounds=[(-1, 1)], workers=
+result = differential_evolution(cost, bounds=[(-1, 1)], workers=AsyncSlurmFoamCase.map, polish=False)
 ```
 
 ### 📄 Use it to create a `run` (or `clean`) script
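The comment removed in the hunk above ("Replace with `AsyncSlurmFoamCase` if on a cluster...") indicates that the pre-0.7.0 README example used the plain async case class. For comparison, a minimal local-only variant of the updated example might look like the sketch below. It is not part of the diff: it assumes an OpenFOAM environment with `FOAM_TUTORIALS` set, SciPy installed, and that `AsyncFoamCase` exposes the same `map` helper used by `AsyncSlurmFoamCase` above, which this diff does not confirm.

```python
# Hypothetical local-only variant of the README example (not taken from the diff).
import os
from pathlib import Path

from foamlib import AsyncFoamCase  # runs cases locally instead of as Slurm jobs
from scipy.optimize import differential_evolution

base = AsyncFoamCase(Path(os.environ["FOAM_TUTORIALS"]) / "incompressible/simpleFoam/pitzDaily")

async def cost(x):
    async with base.clone() as clone:  # work on a throwaway copy of the case
        clone[0]["U"].boundary_field["inlet"].value = [x[0], 0, 0]
        await clone.run()
        return abs(clone[-1]["U"].internal_field[0][0])

# Assumes AsyncFoamCase.map exists, mirroring AsyncSlurmFoamCase.map shown above.
result = differential_evolution(cost, bounds=[(-1, 1)], workers=AsyncFoamCase.map, polish=False)
```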
--- foamlib-0.6.14/foamlib/__init__.py
+++ foamlib-0.7.0/foamlib/__init__.py
@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.6.14"
+__version__ = "0.7.0"
 
 from ._cases import (
     AsyncFoamCase,
@@ -10,7 +10,7 @@ from ._cases import (
     FoamCaseBase,
     FoamCaseRunBase,
 )
-from ._files import FoamFieldFile, FoamFile
+from ._files import FoamFieldFile, FoamFile
 
 __all__ = [
     "AsyncFoamCase",
@@ -21,5 +21,4 @@ __all__ = [
     "FoamCaseBase",
     "FoamCaseRunBase",
     "FoamFieldFile",
-    "FoamFileBase",
 ]
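Taken together, these three hunks bump the package version and drop `FoamFileBase` from the public namespace. A minimal check of the resulting import surface, assuming foamlib 0.7.0 is installed:

```python
import foamlib
from foamlib import AsyncFoamCase, FoamCaseBase, FoamCaseRunBase, FoamFieldFile, FoamFile

assert foamlib.__version__ == "0.7.0"
assert "FoamFileBase" not in foamlib.__all__  # removed in this release
```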
--- foamlib-0.6.14/foamlib/_files/_files.py
+++ foamlib-0.7.0/foamlib/_files/_files.py
@@ -15,17 +15,25 @@ if sys.version_info >= (3, 9):
 else:
     from typing import Iterator, Mapping, MutableMapping, Sequence
 
-from ._base import FoamFileBase
 from ._io import FoamFileIO
 from ._serialization import Kind, dumps, normalize
+from ._types import (
+    Data,
+    DataEntry,
+    Dict_,
+    Dimensioned,
+    DimensionSet,
+    Field,
+    File,
+    MutableData,
+)
 from ._util import is_sequence
 
 
 class FoamFile(
-    FoamFileBase,
     MutableMapping[
         Optional[Union[str, Tuple[str, ...]]],
-
+        MutableData,
     ],
     FoamFileIO,
 ):
@@ -37,8 +45,11 @@ class FoamFile(
     Use as a context manager to make multiple changes to the file while saving all changes only once at the end.
     """
 
+    Dimensioned = Dimensioned
+    DimensionSet = DimensionSet
+
     class SubDict(
-        MutableMapping[str,
+        MutableMapping[str, MutableData],
     ):
         """An OpenFOAM dictionary within a file as a mutable mapping."""
 
@@ -46,15 +57,13 @@ class FoamFile(
             self._file = _file
             self._keywords = _keywords
 
-        def __getitem__(
-            self, keyword: str
-        ) -> FoamFileBase._DataEntry | FoamFile.SubDict:
+        def __getitem__(self, keyword: str) -> DataEntry | FoamFile.SubDict:
            return self._file[(*self._keywords, keyword)]
 
         def __setitem__(
             self,
             keyword: str,
-            data:
+            data: Data,
         ) -> None:
             self._file[(*self._keywords, keyword)] = data
 
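With `FoamFileBase` gone, the nested helper classes move to `foamlib._files._types` and are re-attached to `FoamFile` as the class attributes added above, so call sites can keep writing `FoamFile.DimensionSet` and `FoamFile.Dimensioned`. A small illustration, assuming foamlib 0.7.0:

```python
from foamlib import FoamFile

# Kinematic-viscosity-like dimensions [0 2 -1 0 0 0 0]; zero entries are omitted from repr().
dims = FoamFile.DimensionSet(length=2, time=-1)
print(dims)  # DimensionSet(length=2, time=-1)

nu = FoamFile.Dimensioned(value=1e-05, dimensions=dims, name="nu")
```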
@@ -83,7 +92,7 @@ class FoamFile(
         def __repr__(self) -> str:
             return f"{type(self).__qualname__}('{self._file}', {self._keywords})"
 
-        def as_dict(self) ->
+        def as_dict(self) -> Dict_:
             """Return a nested dict representation of the dictionary."""
             ret = self._file.as_dict(include_header=True)
 
@@ -91,9 +100,9 @@ class FoamFile(
                 assert isinstance(ret, dict)
                 v = ret[k]
                 assert isinstance(v, dict)
-                ret = cast(
+                ret = cast(File, v)
 
-            return cast(
+            return cast(Dict_, ret)
 
     @property
     def version(self) -> float:
@@ -165,7 +174,7 @@ class FoamFile(
 
     def __getitem__(
         self, keywords: str | tuple[str, ...] | None
-    ) ->
+    ) -> DataEntry | FoamFile.SubDict:
         if not keywords:
             keywords = ()
         elif not isinstance(keywords, tuple):
@@ -181,14 +190,16 @@ class FoamFile(
             return FoamFile.SubDict(self, keywords)
         return deepcopy(value)
 
-    def __setitem__(
-        self, keywords: str | tuple[str, ...] | None, data: FoamFileBase.Data
-    ) -> None:
+    def __setitem__(self, keywords: str | tuple[str, ...] | None, data: Data) -> None:
         if not keywords:
             keywords = ()
         elif not isinstance(keywords, tuple):
             keywords = (keywords,)
 
+        if keywords and not isinstance(normalize(keywords[-1], kind=Kind.KEYWORD), str):
+            msg = f"Invalid keyword: {keywords[-1]}"
+            raise ValueError(msg)
+
         with self:
             try:
                 write_header = (
@@ -293,7 +304,7 @@ class FoamFile(
                     ...,
                     before
                     + indentation
-                    + dumps(keywords[-1])
+                    + dumps(keywords[-1], kind=Kind.KEYWORD)
                     + b"\n"
                     + indentation
                     + b"{\n"
@@ -311,7 +322,7 @@ class FoamFile(
                     normalize(data, kind=kind),
                     before
                     + indentation
-                    + dumps(keywords[-1])
+                    + dumps(keywords[-1], kind=Kind.KEYWORD)
                     + b" "
                     + dumps(data, kind=kind)
                     + b";"
@@ -364,7 +375,7 @@ class FoamFile(
     def __fspath__(self) -> str:
         return str(self.path)
 
-    def as_dict(self, *, include_header: bool = False) ->
+    def as_dict(self, *, include_header: bool = False) -> File:
         """
         Return a nested dict representation of the file.
 
@@ -407,17 +418,17 @@ class FoamFieldFile(FoamFile):
         @property
         def value(
             self,
-        ) ->
+        ) -> Field:
             """Alias of `self["value"]`."""
             return cast(
-
+                Field,
                 self["value"],
             )
 
         @value.setter
         def value(
             self,
-            value:
+            value: Field,
         ) -> None:
             self["value"] = value
 
@@ -427,7 +438,7 @@ class FoamFieldFile(FoamFile):
 
     def __getitem__(
         self, keywords: str | tuple[str, ...] | None
-    ) ->
+    ) -> DataEntry | FoamFile.SubDict:
         if not keywords:
             keywords = ()
         elif not isinstance(keywords, tuple):
@@ -442,29 +453,29 @@ class FoamFieldFile(FoamFile):
         return ret
 
     @property
-    def dimensions(self) ->
+    def dimensions(self) -> DimensionSet | Sequence[float]:
         """Alias of `self["dimensions"]`."""
         ret = self["dimensions"]
-        if not isinstance(ret,
+        if not isinstance(ret, DimensionSet):
             msg = "dimensions is not a DimensionSet"
             raise TypeError(msg)
         return ret
 
     @dimensions.setter
-    def dimensions(self, value:
+    def dimensions(self, value: DimensionSet | Sequence[float]) -> None:
         self["dimensions"] = value
 
     @property
     def internal_field(
         self,
-    ) ->
+    ) -> Field:
         """Alias of `self["internalField"]`."""
-        return cast(
+        return cast(Field, self["internalField"])
 
     @internal_field.setter
     def internal_field(
         self,
-        value:
+        value: Field,
     ) -> None:
         self["internalField"] = value
 
@@ -479,5 +490,5 @@ class FoamFieldFile(FoamFile):
         return ret
 
     @boundary_field.setter
-    def boundary_field(self, value: Mapping[str,
+    def boundary_field(self, value: Mapping[str, Dict_]) -> None:
         self["boundaryField"] = value
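The `FoamFieldFile` accessors retyped above read and write field files directly. A brief usage sketch; the path is illustrative and assumes an existing case with a `0/U` file:

```python
from pathlib import Path

from foamlib import FoamFieldFile

U = FoamFieldFile(Path("0/U"))
print(U.dimensions)                                 # a DimensionSet (or a plain sequence of exponents)
U.internal_field = [1.0, 0.0, 0.0]                  # uniform vector field
U.boundary_field["inlet"].value = [1.0, 0.0, 0.0]   # per-patch value, as in the README example
```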
--- foamlib-0.6.14/foamlib/_files/_parsing.py
+++ foamlib-0.7.0/foamlib/_files/_parsing.py
@@ -36,7 +36,7 @@ from pyparsing import (
     printables,
 )
 
-from .
+from ._types import DataEntry, Dimensioned, DimensionSet, File
 
 
 def _list_of(entry: ParserElement) -> ParserElement:
@@ -120,7 +120,7 @@ _SWITCH = (
 ).set_parse_action(lambda: False)
 _DIMENSIONS = (
     Literal("[").suppress() + common.number[0, 7] + Literal("]").suppress()
-).set_parse_action(lambda tks:
+).set_parse_action(lambda tks: DimensionSet(*tks))
 _TENSOR = common.ieee_float | (
     Literal("(").suppress()
     + Group(
@@ -133,7 +133,7 @@ _IDENTIFIER = Combine(
     + Opt(Literal("(") + Word(_IDENTBODYCHARS, exclude_chars="()") + Literal(")"))
 )
 _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
-    lambda tks:
+    lambda tks: Dimensioned(*reversed(tks.as_list()))
 )
 _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
     Keyword("nonuniform", _IDENTBODYCHARS).suppress()
@@ -253,24 +253,24 @@ _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
     )
 )
 _TOKEN = QuotedString('"', unquote_results=False) | _IDENTIFIER
-
-
+DATA = Forward()
+KEYWORD = _TOKEN | _list_of(_IDENTIFIER).set_parse_action(
     lambda tks: "(" + " ".join(tks[0]) + ")"
 )
-_KEYWORD_ENTRY = Dict(Group(_keyword_entry_of(
+_KEYWORD_ENTRY = Dict(Group(_keyword_entry_of(KEYWORD, DATA)), asdict=True)
 _DATA_ENTRY = Forward()
 _LIST_ENTRY = _KEYWORD_ENTRY | _DATA_ENTRY
 _LIST = _list_of(_LIST_ENTRY)
 _NUMBER = common.signed_integer ^ common.ieee_float
 _DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | _TOKEN
 
-
+DATA <<= _DATA_ENTRY[1, ...].set_parse_action(
     lambda tks: tuple(tks) if len(tks) > 1 else [tks[0]]
 )
 
 _FILE = (
     Dict(
-        Group(_keyword_entry_of(
+        Group(_keyword_entry_of(KEYWORD, Opt(DATA, default=""), located=True))[...]
         + Opt(
             Group(
                 Located(
@@ -280,7 +280,7 @@ _FILE = (
                 )
             )
         )
-        + Group(_keyword_entry_of(
+        + Group(_keyword_entry_of(KEYWORD, Opt(DATA, default=""), located=True))[...]
     )
     .ignore(cpp_style_comment)
     .ignore(Literal("#include") + ... + LineEnd())  # type: ignore [no-untyped-call]
@@ -288,11 +288,11 @@ _FILE = (
 )
 
 
-class Parsed(Mapping[Tuple[str, ...], Union[
+class Parsed(Mapping[Tuple[str, ...], Union[DataEntry, EllipsisType]]):
     def __init__(self, contents: bytes) -> None:
         self._parsed: MutableMapping[
             tuple[str, ...],
-            tuple[int,
+            tuple[int, DataEntry | EllipsisType, int],
         ] = {}
         for parse_result in _FILE.parse_string(
             contents.decode("latin-1"), parse_all=True
@@ -305,12 +305,10 @@ class Parsed(Mapping[Tuple[str, ...], Union[FoamFileBase._DataEntry, EllipsisTyp
     @staticmethod
     def _flatten_result(
         parse_result: ParseResults, *, _keywords: tuple[str, ...] = ()
-    ) -> Mapping[
-        tuple[str, ...], tuple[int, FoamFileBase._DataEntry | EllipsisType, int]
-    ]:
+    ) -> Mapping[tuple[str, ...], tuple[int, DataEntry | EllipsisType, int]]:
         ret: MutableMapping[
             tuple[str, ...],
-            tuple[int,
+            tuple[int, DataEntry | EllipsisType, int],
         ] = {}
         start = parse_result.locn_start
         assert isinstance(start, int)
@@ -336,16 +334,14 @@ class Parsed(Mapping[Tuple[str, ...], Union[FoamFileBase._DataEntry, EllipsisTyp
             ret[(*_keywords, keyword)] = (start, d, end)
         return ret
 
-    def __getitem__(
-        self, keywords: tuple[str, ...]
-    ) -> FoamFileBase._DataEntry | EllipsisType:
+    def __getitem__(self, keywords: tuple[str, ...]) -> DataEntry | EllipsisType:
         _, data, _ = self._parsed[keywords]
         return data
 
     def put(
         self,
         keywords: tuple[str, ...],
-        data:
+        data: DataEntry | EllipsisType,
         content: bytes,
     ) -> None:
         start, end = self.entry_location(keywords, missing_ok=True)
@@ -413,14 +409,14 @@ class Parsed(Mapping[Tuple[str, ...], Union[FoamFileBase._DataEntry, EllipsisTyp
 
         return start, end
 
-    def as_dict(self) ->
-        ret:
+    def as_dict(self) -> File:
+        ret: File = {}
         for keywords, (_, data, _) in self._parsed.items():
             r = ret
             for k in keywords[:-1]:
                 v = r[k]
                 assert isinstance(v, dict)
-                r = cast(
+                r = cast(File, v)
 
             assert isinstance(r, dict)
             if keywords:
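The net effect of the grammar changes above is that dimension sets and dimensioned entries parsed from a file now come back as the plain `DimensionSet` and `Dimensioned` classes instead of the `FoamFileBase`-nested ones. An illustration at the user level; the file path and entry are examples only:

```python
from pathlib import Path

from foamlib import FoamFile

tp = FoamFile(Path("constant/transportProperties"))
nu = tp["nu"]
# For an entry written as `nu [0 2 -1 0 0 0 0] 1e-05;`, `nu` is a FoamFile.Dimensioned
# whose .dimensions attribute is a FoamFile.DimensionSet.
if isinstance(nu, FoamFile.Dimensioned):
    print(nu.dimensions, nu.value)
```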
--- foamlib-0.6.14/foamlib/_files/_serialization.py
+++ foamlib-0.7.0/foamlib/_files/_serialization.py
@@ -1,9 +1,7 @@
 from __future__ import annotations
 
 import array
-import contextlib
 import itertools
-import re
 import sys
 from enum import Enum, auto
 from typing import cast, overload
@@ -13,7 +11,8 @@ if sys.version_info >= (3, 9):
 else:
     from typing import Mapping, Sequence
 
-from .
+from ._parsing import DATA, KEYWORD
+from ._types import Data, DataEntry, Dimensioned, DimensionSet
 from ._util import is_sequence
 
 try:
@@ -26,6 +25,7 @@ except ModuleNotFoundError:
 
 class Kind(Enum):
     DEFAULT = auto()
+    KEYWORD = auto()
     SINGLE_ENTRY = auto()
     ASCII_FIELD = auto()
     DOUBLE_PRECISION_BINARY_FIELD = auto()
@@ -33,34 +33,20 @@ class Kind(Enum):
     DIMENSIONS = auto()
 
 
-_TOKENS = re.compile(r'(?:[^\s"]|"(?:[^"])*")+')
-
-
 @overload
-def normalize(
-    data: FoamFileBase._DataEntry, *, kind: Kind = Kind.DEFAULT
-) -> FoamFileBase._DataEntry: ...
+def normalize(data: DataEntry, *, kind: Kind = Kind.DEFAULT) -> DataEntry: ...
 
 
 @overload
-def normalize(
-    data: FoamFileBase.Data, *, kind: Kind = Kind.DEFAULT
-) -> FoamFileBase.Data: ...
+def normalize(data: Data, *, kind: Kind = Kind.DEFAULT) -> Data: ...
 
 
-def normalize(
-    data: FoamFileBase.Data, *, kind: Kind = Kind.DEFAULT
-) -> FoamFileBase.Data:
+def normalize(data: Data, *, kind: Kind = Kind.DEFAULT) -> Data:
     if numpy and isinstance(data, np.ndarray):
         ret = data.tolist()
         assert isinstance(ret, list)
         return ret
 
-    if kind == Kind.SINGLE_ENTRY and isinstance(data, tuple):
-        ret = normalize(list(data))
-        assert isinstance(ret, list)
-        return ret
-
     if isinstance(data, Mapping):
         return {k: normalize(v, kind=kind) for k, v in data.items()}
 
@@ -71,33 +57,30 @@ def normalize(
         and all(isinstance(d, (int, float)) for d in data)
     ):
         data = cast(Sequence[float], data)
-        return
-
-    if is_sequence(data) and not isinstance(data, tuple):
-        return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
-
-    if isinstance(data, str):
-        with contextlib.suppress(ValueError):
-            return int(data)
-
-        with contextlib.suppress(ValueError):
-            return float(data)
+        return DimensionSet(*data)
 
-
+    if is_sequence(data) and (kind == Kind.SINGLE_ENTRY or not isinstance(data, tuple)):
+        if len(data) == 1 and isinstance(data[0], Mapping) and len(data[0]) > 1:
+            return [normalize({k: v}) for k, v in data[0].items()]
 
-
-            return tokens[0]
-
-        return tuple(tokens) if kind != Kind.SINGLE_ENTRY else " ".join(tokens)
+        return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
 
-    if isinstance(data,
+    if isinstance(data, Dimensioned):
         value = normalize(data.value, kind=Kind.SINGLE_ENTRY)
         assert isinstance(value, (int, float, list))
-        return
+        return Dimensioned(value, data.dimensions, data.name)
+
+    if isinstance(data, str):
+        if kind == Kind.KEYWORD:
+            data = KEYWORD.parse_string(data, parse_all=True)[0]
+            assert isinstance(data, str)
+            return data
+
+        return cast(DataEntry, DATA.parse_string(data, parse_all=True)[0])
 
     if isinstance(
         data,
-        (int, float, bool, tuple,
+        (int, float, bool, tuple, DimensionSet),
     ):
         return data
 
@@ -106,7 +89,7 @@ def normalize(
 
 
 def dumps(
-    data:
+    data: Data,
     *,
     kind: Kind = Kind.DEFAULT,
 ) -> bytes:
@@ -115,16 +98,19 @@ def dumps(
     if isinstance(data, Mapping):
         entries = []
         for k, v in data.items():
-
-
-
-
+            value = normalize(v)
+            if isinstance(value, Mapping):
+                entries.append(
+                    dumps(k, kind=Kind.KEYWORD) + b" {" + dumps(value) + b"}"
+                )
+            elif not value:
+                entries.append(dumps(k, kind=Kind.KEYWORD) + b";")
             else:
-                entries.append(dumps(k) + b" " + dumps(
+                entries.append(dumps(k, kind=Kind.KEYWORD) + b" " + dumps(value) + b";")
 
         return b" ".join(entries)
 
-    if isinstance(data,
+    if isinstance(data, DimensionSet):
         return b"[" + b" ".join(dumps(v) for v in data) + b"]"
 
     if kind in (
@@ -181,7 +167,7 @@ def dumps(
 
         return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
 
-    if isinstance(data,
+    if isinstance(data, Dimensioned):
         if data.name is not None:
             return (
                 dumps(data.name)
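With the new `Kind.KEYWORD` and the mapping branch added above, serializing a nested dict emits `keyword { ... }` blocks and `keyword value;` entries. A rough sketch against the internal helper (internal API, and the exact whitespace in the output may differ):

```python
from foamlib._files._serialization import dumps

print(dumps({"ddtSchemes": {"default": "Euler"}}))
# roughly: b'ddtSchemes {default Euler;}'
```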
--- /dev/null
+++ foamlib-0.7.0/foamlib/_files/_types.py
@@ -0,0 +1,78 @@
+from __future__ import annotations
+
+import sys
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Dict, NamedTuple, Optional, Tuple, Union
+
+if TYPE_CHECKING:
+    import numpy as np
+
+if sys.version_info >= (3, 9):
+    from collections.abc import Mapping, MutableMapping, Sequence
+else:
+    from typing import Mapping, MutableMapping, Sequence
+
+
+class DimensionSet(NamedTuple):
+    mass: float = 0
+    length: float = 0
+    time: float = 0
+    temperature: float = 0
+    moles: float = 0
+    current: float = 0
+    luminous_intensity: float = 0
+
+    def __repr__(self) -> str:
+        return f"{type(self).__name__}({', '.join(f'{n}={v}' for n, v in zip(self._fields, self) if v != 0)})"
+
+
+Tensor = Union[
+    float,
+    Sequence[float],
+    "np.ndarray[Tuple[()], np.dtype[np.generic]]",
+    "np.ndarray[Tuple[int], np.dtype[np.generic]]",
+]
+
+
+@dataclass
+class Dimensioned:
+    value: Tensor = 0
+    dimensions: DimensionSet | Sequence[float] = ()
+    name: str | None = None
+
+    def __post_init__(self) -> None:
+        if not isinstance(self.dimensions, DimensionSet):
+            self.dimensions = DimensionSet(*self.dimensions)
+
+
+Field = Union[
+    Tensor, Sequence[Tensor], "np.ndarray[Tuple[int, int], np.dtype[np.generic]]"
+]
+
+DataEntry = Union[
+    str,
+    int,
+    float,
+    bool,
+    Dimensioned,
+    DimensionSet,
+    Sequence["Data"],
+    Tensor,
+    Field,
+]
+
+Data = Union[
+    DataEntry,
+    Mapping[str, "Data"],
+]
+"""
+A value that can be stored in an OpenFOAM file.
+"""
+
+MutableData = Union[
+    DataEntry,
+    MutableMapping[str, "MutableData"],
+]
+
+Dict_ = Dict[str, Union["Data", "Dict_"]]
+File = Dict[Optional[str], Union["Data", "Dict_"]]
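The new module is essentially `_base.py` (deleted further below) with the `FoamFileBase` wrapper class removed and the aliases made public. One behavior worth noting from the definitions above: `Dimensioned.__post_init__` coerces a plain sequence of exponents into a `DimensionSet`. A small sketch, using the public `FoamFile` aliases rather than the internal module path:

```python
from foamlib import FoamFile

nu = FoamFile.Dimensioned(
    value=1e-05,
    dimensions=[0, 2, -1, 0, 0, 0, 0],  # coerced to a DimensionSet by __post_init__
    name="nu",
)
assert isinstance(nu.dimensions, FoamFile.DimensionSet)
assert nu.dimensions.length == 2 and nu.dimensions.time == -1
```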
--- foamlib-0.6.14/foamlib/_files/_util.py
+++ foamlib-0.7.0/foamlib/_files/_util.py
@@ -14,10 +14,10 @@ else:
     from typing_extensions import TypeGuard
 
 if TYPE_CHECKING:
-    from .
+    from ._types import Data
 
 
 def is_sequence(
-    value:
-) -> TypeGuard[Sequence[
+    value: Data,
+) -> TypeGuard[Sequence[Data]]:
     return isinstance(value, Sequence) and not isinstance(value, str)
--- foamlib-0.6.14/foamlib.egg-info/PKG-INFO
+++ foamlib-0.7.0/foamlib.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: foamlib
-Version: 0.6.14
+Version: 0.7.0
 Summary: A Python interface for interacting with OpenFOAM
 Author-email: "Gabriel S. Gerlero" <ggerlero@cimec.unl.edu.ar>
 Project-URL: Homepage, https://github.com/gerlero/foamlib
@@ -169,24 +169,23 @@ U = FoamFieldFile(Path(my_pitz) / "0/U")
 print(U.internal_field)
 ```
 
-### 🔁 Run an optimization loop
+### 🔁 Run an optimization loop on a Slurm-based cluster
 
 ```python
 import os
 from pathlib import Path
-from foamlib import
+from foamlib import AsyncSlurmFoamCase
 from scipy.optimize import differential_evolution
 
-base =
-# Replace with `AsyncSlurmFoamCase` if on a cluster and you want cases to be run as Slurm jobs
+base = AsyncSlurmFoamCase(Path(os.environ["FOAM_TUTORIALS"]) / "incompressible/simpleFoam/pitzDaily")
 
 async def cost(x):
     async with base.clone() as clone:
         clone[0]["U"].boundary_field["inlet"].value = [x[0], 0, 0]
-        await clone.run()
+        await clone.run(fallback=True) # Run locally if Slurm is not available
         return abs(clone[-1]["U"].internal_field[0][0])
 
-result = differential_evolution(cost, bounds=[(-1, 1)], workers=
+result = differential_evolution(cost, bounds=[(-1, 1)], workers=AsyncSlurmFoamCase.map, polish=False)
 ```
 
 ### 📄 Use it to create a `run` (or `clean`) script
--- foamlib-0.6.14/foamlib.egg-info/SOURCES.txt
+++ foamlib-0.7.0/foamlib.egg-info/SOURCES.txt
@@ -17,9 +17,9 @@ foamlib/_cases/_subprocess.py
 foamlib/_cases/_sync.py
 foamlib/_cases/_util.py
 foamlib/_files/__init__.py
-foamlib/_files/_base.py
 foamlib/_files/_files.py
 foamlib/_files/_io.py
 foamlib/_files/_parsing.py
 foamlib/_files/_serialization.py
+foamlib/_files/_types.py
 foamlib/_files/_util.py
--- foamlib-0.6.14/foamlib/_files/_base.py
+++ /dev/null
@@ -1,76 +0,0 @@
-from __future__ import annotations
-
-import sys
-from dataclasses import dataclass
-from typing import TYPE_CHECKING, Dict, NamedTuple, Optional, Tuple, Union
-
-if TYPE_CHECKING:
-    import numpy as np
-
-if sys.version_info >= (3, 9):
-    from collections.abc import Mapping, MutableMapping, Sequence
-else:
-    from typing import Mapping, MutableMapping, Sequence
-
-
-class FoamFileBase:
-    class DimensionSet(NamedTuple):
-        mass: float = 0
-        length: float = 0
-        time: float = 0
-        temperature: float = 0
-        moles: float = 0
-        current: float = 0
-        luminous_intensity: float = 0
-
-        def __repr__(self) -> str:
-            return f"{type(self).__qualname__}({', '.join(f'{n}={v}' for n, v in zip(self._fields, self) if v != 0)})"
-
-    _Tensor = Union[
-        float,
-        Sequence[float],
-        "np.ndarray[Tuple[()], np.dtype[np.generic]]",
-        "np.ndarray[Tuple[int], np.dtype[np.generic]]",
-    ]
-
-    @dataclass
-    class Dimensioned:
-        value: FoamFileBase._Tensor = 0
-        dimensions: FoamFileBase.DimensionSet | Sequence[float] = ()
-        name: str | None = None
-
-        def __post_init__(self) -> None:
-            if not isinstance(self.dimensions, FoamFileBase.DimensionSet):
-                self.dimensions = FoamFileBase.DimensionSet(*self.dimensions)
-
-    _Field = Union[
-        _Tensor, Sequence[_Tensor], "np.ndarray[Tuple[int, int], np.dtype[np.generic]]"
-    ]
-
-    _DataEntry = Union[
-        str,
-        int,
-        float,
-        bool,
-        Dimensioned,
-        DimensionSet,
-        Sequence["Data"],
-        _Tensor,
-        _Field,
-    ]
-
-    Data = Union[
-        _DataEntry,
-        Mapping[str, "Data"],
-    ]
-    """
-    A value that can be stored in an OpenFOAM file.
-    """
-
-    _MutableData = Union[
-        _DataEntry,
-        MutableMapping[str, "_MutableData"],
-    ]
-
-    _Dict = Dict[str, Union["Data", "_Dict"]]
-    _File = Dict[Optional[str], Union["Data", "_Dict"]]
The remaining 16 files (listed above with +0 -0) are unchanged.