foamlib 0.6.12__tar.gz → 0.6.13__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {foamlib-0.6.12 → foamlib-0.6.13}/PKG-INFO +1 -1
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/__init__.py +1 -1
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_files/_files.py +26 -5
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_files/_parsing.py +75 -26
- foamlib-0.6.13/foamlib/_files/_serialization.py +210 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib.egg-info/PKG-INFO +1 -1
- foamlib-0.6.12/foamlib/_files/_serialization.py +0 -125
- {foamlib-0.6.12 → foamlib-0.6.13}/LICENSE.txt +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/README.md +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_cases/__init__.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_cases/_async.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_cases/_base.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_cases/_run.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_cases/_slurm.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_cases/_subprocess.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_cases/_sync.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_cases/_util.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_files/__init__.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_files/_base.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_files/_io.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_files/_util.py +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib/py.typed +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib.egg-info/SOURCES.txt +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib.egg-info/dependency_links.txt +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib.egg-info/requires.txt +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/foamlib.egg-info/top_level.txt +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/pyproject.toml +0 -0
- {foamlib-0.6.12 → foamlib-0.6.13}/setup.cfg +0 -0
{foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_files/_files.py

@@ -1,5 +1,6 @@
 from __future__ import annotations
 
+import os
 import sys
 from copy import deepcopy
 from typing import Any, Optional, Tuple, Union, cast
@@ -16,7 +17,7 @@ else:
 
 from ._base import FoamFileBase
 from ._io import FoamFileIO
-from ._serialization import Kind, dumps
+from ._serialization import Kind, dumps, normalize
 from ._util import is_sequence
 
 
@@ -216,12 +217,28 @@ class FoamFile(
                     or keywords[2].endswith("Gradient")
                 )
             ):
-
+                if self.format == "binary":
+                    arch = self.get(("FoamFile", "arch"), default=None)
+                    assert arch is None or isinstance(arch, str)
+                    if (arch is not None and "scalar=32" in arch) or (
+                        arch is None
+                        and os.environ.get("WM_PRECISION_OPTION", default="DP") == "SP"
+                    ):
+                        kind = Kind.SINGLE_PRECISION_BINARY_FIELD
+                    else:
+                        kind = Kind.DOUBLE_PRECISION_BINARY_FIELD
+                else:
+                    kind = Kind.ASCII_FIELD
             elif keywords == ("dimensions",):
                 kind = Kind.DIMENSIONS
 
             if (
-                kind
+                kind
+                in (
+                    Kind.ASCII_FIELD,
+                    Kind.DOUBLE_PRECISION_BINARY_FIELD,
+                    Kind.SINGLE_PRECISION_BINARY_FIELD,
+                )
             ) and self.class_ == "dictionary":
                 if isinstance(data, (int, float)):
                     self.class_ = "volScalarField"
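The hunk above chooses between single- and double-precision binary fields. As a rough standalone sketch of that rule (the helper name and the example arch strings below are illustrative, not part of foamlib): an explicit "scalar=32" token in the file's FoamFile arch entry selects 32-bit floats, and when no arch entry is present the WM_PRECISION_OPTION environment variable decides, defaulting to double precision.

import os
from typing import Optional

def binary_scalar_width(arch: Optional[str]) -> int:
    # Mirrors the precision rule introduced in _files.py above.
    if (arch is not None and "scalar=32" in arch) or (
        arch is None and os.environ.get("WM_PRECISION_OPTION", "DP") == "SP"
    ):
        return 4  # Kind.SINGLE_PRECISION_BINARY_FIELD
    return 8      # Kind.DOUBLE_PRECISION_BINARY_FIELD

assert binary_scalar_width("LSB;label=32;scalar=32") == 4
assert binary_scalar_width("LSB;label=32;scalar=64") == 8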
@@ -291,7 +308,7 @@ class FoamFile(
             elif keywords:
                 parsed.put(
                     keywords,
-
+                    normalize(data, kind=kind),
                     before
                     + indentation
                     + dumps(keywords[-1])
@@ -302,7 +319,11 @@ class FoamFile(
                 )
 
             else:
-                parsed.put(
+                parsed.put(
+                    (),
+                    normalize(data, kind=kind),
+                    before + dumps(data, kind=kind) + after,
+                )
 
     def __delitem__(self, keywords: str | tuple[str, ...] | None) -> None:
         if not keywords:
{foamlib-0.6.12 → foamlib-0.6.13}/foamlib/_files/_parsing.py

@@ -82,10 +82,12 @@ def _keyword_entry_of(
 
 def _unpack_binary_field(
     tks: ParseResults,
+    *,
+    elsize: int = 1,
 ) -> Sequence[Sequence[float] | Sequence[Sequence[float]]]:
-
+    float_size = len(tks[0]) // elsize
 
-    arr = array.array("d", "".join(tks).encode("latin-1"))
+    arr = array.array("f" if float_size == 4 else "d", "".join(tks).encode("latin-1"))
 
     values: Sequence[float] | Sequence[Sequence[float]]
 
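_unpack_binary_field now takes the element size as a keyword argument and infers the per-component float width from the length of the matched token, picking the "f" (4-byte) or "d" (8-byte) array typecode accordingly. A minimal sketch of that inference, using an illustrative single-precision vector token rather than a real pyparsing result:

import array

# One vector element, serialized as the latin-1 "characters" the grammar captures:
# 3 components x 4 bytes = 12 characters.
token = array.array("f", [1.0, 0.0, 0.0]).tobytes().decode("latin-1")

elsize = 3                                   # vector -> 3 components per element
float_size = len(token) // elsize            # 12 // 3 = 4 -> single precision
typecode = "f" if float_size == 4 else "d"
assert array.array(typecode, token.encode("latin-1")).tolist() == [1.0, 0.0, 0.0]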
@@ -130,45 +132,92 @@ _IDENTIFIER = Combine(
 _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
     lambda tks: FoamFileBase.Dimensioned(*reversed(tks.as_list()))
 )
-_FIELD = (Keyword("uniform").suppress() + _TENSOR) | (
-    Keyword("nonuniform").suppress()
+_FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
+    Keyword("nonuniform", _IDENTBODYCHARS).suppress()
     + (
         _list_of(_TENSOR)
         | (
             Literal("List").suppress()
             + Literal("<").suppress()
             + (
-
-                    CharsNotIn(exact=8),
+                (
                     Literal("scalar").suppress()
                     + Literal(">").suppress()
-                    +
-
-
-
-
+                    + (
+                        (
+                            counted_array(
+                                CharsNotIn(exact=8),
+                                common.integer + Literal("(").suppress(),
+                            )
+                        )
+                        | (
+                            counted_array(
+                                CharsNotIn(exact=4),
+                                common.integer + Literal("(").suppress(),
+                            )
+                        )
+                    )
+                    + Literal(")").suppress()
+                ).set_parse_action(_unpack_binary_field)
+                | (
                     Literal("vector").suppress()
                     + Literal(">").suppress()
-                    +
-
-
-
-
+                    + (
+                        (
+                            counted_array(
+                                CharsNotIn(exact=8 * 3),
+                                common.integer + Literal("(").suppress(),
+                            )
+                        )
+                        | (
+                            counted_array(
+                                CharsNotIn(exact=4 * 3),
+                                common.integer + Literal("(").suppress(),
+                            )
+                        )
+                    )
+                    + Literal(")").suppress()
+                ).set_parse_action(lambda tks: _unpack_binary_field(tks, elsize=3))
+                | (
                     Literal("symmTensor").suppress()
                     + Literal(">").suppress()
-                    +
-
-
-
-
+                    + (
+                        (
+                            counted_array(
+                                CharsNotIn(exact=8 * 6),
+                                common.integer + Literal("(").suppress(),
+                            )
+                        )
+                        | (
+                            counted_array(
+                                CharsNotIn(exact=4 * 6),
+                                common.integer + Literal("(").suppress(),
+                            )
+                        )
+                    )
+                    + Literal(")").suppress()
+                ).set_parse_action(lambda tks: _unpack_binary_field(tks, elsize=6))
+                | (
                     Literal("tensor").suppress()
                     + Literal(">").suppress()
-                    +
-
-
+                    + (
+                        (
+                            counted_array(
+                                CharsNotIn(exact=8 * 9),
+                                common.integer + Literal("(").suppress(),
+                            )
+                        )
+                        | (
+                            counted_array(
+                                CharsNotIn(exact=4 * 9),
+                                common.integer + Literal("(").suppress(),
+                            )
+                        )
+                    )
+                    + Literal(")").suppress()
+                ).set_parse_action(lambda tks: _unpack_binary_field(tks, elsize=9))
             )
-
-        ).set_parse_action(_unpack_binary_field)
+        )
     )
 )
 _TOKEN = QuotedString('"', unquote_results=False) | _IDENTIFIER
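The reworked grammar accepts both double- and single-precision payloads for each tensor type: the CharsNotIn(exact=8 * N) branch matches 8-byte components, the CharsNotIn(exact=4 * N) branch matches 4-byte ones, and the matching elsize is forwarded to _unpack_binary_field. A sketch of the byte layout such an entry carries (illustrative values; this mirrors what dumps in the new _serialization.py emits):

import array

values = [(1.0, 0.0, 0.0), (0.0, 1.0, 0.0)]
payload = array.array("f", [x for v in values for x in v]).tobytes()  # single precision
entry = b"nonuniform List<vector> 2(" + payload + b")"
# With doubles the payload would come from array.array("d", ...), so each
# element would span 8 * 3 characters instead of 4 * 3.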
foamlib-0.6.13/foamlib/_files/_serialization.py

@@ -0,0 +1,210 @@
+from __future__ import annotations
+
+import array
+import contextlib
+import itertools
+import re
+import sys
+from enum import Enum, auto
+from typing import cast, overload
+
+if sys.version_info >= (3, 9):
+    from collections.abc import Mapping, Sequence
+else:
+    from typing import Mapping, Sequence
+
+from ._base import FoamFileBase
+from ._util import is_sequence
+
+try:
+    import numpy as np
+
+    numpy = True
+except ModuleNotFoundError:
+    numpy = False
+
+
+class Kind(Enum):
+    DEFAULT = auto()
+    SINGLE_ENTRY = auto()
+    ASCII_FIELD = auto()
+    DOUBLE_PRECISION_BINARY_FIELD = auto()
+    SINGLE_PRECISION_BINARY_FIELD = auto()
+    DIMENSIONS = auto()
+
+
+_TOKENS = re.compile(r'(?:[^\s"]|"(?:[^"])*")+')
+
+
+@overload
+def normalize(
+    data: FoamFileBase._DataEntry, *, kind: Kind = Kind.DEFAULT
+) -> FoamFileBase._DataEntry: ...
+
+
+@overload
+def normalize(
+    data: FoamFileBase.Data, *, kind: Kind = Kind.DEFAULT
+) -> FoamFileBase.Data: ...
+
+
+def normalize(
+    data: FoamFileBase.Data, *, kind: Kind = Kind.DEFAULT
+) -> FoamFileBase.Data:
+    if numpy and isinstance(data, np.ndarray):
+        ret = data.tolist()
+        assert isinstance(ret, list)
+        return ret
+
+    if kind == Kind.SINGLE_ENTRY and isinstance(data, tuple):
+        ret = normalize(list(data))
+        assert isinstance(ret, list)
+        return ret
+
+    if isinstance(data, Mapping):
+        return {k: normalize(v, kind=kind) for k, v in data.items()}
+
+    if (
+        kind == Kind.DIMENSIONS
+        and is_sequence(data)
+        and len(data) <= 7
+        and all(isinstance(d, (int, float)) for d in data)
+    ):
+        data = cast(Sequence[float], data)
+        return FoamFileBase.DimensionSet(*data)
+
+    if is_sequence(data) and not isinstance(data, tuple):
+        return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
+
+    if isinstance(data, str):
+        with contextlib.suppress(ValueError):
+            return int(data)
+
+        with contextlib.suppress(ValueError):
+            return float(data)
+
+        tokens: list[str] = re.findall(_TOKENS, data)
+
+        if len(tokens) == 1:
+            return tokens[0]
+
+        return tuple(tokens) if kind != Kind.SINGLE_ENTRY else " ".join(tokens)
+
+    if isinstance(data, FoamFileBase.Dimensioned):
+        value = normalize(data.value, kind=Kind.SINGLE_ENTRY)
+        assert isinstance(value, (int, float, list))
+        return FoamFileBase.Dimensioned(value, data.dimensions, data.name)
+
+    if isinstance(
+        data,
+        (int, float, bool, tuple, FoamFileBase.DimensionSet),
+    ):
+        return data
+
+    msg = f"Unsupported data type: {type(data)}"
+    raise TypeError(msg)
+
+
+def dumps(
+    data: FoamFileBase.Data,
+    *,
+    kind: Kind = Kind.DEFAULT,
+) -> bytes:
+    data = normalize(data, kind=kind)
+
+    if isinstance(data, Mapping):
+        entries = []
+        for k, v in data.items():
+            if isinstance(v, Mapping):
+                entries.append(dumps(k) + b" {" + dumps(v) + b"}")
+            elif not v:
+                entries.append(dumps(k) + b";")
+            else:
+                entries.append(dumps(k) + b" " + dumps(v) + b";")
+
+        return b" ".join(entries)
+
+    if isinstance(data, FoamFileBase.DimensionSet):
+        return b"[" + b" ".join(dumps(v) for v in data) + b"]"
+
+    if kind in (
+        Kind.ASCII_FIELD,
+        Kind.DOUBLE_PRECISION_BINARY_FIELD,
+        Kind.SINGLE_PRECISION_BINARY_FIELD,
+    ) and (
+        isinstance(data, (int, float))
+        or is_sequence(data)
+        and data
+        and isinstance(data[0], (int, float))
+        and len(data) in (3, 6, 9)
+    ):
+        return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
+
+    if kind in (
+        Kind.ASCII_FIELD,
+        Kind.DOUBLE_PRECISION_BINARY_FIELD,
+        Kind.SINGLE_PRECISION_BINARY_FIELD,
+    ) and is_sequence(data):
+        if data and isinstance(data[0], (int, float)):
+            tensor_kind = b"scalar"
+        elif is_sequence(data[0]) and data[0] and isinstance(data[0][0], (int, float)):
+            if len(data[0]) == 3:
+                tensor_kind = b"vector"
+            elif len(data[0]) == 6:
+                tensor_kind = b"symmTensor"
+            elif len(data[0]) == 9:
+                tensor_kind = b"tensor"
+            else:
+                return dumps(data)
+        else:
+            return dumps(data)
+
+        if kind in (
+            Kind.DOUBLE_PRECISION_BINARY_FIELD,
+            Kind.SINGLE_PRECISION_BINARY_FIELD,
+        ):
+            typecode = "f" if kind == Kind.SINGLE_PRECISION_BINARY_FIELD else "d"
+            if tensor_kind == b"scalar":
+                data = cast(Sequence[float], data)
+                contents = b"(" + array.array(typecode, data).tobytes() + b")"
+            else:
+                data = cast(Sequence[Sequence[float]], data)
+                contents = (
+                    b"("
+                    + array.array(
+                        typecode, itertools.chain.from_iterable(data)
+                    ).tobytes()
+                    + b")"
+                )
+        else:
+            contents = dumps(data, kind=Kind.SINGLE_ENTRY)
+
+        return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
+
+    if isinstance(data, FoamFileBase.Dimensioned):
+        if data.name is not None:
+            return (
+                dumps(data.name)
+                + b" "
+                + dumps(data.dimensions, kind=Kind.DIMENSIONS)
+                + b" "
+                + dumps(data.value, kind=Kind.SINGLE_ENTRY)
+            )
+        return (
+            dumps(data.dimensions, kind=Kind.DIMENSIONS)
+            + b" "
+            + dumps(data.value, kind=Kind.SINGLE_ENTRY)
+        )
+
+    if isinstance(data, tuple):
+        return b" ".join(dumps(v) for v in data)
+
+    if is_sequence(data):
+        return b"(" + b" ".join(dumps(v, kind=Kind.SINGLE_ENTRY) for v in data) + b")"
+
+    if data is True:
+        return b"yes"
+    if data is False:
+        return b"no"
+
+    return str(data).encode("latin-1")
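A brief usage sketch of the new module (it is internal to foamlib; the expected outputs in the comments are derived from the code above, with illustrative input values):

from foamlib._files._serialization import Kind, dumps, normalize

normalize("1.5")                               # 1.5 (numeric strings become numbers)
dumps({"solver": "PCG", "tolerance": 1e-6})    # b'solver PCG; tolerance 1e-06;'
dumps([1.0, 2.0, 3.0], kind=Kind.ASCII_FIELD)  # b'uniform (1.0 2.0 3.0)'
dumps([1.0, 2.0], kind=Kind.DOUBLE_PRECISION_BINARY_FIELD)
# b'nonuniform List<scalar> 2(' + 16 bytes of IEEE doubles + b')'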
foamlib-0.6.12/foamlib/_files/_serialization.py

@@ -1,125 +0,0 @@
-from __future__ import annotations
-
-import array
-import itertools
-import sys
-from enum import Enum, auto
-from typing import cast
-
-if sys.version_info >= (3, 9):
-    from collections.abc import Mapping, Sequence
-else:
-    from typing import Mapping, Sequence
-
-from ._base import FoamFileBase
-from ._util import is_sequence
-
-try:
-    import numpy as np
-
-    numpy = True
-except ModuleNotFoundError:
-    numpy = False
-
-
-class Kind(Enum):
-    DEFAULT = auto()
-    SINGLE_ENTRY = auto()
-    FIELD = auto()
-    BINARY_FIELD = auto()
-    DIMENSIONS = auto()
-
-
-def dumps(
-    data: FoamFileBase.Data,
-    *,
-    kind: Kind = Kind.DEFAULT,
-) -> bytes:
-    if numpy and isinstance(data, np.ndarray):
-        return dumps(data.tolist(), kind=kind)
-
-    if isinstance(data, Mapping):
-        entries = []
-        for k, v in data.items():
-            b = dumps(v, kind=kind)
-            if isinstance(v, Mapping):
-                entries.append(dumps(k) + b" {" + b + b"}")
-            elif not b:
-                entries.append(dumps(k) + b";")
-            else:
-                entries.append(dumps(k) + b" " + b + b";")
-
-        return b" ".join(entries)
-
-    if isinstance(data, FoamFileBase.DimensionSet) or (
-        kind == Kind.DIMENSIONS and is_sequence(data) and len(data) == 7
-    ):
-        return b"[" + b" ".join(dumps(v) for v in data) + b"]"
-
-    if kind in (Kind.FIELD, Kind.BINARY_FIELD) and (
-        isinstance(data, (int, float))
-        or is_sequence(data)
-        and data
-        and isinstance(data[0], (int, float))
-        and len(data) in (3, 6, 9)
-    ):
-        return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
-
-    if kind in (Kind.FIELD, Kind.BINARY_FIELD) and is_sequence(data):
-        if data and isinstance(data[0], (int, float)):
-            tensor_kind = b"scalar"
-        elif is_sequence(data[0]) and data[0] and isinstance(data[0][0], (int, float)):
-            if len(data[0]) == 3:
-                tensor_kind = b"vector"
-            elif len(data[0]) == 6:
-                tensor_kind = b"symmTensor"
-            elif len(data[0]) == 9:
-                tensor_kind = b"tensor"
-            else:
-                return dumps(data)
-        else:
-            return dumps(data)
-
-        if kind == Kind.BINARY_FIELD:
-            if tensor_kind == b"scalar":
-                data = cast(Sequence[float], data)
-                contents = b"(" + array.array("d", data).tobytes() + b")"
-            else:
-                data = cast(Sequence[Sequence[float]], data)
-                contents = (
-                    b"("
-                    + array.array("d", itertools.chain.from_iterable(data)).tobytes()
-                    + b")"
-                )
-        else:
-            contents = dumps(data, kind=Kind.SINGLE_ENTRY)
-
-        return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
-
-    if kind != Kind.SINGLE_ENTRY and isinstance(data, tuple):
-        return b" ".join(dumps(v) for v in data)
-
-    if isinstance(data, FoamFileBase.Dimensioned):
-        if data.name is not None:
-            return (
-                dumps(data.name)
-                + b" "
-                + dumps(data.dimensions, kind=Kind.DIMENSIONS)
-                + b" "
-                + dumps(data.value, kind=Kind.SINGLE_ENTRY)
-            )
-        return (
-            dumps(data.dimensions, kind=Kind.DIMENSIONS)
-            + b" "
-            + dumps(data.value, kind=Kind.SINGLE_ENTRY)
-        )
-
-    if is_sequence(data):
-        return b"(" + b" ".join(dumps(v, kind=Kind.SINGLE_ENTRY) for v in data) + b")"
-
-    if data is True:
-        return b"yes"
-    if data is False:
-        return b"no"
-
-    return str(data).encode("latin-1")