foamlib-0.8.0-py3-none-any.whl → foamlib-0.8.1-py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
foamlib/__init__.py CHANGED
@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.8.0"
+__version__ = "0.8.1"
 
 from ._cases import (
     AsyncFoamCase,
foamlib/_files/_files.py CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import os
 import sys
 from copy import deepcopy
 from typing import Any, Optional, Tuple, Union, cast
@@ -15,6 +14,8 @@ if sys.version_info >= (3, 9):
 else:
     from typing import Iterator, Mapping, MutableMapping, Sequence
 
+import numpy as np
+
 from ._io import FoamFileIO
 from ._serialization import Kind, dumps, normalize
 from ._types import (
@@ -27,7 +28,6 @@ from ._types import (
     File,
     MutableEntry,
 )
-from ._util import is_sequence
 
 
 class FoamFile(
@@ -196,7 +196,7 @@ class FoamFile(
         elif not isinstance(keywords, tuple):
             keywords = (keywords,)
 
-        if keywords and not isinstance(normalize(keywords[-1], kind=Kind.KEYWORD), str):
+        if keywords and not isinstance(normalize(keywords[-1]), str):
             msg = f"Invalid keyword: {keywords[-1]}"
             raise ValueError(msg)
 
@@ -228,50 +228,36 @@ class FoamFile(
                 or keywords[2].endswith("Gradient")
             )
         ):
-            if self.format == "binary":
-                arch = self.get(("FoamFile", "arch"), default=None)
-                assert arch is None or isinstance(arch, str)
-                if (arch is not None and "scalar=32" in arch) or (
-                    arch is None
-                    and os.environ.get("WM_PRECISION_OPTION", default="DP") == "SP"
-                ):
-                    kind = Kind.SINGLE_PRECISION_BINARY_FIELD
-                else:
-                    kind = Kind.DOUBLE_PRECISION_BINARY_FIELD
-            else:
-                kind = Kind.ASCII_FIELD
+            kind = (
+                Kind.BINARY_FIELD if self.format == "binary" else Kind.ASCII_FIELD
+            )
         elif keywords == ("dimensions",):
             kind = Kind.DIMENSIONS
 
         if (
-            kind
-            in (
-                Kind.ASCII_FIELD,
-                Kind.DOUBLE_PRECISION_BINARY_FIELD,
-                Kind.SINGLE_PRECISION_BINARY_FIELD,
-            )
+            kind in (Kind.ASCII_FIELD, Kind.BINARY_FIELD)
         ) and self.class_ == "dictionary":
-            if isinstance(data, (int, float)):
-                self.class_ = "volScalarField"
-
-            elif is_sequence(data) and data:
-                if isinstance(data[0], (int, float)):
-                    if len(data) == 3:
-                        self.class_ = "volVectorField"
-                    elif len(data) == 6:
-                        self.class_ = "volSymmTensorField"
-                    elif len(data) == 9:
-                        self.class_ = "volTensorField"
-                elif (
-                    is_sequence(data[0])
-                    and data[0]
-                    and isinstance(data[0][0], (int, float))
-                ):
-                    if len(data[0]) == 3:
+            try:
+                shape = np.shape(data)  # type: ignore [arg-type]
+            except ValueError:
+                pass
+            else:
+                if not shape:
+                    self.class_ = "volScalarField"
+                elif shape == (3,):
+                    self.class_ = "volVectorField"
+                elif shape == (6,):
+                    self.class_ = "volSymmTensorField"
+                elif shape == (9,):
+                    self.class_ = "volTensorField"
+                elif len(shape) == 1:
+                    self.class_ = "volScalarField"
+                elif len(shape) == 2:
+                    if shape[1] == 3:
                         self.class_ = "volVectorField"
-                    elif len(data[0]) == 6:
+                    elif shape[1] == 6:
                         self.class_ = "volSymmTensorField"
-                    elif len(data[0]) == 9:
+                    elif shape[1] == 9:
                         self.class_ = "volTensorField"
 
         parsed = self._get_parsed(missing_ok=True)
@@ -304,7 +290,7 @@ class FoamFile(
                 ...,
                 before
                 + indentation
-                + dumps(keywords[-1], kind=Kind.KEYWORD)
+                + dumps(keywords[-1])
                 + b"\n"
                 + indentation
                 + b"{\n"
@@ -322,7 +308,7 @@ class FoamFile(
                 normalize(data, kind=kind),
                 before
                 + indentation
-                + dumps(keywords[-1], kind=Kind.KEYWORD)
+                + dumps(keywords[-1])
                 + b" "
                 + dumps(data, kind=kind)
                 + b";"
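The class-inference rewrite above collapses the nested isinstance/len checks into a single np.shape dispatch. A minimal standalone sketch of that mapping (infer_field_class is a hypothetical helper name for illustration, not part of foamlib's API):

from __future__ import annotations

import numpy as np

_CLASS_BY_COMPONENTS = {3: "volVectorField", 6: "volSymmTensorField", 9: "volTensorField"}

def infer_field_class(data) -> str | None:
    """Return the OpenFOAM class implied by the shape of a field value."""
    try:
        shape = np.shape(data)  # raises ValueError for ragged nested sequences
    except ValueError:
        return None  # ragged data: leave the class unchanged
    if shape == ():
        return "volScalarField"  # uniform scalar
    if len(shape) == 1:
        # (3,)/(6,)/(9,) read as uniform vector/tensor values; any other
        # length is a nonuniform scalar field, matching the elif chain above
        return _CLASS_BY_COMPONENTS.get(shape[0], "volScalarField")
    if len(shape) == 2:
        return _CLASS_BY_COMPONENTS.get(shape[1])
    return None

assert infer_field_class(0.0) == "volScalarField"
assert infer_field_class([1.0, 0.0, 0.0]) == "volVectorField"
assert infer_field_class(np.zeros((100, 6))) == "volSymmTensorField"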
foamlib/_files/_parsing.py CHANGED
@@ -1,6 +1,5 @@
 from __future__ import annotations
 
-import array
 import re
 import sys
 from enum import Enum, auto
@@ -16,6 +15,7 @@ if sys.version_info >= (3, 10):
 else:
     EllipsisType = type(...)
 
+import numpy as np
 from pyparsing import (
     Combine,
     Dict,
@@ -40,34 +40,37 @@ from pyparsing import (
 from ._types import Data, Dimensioned, DimensionSet, File
 
 
-class Tensor(Enum):
+class _Tensor(Enum):
     SCALAR = auto()
     VECTOR = auto()
     SYMM_TENSOR = auto()
     TENSOR = auto()
 
     @property
-    def shape(self) -> tuple[int, ...]:
-        return {
-            Tensor.SCALAR: (),
-            Tensor.VECTOR: (3,),
-            Tensor.SYMM_TENSOR: (6,),
-            Tensor.TENSOR: (9,),
-        }[self]
+    def shape(self) -> tuple[()] | tuple[int]:
+        if self == _Tensor.SCALAR:
+            return ()
+        if self == _Tensor.VECTOR:
+            return (3,)
+        if self == _Tensor.SYMM_TENSOR:
+            return (6,)
+        if self == _Tensor.TENSOR:
+            return (9,)
+        raise NotImplementedError
 
     @property
     def size(self) -> int:
         return {
-            Tensor.SCALAR: 1,
-            Tensor.VECTOR: 3,
-            Tensor.SYMM_TENSOR: 6,
-            Tensor.TENSOR: 9,
+            _Tensor.SCALAR: 1,
+            _Tensor.VECTOR: 3,
+            _Tensor.SYMM_TENSOR: 6,
+            _Tensor.TENSOR: 9,
         }[self]
 
     def pattern(self, *, ignore: Regex | None = None) -> str:
         float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
 
-        if self == Tensor.SCALAR:
+        if self == _Tensor.SCALAR:
            return float_pattern
 
         ignore_pattern = (
@@ -77,21 +80,21 @@ class Tensor(Enum):
         return rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{self.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
 
     def parser(self) -> ParserElement:
-        if self == Tensor.SCALAR:
+        if self == _Tensor.SCALAR:
             return common.ieee_float
 
         return (
             Literal("(").suppress()
             + Group(common.ieee_float[self.size], aslist=True)
             + Literal(")").suppress()
-        )
+        ).add_parse_action(lambda tks: np.array(tks[0], dtype=float))
 
     def __str__(self) -> str:
         return {
-            Tensor.SCALAR: "scalar",
-            Tensor.VECTOR: "vector",
-            Tensor.SYMM_TENSOR: "symmTensor",
-            Tensor.TENSOR: "tensor",
+            _Tensor.SCALAR: "scalar",
+            _Tensor.VECTOR: "vector",
+            _Tensor.SYMM_TENSOR: "symmTensor",
+            _Tensor.TENSOR: "tensor",
         }[self]
 
 
@@ -115,52 +118,34 @@ def _list_of(entry: ParserElement) -> ParserElement:
 
 
 def _parse_ascii_field(
-    s: str, tensor_kind: Tensor, *, ignore: Regex | None
-) -> list[float] | list[list[float]]:
-    values = [
-        float(v)
-        for v in (re.sub(ignore.re, " ", s) if ignore is not None else s)
-        .replace("(", " ")
-        .replace(")", " ")
-        .split()
-    ]
+    s: str, tensor_kind: _Tensor, *, ignore: Regex | None
+) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64]]:
+    if ignore is not None:
+        s = re.sub(ignore.re, " ", s)
+    s = s.replace("(", " ").replace(")", " ")
 
-    if tensor_kind == Tensor.SCALAR:
-        return values
-
-    return [
-        values[i : i + tensor_kind.size]
-        for i in range(0, len(values), tensor_kind.size)
-    ]
+    return np.fromstring(s, dtype=float, sep=" ").reshape(-1, *tensor_kind.shape)
 
 
 def _unpack_binary_field(
-    b: bytes, tensor_kind: Tensor, *, length: int
-) -> list[float] | list[list[float]]:
+    b: bytes, tensor_kind: _Tensor, *, length: int
+) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
     float_size = len(b) / tensor_kind.size / length
     assert float_size in (4, 8)
 
-    arr = array.array("f" if float_size == 4 else "d", b)
-    values = arr.tolist()
-
-    if tensor_kind == Tensor.SCALAR:
-        return values
-
-    return [
-        values[i : i + tensor_kind.size]
-        for i in range(0, len(values), tensor_kind.size)
-    ]
+    dtype = np.float32 if float_size == 4 else float
+    return np.frombuffer(b, dtype=dtype).reshape(-1, *tensor_kind.shape)
 
 
 def _tensor_list(
-    tensor_kind: Tensor | None = None, *, ignore: Regex | None = None
+    tensor_kind: _Tensor | None = None, *, ignore: Regex | None = None
 ) -> ParserElement:
     if tensor_kind is None:
         return (
-            _tensor_list(Tensor.SCALAR, ignore=ignore)
-            | _tensor_list(Tensor.VECTOR, ignore=ignore)
-            | _tensor_list(Tensor.SYMM_TENSOR, ignore=ignore)
-            | _tensor_list(Tensor.TENSOR, ignore=ignore)
+            _tensor_list(_Tensor.SCALAR, ignore=ignore)
+            | _tensor_list(_Tensor.VECTOR, ignore=ignore)
+            | _tensor_list(_Tensor.SYMM_TENSOR, ignore=ignore)
+            | _tensor_list(_Tensor.TENSOR, ignore=ignore)
        )
 
     tensor_pattern = tensor_kind.pattern(ignore=ignore)
@@ -187,7 +172,7 @@ def _tensor_list(
         )
         | Regex(
             rf"\((?s:.{{{length * tensor_kind.size * 8}}}|.{{{length * tensor_kind.size * 4}}})\)"
-        ).set_parse_action(
+        ).add_parse_action(
             lambda tks: [
                 _unpack_binary_field(
                     tks[0][1:-1].encode("latin-1"), tensor_kind, length=length
@@ -196,7 +181,9 @@
         )
         | (
             Literal("{").suppress() + tensor_kind.parser() + Literal("}").suppress()
-        ).set_parse_action(lambda tks: [[tks[0]] * length])
+        ).add_parse_action(
+            lambda tks: [np.full((length, *tensor_kind.shape), tks[0], dtype=float)]
+        )
     )
 
     count = common.integer.copy().add_parse_action(count_parse_action)
@@ -274,10 +261,10 @@ _DIMENSIONS = (
     Literal("[").suppress() + common.number[0, 7] + Literal("]").suppress()
 ).set_parse_action(lambda tks: DimensionSet(*tks))
 _TENSOR = (
-    Tensor.SCALAR.parser()
-    | Tensor.VECTOR.parser()
-    | Tensor.SYMM_TENSOR.parser()
-    | Tensor.TENSOR.parser()
+    _Tensor.SCALAR.parser()
+    | _Tensor.VECTOR.parser()
+    | _Tensor.SYMM_TENSOR.parser()
+    | _Tensor.TENSOR.parser()
 )
 _IDENTIFIER = Combine(
     Word(_IDENTCHARS, _IDENTBODYCHARS, exclude_chars="()")
@@ -289,27 +276,32 @@ _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
 _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
     Keyword("nonuniform", _IDENTBODYCHARS).suppress() + _tensor_list(ignore=_COMMENT)
 )
-TOKEN = dbl_quoted_string | _IDENTIFIER
-DATA = Forward()
-_KEYWORD_ENTRY = _keyword_entry_of(TOKEN | _list_of(_IDENTIFIER), DATA)
-_DICT = _dict_of(TOKEN, DATA)
+_TOKEN = dbl_quoted_string | _IDENTIFIER
+_DATA = Forward()
+_KEYWORD_ENTRY = _keyword_entry_of(_TOKEN | _list_of(_IDENTIFIER), _DATA)
+_DICT = _dict_of(_TOKEN, _DATA)
 _DATA_ENTRY = Forward()
 _LIST_ENTRY = _DICT | _KEYWORD_ENTRY | _DATA_ENTRY
 _LIST = _list_of(_LIST_ENTRY)
 _NUMBER = common.signed_integer ^ common.ieee_float
-_DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | TOKEN
+_DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | _TOKEN
 
-DATA <<= (
+_DATA <<= (
     _DATA_ENTRY[1, ...]
     .set_parse_action(lambda tks: [tuple(tks)] if len(tks) > 1 else [tks[0]])
     .ignore(_COMMENT)
     .parse_with_tabs()
 )
 
+
+def parse_data(s: str) -> Data:
+    return cast(Data, _DATA.parse_string(s, parse_all=True)[0])
+
+
 _LOCATED_DICTIONARY = Group(
-    _keyword_entry_of(TOKEN, Opt(DATA, default=""), located=True)
+    _keyword_entry_of(_TOKEN, Opt(_DATA, default=""), located=True)
 )[...]
-_LOCATED_DATA = Group(Located(DATA.copy().add_parse_action(lambda tks: ["", tks[0]])))
+_LOCATED_DATA = Group(Located(_DATA.copy().add_parse_action(lambda tks: ["", tks[0]])))
 
 _FILE = (
     Dict(_LOCATED_DICTIONARY + Opt(_LOCATED_DATA) + _LOCATED_DICTIONARY)
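With these changes, parsed fields come back as numpy arrays rather than nested Python lists. A minimal sketch of the np.fromstring decoding now used for ASCII fields (simplified; the real _parse_ascii_field also strips comment tokens via its ignore regex and reshapes by tensor_kind.shape):

import numpy as np

def decode_ascii_field(s: str, components: int) -> np.ndarray:
    """Decode '(1 2 3) (4 5 6)'-style data into an (n,) or (n, components) array."""
    s = s.replace("(", " ").replace(")", " ")  # parentheses become whitespace
    values = np.fromstring(s, dtype=float, sep=" ")
    # components == 1 -> flat scalar field; otherwise one row per tensor
    return values if components == 1 else values.reshape(-1, components)

print(decode_ascii_field("(1 2 3) (4 5 6)", 3))  # [[1. 2. 3.] [4. 5. 6.]]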
foamlib/_files/_serialization.py CHANGED
@@ -1,35 +1,25 @@
 from __future__ import annotations
 
-import array
-import itertools
 import sys
 from enum import Enum, auto
-from typing import cast, overload
+from typing import overload
 
 if sys.version_info >= (3, 9):
-    from collections.abc import Mapping, Sequence
+    from collections.abc import Mapping
 else:
-    from typing import Mapping, Sequence
+    from typing import Mapping
 
-from ._parsing import DATA, TOKEN
-from ._types import Data, Dimensioned, DimensionSet, Entry
-from ._util import is_sequence
+import numpy as np
 
-try:
-    import numpy as np
-
-    numpy = True
-except ModuleNotFoundError:
-    numpy = False
+from ._parsing import parse_data
+from ._types import Data, Dimensioned, DimensionSet, Entry, is_sequence
 
 
 class Kind(Enum):
     DEFAULT = auto()
-    KEYWORD = auto()
     SINGLE_ENTRY = auto()
     ASCII_FIELD = auto()
-    DOUBLE_PRECISION_BINARY_FIELD = auto()
-    SINGLE_PRECISION_BINARY_FIELD = auto()
+    BINARY_FIELD = auto()
     DIMENSIONS = auto()
 
 
@@ -42,9 +32,29 @@ def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry: ...
 
 
 def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry:
-    if numpy and isinstance(data, np.ndarray):
+    if kind in (Kind.ASCII_FIELD, Kind.BINARY_FIELD):
+        if is_sequence(data):
+            try:
+                arr = np.asarray(data)
+            except ValueError:
+                pass
+            else:
+                if not np.issubdtype(arr.dtype, np.floating):
+                    arr = arr.astype(float)
+
+                if arr.ndim == 1 or (arr.ndim == 2 and arr.shape[1] in (3, 6, 9)):
+                    return arr
+
+            return data
+
+        if isinstance(data, int):
+            return float(data)
+
+        return data
+
+    if isinstance(data, np.ndarray):
         ret = data.tolist()
-        assert isinstance(ret, list)
+        assert isinstance(ret, (int, float, list))
         return ret
 
     if isinstance(data, Mapping):
@@ -56,32 +66,21 @@ def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry:
         and len(data) <= 7
         and all(isinstance(d, (int, float)) for d in data)
     ):
-        data = cast(Sequence[float], data)
         return DimensionSet(*data)
 
     if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
         k, v = data
-        return (normalize(k, kind=Kind.KEYWORD), normalize(v))
+        return (normalize(k), normalize(v))
 
     if is_sequence(data) and (kind == Kind.SINGLE_ENTRY or not isinstance(data, tuple)):
         return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
 
-    if isinstance(data, Dimensioned):
-        value = normalize(data.value, kind=Kind.SINGLE_ENTRY)
-        assert isinstance(value, (int, float, list))
-        return Dimensioned(value, data.dimensions, data.name)
-
     if isinstance(data, str):
-        if kind == Kind.KEYWORD:
-            data = TOKEN.parse_string(data, parse_all=True)[0]
-            assert isinstance(data, str)
-            return data
-
-        return cast(Data, DATA.parse_string(data, parse_all=True)[0])
+        return parse_data(data)
 
     if isinstance(
         data,
-        (int, float, bool, tuple, DimensionSet),
+        (int, float, bool, tuple, DimensionSet, Dimensioned),
     ):
         return data
 
@@ -105,7 +104,7 @@ def dumps(
 
     if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
         k, v = data
-        ret = dumps(k, kind=Kind.KEYWORD) + b" " + dumps(v)
+        ret = dumps(k) + b" " + dumps(v)
         if not isinstance(v, Mapping):
             ret += b";"
         return ret
@@ -113,58 +112,35 @@
     if isinstance(data, DimensionSet):
         return b"[" + b" ".join(dumps(v) for v in data) + b"]"
 
-    if kind in (
-        Kind.ASCII_FIELD,
-        Kind.DOUBLE_PRECISION_BINARY_FIELD,
-        Kind.SINGLE_PRECISION_BINARY_FIELD,
-    ) and (
-        isinstance(data, (int, float))
-        or (
-            is_sequence(data)
-            and data
-            and isinstance(data[0], (int, float))
-            and len(data) in (3, 6, 9)
-        )
+    if kind in (Kind.ASCII_FIELD, Kind.BINARY_FIELD) and (
+        isinstance(data, (int, float, np.ndarray))
     ):
-        return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
-
-    if kind in (
-        Kind.ASCII_FIELD,
-        Kind.DOUBLE_PRECISION_BINARY_FIELD,
-        Kind.SINGLE_PRECISION_BINARY_FIELD,
-    ) and is_sequence(data):
-        if data and isinstance(data[0], (int, float)):
+        shape = np.shape(data)
+        if shape in ((), (3,), (6,), (9,)):
+            return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
+
+        assert isinstance(data, np.ndarray)
+        ndim = len(shape)
+        if ndim == 1:
             tensor_kind = b"scalar"
-        elif is_sequence(data[0]) and data[0] and isinstance(data[0][0], (int, float)):
-            if len(data[0]) == 3:
+
+        elif ndim == 2:
+            if shape[1] == 3:
                 tensor_kind = b"vector"
-            elif len(data[0]) == 6:
+            elif shape[1] == 6:
                 tensor_kind = b"symmTensor"
-            elif len(data[0]) == 9:
+            elif shape[1] == 9:
                 tensor_kind = b"tensor"
             else:
                 return dumps(data)
+
         else:
            return dumps(data)
 
-        if kind in (
-            Kind.DOUBLE_PRECISION_BINARY_FIELD,
-            Kind.SINGLE_PRECISION_BINARY_FIELD,
-        ):
-            typecode = "f" if kind == Kind.SINGLE_PRECISION_BINARY_FIELD else "d"
-            if tensor_kind == b"scalar":
-                data = cast(Sequence[float], data)
-                contents = b"(" + array.array(typecode, data).tobytes() + b")"
-            else:
-                data = cast(Sequence[Sequence[float]], data)
-                contents = (
-                    b"("
-                    + array.array(
-                        typecode, itertools.chain.from_iterable(data)
-                    ).tobytes()
-                    + b")"
-                )
+        if kind == Kind.BINARY_FIELD:
+            contents = b"(" + data.tobytes() + b")"
         else:
+            assert kind == Kind.ASCII_FIELD
            contents = dumps(data, kind=Kind.SINGLE_ENTRY)
 
         return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
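The single Kind.BINARY_FIELD works because arr.tobytes() already emits whatever precision the array carries (8 bytes per float64 value, 4 per float32), making the explicit single/double-precision kinds redundant. A rough sketch of the nonuniform branch of dumps() above (dump_nonuniform is a hypothetical helper name; the real function also handles uniform values and many other kinds):

import numpy as np

_TENSOR_KINDS = {1: b"scalar", 3: b"vector", 6: b"symmTensor", 9: b"tensor"}

def dump_nonuniform(arr: np.ndarray, *, binary: bool) -> bytes:
    """Serialize a nonuniform field as 'nonuniform List<kind> n(...)'."""
    components = 1 if arr.ndim == 1 else arr.shape[1]
    tensor_kind = _TENSOR_KINDS[components]
    if binary:
        contents = b"(" + arr.tobytes() + b")"  # precision follows arr.dtype
    else:
        rows = (
            (str(v) for v in arr.tolist())
            if arr.ndim == 1
            else ("(" + " ".join(map(str, row)) + ")" for row in arr.tolist())
        )
        contents = ("(" + " ".join(rows) + ")").encode()
    return b"nonuniform List<" + tensor_kind + b"> " + str(len(arr)).encode() + contents

print(dump_nonuniform(np.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]]), binary=False))
# b'nonuniform List<vector> 2((1.0 0.0 0.0) (0.0 1.0 0.0))'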
foamlib/_files/_types.py CHANGED
@@ -2,16 +2,20 @@ from __future__ import annotations
 
 import sys
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Dict, NamedTuple, Optional, Union
+from typing import Dict, NamedTuple, Optional, Tuple, Union
 
-if TYPE_CHECKING:
-    import numpy as np
+import numpy as np
 
 if sys.version_info >= (3, 9):
     from collections.abc import Mapping, MutableMapping, Sequence
 else:
     from typing import Mapping, MutableMapping, Sequence
 
+if sys.version_info >= (3, 10):
+    from typing import TypeGuard
+else:
+    from typing_extensions import TypeGuard
+
 
 class DimensionSet(NamedTuple):
     mass: float = 0
@@ -29,7 +33,7 @@ class DimensionSet(NamedTuple):
 Tensor = Union[
     float,
     Sequence[float],
-    "np.ndarray[tuple[()] | tuple[int], np.dtype[np.float64 | np.int_]]",
+    "np.ndarray[tuple[()] | Tuple[int], np.dtype[np.float64]]",
 ]
 
 
@@ -40,14 +44,30 @@ class Dimensioned:
     name: str | None = None
 
     def __post_init__(self) -> None:
+        if is_sequence(self.value):
+            self.value = np.asarray(self.value, dtype=float)
+        else:
+            assert isinstance(self.value, (int, float, np.ndarray))
+            self.value = float(self.value)
+
         if not isinstance(self.dimensions, DimensionSet):
             self.dimensions = DimensionSet(*self.dimensions)
 
+    def __eq__(self, other: object) -> bool:
+        if not isinstance(other, Dimensioned):
+            return NotImplemented
+
+        return (
+            self.dimensions == other.dimensions
+            and np.array_equal(self.value, other.value)
+            and self.name == other.name
+        )
+
 
 Field = Union[
     Tensor,
     Sequence[Tensor],
-    "np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.int_]]",
+    "np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]",
 ]
 
 Data = Union[
@@ -58,7 +78,6 @@ Data = Union[
     Dimensioned,
     DimensionSet,
     Sequence["Entry"],
-    Tensor,
     Field,
 ]
 
@@ -70,6 +89,18 @@ Entry = Union[
 A value that can be stored in an OpenFOAM file.
 """
 
+
+def is_sequence(
+    value: Entry,
+) -> TypeGuard[
+    Sequence[Entry]
+    | np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]
+]:
+    return (isinstance(value, Sequence) and not isinstance(value, str)) or (
+        isinstance(value, np.ndarray) and value.ndim > 0
+    )
+
+
 MutableEntry = Union[
     Data,
     MutableMapping[str, "MutableEntry"],
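is_sequence moved here from the now-deleted _util.py (see below) and gained an ndarray branch, so callers can index and iterate lists and arrays uniformly. A quick usage sketch, assuming the private module path stays importable:

import numpy as np
from foamlib._files._types import is_sequence

assert is_sequence([1.0, 2.0, 3.0])    # plain sequences still match
assert is_sequence(np.zeros((10, 3)))  # ndarrays now match too
assert not is_sequence("uniform 0")    # strings are excluded, as before
assert not is_sequence(np.array(1.0))  # 0-d arrays are excluded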
foamlib-0.8.0.dist-info/METADATA → foamlib-0.8.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: foamlib
-Version: 0.8.0
+Version: 0.8.1
 Summary: A Python interface for interacting with OpenFOAM
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib
@@ -25,12 +25,12 @@ Classifier: Topic :: Software Development
 Classifier: Typing :: Typed
 Requires-Python: >=3.7
 Requires-Dist: aioshutil<2,>=1
+Requires-Dist: numpy<3,>=1
+Requires-Dist: numpy<3,>=1.25.0; python_version >= '3.10'
 Requires-Dist: pyparsing<4,>=3
 Requires-Dist: typing-extensions<5,>=4; python_version < '3.11'
 Provides-Extra: dev
 Requires-Dist: mypy<2,>=1; extra == 'dev'
-Requires-Dist: numpy<3,>=1; extra == 'dev'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'dev'
 Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'dev'
 Requires-Dist: pytest-cov; extra == 'dev'
 Requires-Dist: pytest<9,>=7; extra == 'dev'
@@ -38,25 +38,16 @@ Requires-Dist: ruff; extra == 'dev'
 Requires-Dist: sphinx-rtd-theme; extra == 'dev'
 Requires-Dist: sphinx<9,>=5; extra == 'dev'
 Provides-Extra: docs
-Requires-Dist: numpy<3,>=1; extra == 'docs'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'docs'
 Requires-Dist: sphinx-rtd-theme; extra == 'docs'
 Requires-Dist: sphinx<9,>=5; extra == 'docs'
 Provides-Extra: lint
 Requires-Dist: ruff; extra == 'lint'
-Provides-Extra: numpy
-Requires-Dist: numpy<3,>=1; extra == 'numpy'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'numpy'
 Provides-Extra: test
-Requires-Dist: numpy<3,>=1; extra == 'test'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'test'
 Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'test'
 Requires-Dist: pytest-cov; extra == 'test'
 Requires-Dist: pytest<9,>=7; extra == 'test'
 Provides-Extra: typing
 Requires-Dist: mypy<2,>=1; extra == 'typing'
-Requires-Dist: numpy<3,>=1; extra == 'typing'
-Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'typing'
 Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'typing'
 Requires-Dist: pytest-cov; extra == 'typing'
 Requires-Dist: pytest<9,>=7; extra == 'typing'
foamlib-0.8.0.dist-info/RECORD → foamlib-0.8.1.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
-foamlib/__init__.py,sha256=iN_p0PYnIJNOiwBl-JcsqwLyxvZMD2qW6X7alo-YOO4,452
+foamlib/__init__.py,sha256=TATCOFe7Qz20bMXgF1iX2nZmLzME7Wtg3gIocDfQTZk,452
 foamlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 foamlib/_cases/__init__.py,sha256=_A1TTHuQfS9FH2_33lSEyLtOJZGFHZBco1tWJCVOHks,358
 foamlib/_cases/_async.py,sha256=i6g4EBHqvI-1PkdrxsRto2ynW7sxsOga2bSYk1XVG1U,7795
@@ -9,13 +9,12 @@ foamlib/_cases/_subprocess.py,sha256=6BlBRxknj2-BFcGkx7oVcuL63_utSaY1Axmsc1qV9j8
 foamlib/_cases/_sync.py,sha256=2BJXB7Nzldb4OgPukqupgYqdceUGkI2mYhhtGPWEBrc,5901
 foamlib/_cases/_util.py,sha256=tK4SM5WT3eEgGsFLnidIySbom1qowBAua9z13gipKJk,1518
 foamlib/_files/__init__.py,sha256=q1vkjXnjnSZvo45jPAICpWeF2LZv5V6xfzAR6S8fS5A,96
-foamlib/_files/_files.py,sha256=Afkv9X8Om-AxIe9Ojqbz203_69okdlMNIlSeEAMnkzY,15832
+foamlib/_files/_files.py,sha256=hVzsAhUFukF7sGLUNp1wFUkzIMpRfrpKCKObLj8MKhA,15070
 foamlib/_files/_io.py,sha256=BGbbm6HKxL2ka0YMCmHqZQZ1R4PPQlkvWWb4FHMAS8k,2217
-foamlib/_files/_parsing.py,sha256=Go-gwu5HAZF__iF29l_EiPWouBIhjeQG8e6P9vM2cXY,13833
-foamlib/_files/_serialization.py,sha256=gc0ybQFKU68Ytdv1Uwa1kA8GgunI69XnIch259K4ing,5826
-foamlib/_files/_types.py,sha256=jvXegp5vKLMJpN8I3jTpAhnT2WQ5FJ5WVBcqw5pRjdQ,1666
-foamlib/_files/_util.py,sha256=lkoSJHXjd6MvDxx39ZF75mhPq-_QX9AjrruVcQ7I9WI,496
-foamlib-0.8.0.dist-info/METADATA,sha256=RZN9GJjprIVwmk39qVXBNJq3yr9Sy9LhuVjHAIPcHfc,8499
-foamlib-0.8.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
-foamlib-0.8.0.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
-foamlib-0.8.0.dist-info/RECORD,,
+foamlib/_files/_parsing.py,sha256=iKtreUueJ3NrE8a2cIpUf4zF7z3o4pJtKLufx0mnb3A,13986
+foamlib/_files/_serialization.py,sha256=Vuq9Ityb55a7kheGCqeuMHyzddb4mXozglMsc90_3Cc,4746
+foamlib/_files/_types.py,sha256=3l6MCwyK1wAv3B5xJsyF9Q4Ndgt-BF8zCd1N7_ZBxPM,2554
+foamlib-0.8.1.dist-info/METADATA,sha256=izsNijMcEBsVrrgyWoLzx80Gi6LoJ5PAIM4zNVngxwk,7938
+foamlib-0.8.1.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+foamlib-0.8.1.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
+foamlib-0.8.1.dist-info/RECORD,,
foamlib/_files/_util.py DELETED
@@ -1,23 +0,0 @@
-from __future__ import annotations
-
-import sys
-from typing import TYPE_CHECKING
-
-if sys.version_info >= (3, 9):
-    from collections.abc import Sequence
-else:
-    from typing import Sequence
-
-if sys.version_info >= (3, 10):
-    from typing import TypeGuard
-else:
-    from typing_extensions import TypeGuard
-
-if TYPE_CHECKING:
-    from ._types import Entry
-
-
-def is_sequence(
-    value: Entry,
-) -> TypeGuard[Sequence[Entry]]:
-    return isinstance(value, Sequence) and not isinstance(value, str)
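Taken together, a hedged end-to-end sketch of what this release changes for users; the case file path here is invented for illustration:

import numpy as np
from foamlib import FoamFile

# Assuming "0/U" is a vector field file in an OpenFOAM case directory:
U = FoamFile("0/U")["internalField"]
print(type(U), np.shape(U))  # nonuniform fields now come back as numpy arrays, e.g. (n, 3)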