foamlib 0.7.2__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
foamlib/__init__.py CHANGED
@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.7.2"
+__version__ = "0.8.0"
 
 from ._cases import (
     AsyncFoamCase,
@@ -16,9 +16,9 @@ __all__ = [
     "AsyncFoamCase",
     "AsyncSlurmFoamCase",
     "CalledProcessError",
-    "FoamFile",
     "FoamCase",
     "FoamCaseBase",
    "FoamCaseRunBase",
     "FoamFieldFile",
+    "FoamFile",
 ]
foamlib/_cases/__init__.py CHANGED
@@ -7,9 +7,9 @@ from ._sync import FoamCase
 
 __all__ = [
     "AsyncFoamCase",
-    "FoamCaseBase",
-    "FoamCaseRunBase",
     "AsyncSlurmFoamCase",
     "CalledProcessError",
     "FoamCase",
+    "FoamCaseBase",
+    "FoamCaseRunBase",
 ]
foamlib/_cases/_base.py CHANGED
@@ -6,17 +6,10 @@ from pathlib import Path
 from typing import TYPE_CHECKING, overload
 
 if sys.version_info >= (3, 9):
-    from collections.abc import (
-        Iterator,
-        Sequence,
-    )
+    from collections.abc import Iterator, Sequence
     from collections.abc import Set as AbstractSet
 else:
-    from typing import (
-        AbstractSet,
-        Iterator,
-        Sequence,
-    )
+    from typing import AbstractSet, Iterator, Sequence
 
 from .._files import FoamFieldFile, FoamFile
 
foamlib/_cases/_util.py CHANGED
@@ -2,14 +2,7 @@ from __future__ import annotations
 
 import functools
 import sys
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    AsyncContextManager,
-    Callable,
-    Generic,
-    TypeVar,
-)
+from typing import TYPE_CHECKING, Any, AsyncContextManager, Callable, Generic, TypeVar
 
 if TYPE_CHECKING:
     from types import TracebackType
foamlib/_files/__init__.py CHANGED
@@ -1,6 +1,6 @@
 from ._files import FoamFieldFile, FoamFile
 
 __all__ = [
-    "FoamFile",
     "FoamFieldFile",
+    "FoamFile",
 ]
foamlib/_files/_files.py CHANGED
@@ -19,13 +19,13 @@ from ._io import FoamFileIO
 from ._serialization import Kind, dumps, normalize
 from ._types import (
     Data,
-    DataEntry,
     Dict_,
     Dimensioned,
     DimensionSet,
+    Entry,
     Field,
     File,
-    MutableData,
+    MutableEntry,
 )
 from ._util import is_sequence
 
@@ -33,7 +33,7 @@ from ._util import is_sequence
 class FoamFile(
     MutableMapping[
         Optional[Union[str, Tuple[str, ...]]],
-        MutableData,
+        MutableEntry,
     ],
     FoamFileIO,
 ):
@@ -49,7 +49,7 @@ class FoamFile(
     DimensionSet = DimensionSet
 
     class SubDict(
-        MutableMapping[str, MutableData],
+        MutableMapping[str, MutableEntry],
     ):
         """An OpenFOAM dictionary within a file as a mutable mapping."""
 
@@ -57,13 +57,13 @@ class FoamFile(
             self._file = _file
             self._keywords = _keywords
 
-        def __getitem__(self, keyword: str) -> DataEntry | FoamFile.SubDict:
+        def __getitem__(self, keyword: str) -> Data | FoamFile.SubDict:
             return self._file[(*self._keywords, keyword)]
 
         def __setitem__(
             self,
             keyword: str,
-            data: Data,
+            data: Entry,
         ) -> None:
             self._file[(*self._keywords, keyword)] = data
 
@@ -174,7 +174,7 @@ class FoamFile(
 
     def __getitem__(
         self, keywords: str | tuple[str, ...] | None
-    ) -> DataEntry | FoamFile.SubDict:
+    ) -> Data | FoamFile.SubDict:
         if not keywords:
             keywords = ()
         elif not isinstance(keywords, tuple):
@@ -190,7 +190,7 @@ class FoamFile(
             return FoamFile.SubDict(self, keywords)
         return deepcopy(value)
 
-    def __setitem__(self, keywords: str | tuple[str, ...] | None, data: Data) -> None:
+    def __setitem__(self, keywords: str | tuple[str, ...] | None, data: Entry) -> None:
         if not keywords:
             keywords = ()
         elif not isinstance(keywords, tuple):
@@ -438,7 +438,7 @@ class FoamFieldFile(FoamFile):
 
     def __getitem__(
         self, keywords: str | tuple[str, ...] | None
-    ) -> DataEntry | FoamFile.SubDict:
+    ) -> Data | FoamFile.SubDict:
         if not keywords:
             keywords = ()
         elif not isinstance(keywords, tuple):
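
The `DataEntry` → `Data` and `MutableData` → `MutableEntry` renames above change names, not behavior: `FoamFile.__getitem__` returns a plain value (`Data`) or a `SubDict`, while `__setitem__` accepts the broader `Entry`, which also admits nested mappings. A minimal usage sketch (the file path is hypothetical; any OpenFOAM dictionary file works the same way):

    from foamlib import FoamFile

    # Hypothetical case file path
    transport = FoamFile("constant/transportProperties")

    transport["nu"] = 1e-05                     # a plain value is a valid Entry
    transport["model"] = {"type": "Newtonian"}  # a nested mapping is also an Entry
    nu = transport["nu"]                        # returns Data or a FoamFile.SubDict
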
foamlib/_files/_io.py CHANGED
@@ -3,9 +3,7 @@ from __future__ import annotations
 import gzip
 import sys
 from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-)
+from typing import TYPE_CHECKING
 
 if sys.version_info >= (3, 11):
     from typing import Self
foamlib/_files/_parsing.py CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 import array
 import re
 import sys
+from enum import Enum, auto
 from typing import Tuple, Union, cast
 
 if sys.version_info >= (3, 9):
@@ -13,10 +14,9 @@ else:
 if sys.version_info >= (3, 10):
     from types import EllipsisType
 else:
-    from typing import Any as EllipsisType
+    EllipsisType = type(...)
 
 from pyparsing import (
-    CharsNotIn,
     Combine,
     Dict,
     Forward,
@@ -28,16 +28,71 @@ from pyparsing import (
     Opt,
     ParserElement,
     ParseResults,
-    QuotedString,
     Regex,
     Word,
     common,
     counted_array,
+    dbl_quoted_string,
     identchars,
     printables,
 )
 
-from ._types import DataEntry, Dimensioned, DimensionSet, File
+from ._types import Data, Dimensioned, DimensionSet, File
+
+
+class Tensor(Enum):
+    SCALAR = auto()
+    VECTOR = auto()
+    SYMM_TENSOR = auto()
+    TENSOR = auto()
+
+    @property
+    def shape(self) -> tuple[int, ...]:
+        return {
+            Tensor.SCALAR: (),
+            Tensor.VECTOR: (3,),
+            Tensor.SYMM_TENSOR: (6,),
+            Tensor.TENSOR: (9,),
+        }[self]
+
+    @property
+    def size(self) -> int:
+        return {
+            Tensor.SCALAR: 1,
+            Tensor.VECTOR: 3,
+            Tensor.SYMM_TENSOR: 6,
+            Tensor.TENSOR: 9,
+        }[self]
+
+    def pattern(self, *, ignore: Regex | None = None) -> str:
+        float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
+
+        if self == Tensor.SCALAR:
+            return float_pattern
+
+        ignore_pattern = (
+            rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
+        )
+
+        return rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{self.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
+
+    def parser(self) -> ParserElement:
+        if self == Tensor.SCALAR:
+            return common.ieee_float
+
+        return (
+            Literal("(").suppress()
+            + Group(common.ieee_float[self.size], aslist=True)
+            + Literal(")").suppress()
+        )
+
+    def __str__(self) -> str:
+        return {
+            Tensor.SCALAR: "scalar",
+            Tensor.VECTOR: "vector",
+            Tensor.SYMM_TENSOR: "symmTensor",
+            Tensor.TENSOR: "tensor",
+        }[self]
 
 
 def _list_of(entry: ParserElement) -> ParserElement:
@@ -59,96 +114,135 @@ def _list_of(entry: ParserElement) -> ParserElement:
     )
 
 
-def _counted_tensor_list(*, size: int, ignore: Regex) -> ParserElement:
-    float_pattern = r"[+-]?((\d+\.?\d*(e[+-]?\d+)?)|nan|inf(inity)?)"
-    ignore_pattern = rf"(?:\s|{ignore.re.pattern})+"
+def _parse_ascii_field(
+    s: str, tensor_kind: Tensor, *, ignore: Regex | None
+) -> list[float] | list[list[float]]:
+    values = [
+        float(v)
+        for v in (re.sub(ignore.re, " ", s) if ignore is not None else s)
+        .replace("(", " ")
+        .replace(")", " ")
+        .split()
+    ]
 
-    if size == 1:
-        tensor_pattern = float_pattern
-        tensor = common.ieee_float
-    else:
-        tensor_pattern = rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
-        tensor = (
-            Literal("(").suppress()
-            + Group(common.ieee_float[size], aslist=True)
-            + Literal(")").suppress()
+    if tensor_kind == Tensor.SCALAR:
+        return values
+
+    return [
+        values[i : i + tensor_kind.size]
+        for i in range(0, len(values), tensor_kind.size)
+    ]
+
+
+def _unpack_binary_field(
+    b: bytes, tensor_kind: Tensor, *, length: int
+) -> list[float] | list[list[float]]:
+    float_size = len(b) / tensor_kind.size / length
+    assert float_size in (4, 8)
+
+    arr = array.array("f" if float_size == 4 else "d", b)
+    values = arr.tolist()
+
+    if tensor_kind == Tensor.SCALAR:
+        return values
+
+    return [
+        values[i : i + tensor_kind.size]
+        for i in range(0, len(values), tensor_kind.size)
+    ]
+
+
+def _tensor_list(
+    tensor_kind: Tensor | None = None, *, ignore: Regex | None = None
+) -> ParserElement:
+    if tensor_kind is None:
+        return (
+            _tensor_list(Tensor.SCALAR, ignore=ignore)
+            | _tensor_list(Tensor.VECTOR, ignore=ignore)
+            | _tensor_list(Tensor.SYMM_TENSOR, ignore=ignore)
+            | _tensor_list(Tensor.TENSOR, ignore=ignore)
         )
 
+    tensor_pattern = tensor_kind.pattern(ignore=ignore)
+    ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
+
     list_ = Forward()
 
+    list_ <<= Regex(
+        rf"\((?:{ignore_pattern})?(?:{tensor_pattern}{ignore_pattern})*{tensor_pattern}(?:{ignore_pattern})?\)"
+    ).add_parse_action(
+        lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
+    )
+
     def count_parse_action(tks: ParseResults) -> None:
         nonlocal list_
         length = tks[0]
         assert isinstance(length, int)
 
-        list_ <<= Regex(
-            rf"\((?:{ignore_pattern})?(?:{tensor_pattern}{ignore_pattern}){{{length - 1}}}{tensor_pattern}(?:{ignore_pattern})?\)",
-            flags=re.IGNORECASE,
+        list_ <<= (
+            Regex(
+                rf"\((?:{ignore_pattern})?(?:{tensor_pattern}{ignore_pattern}){{{length - 1}}}{tensor_pattern}(?:{ignore_pattern})?\)"
+            ).add_parse_action(
+                lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
+            )
+            | Regex(
+                rf"\((?s:.{{{length * tensor_kind.size * 8}}}|.{{{length * tensor_kind.size * 4}}})\)"
+            ).set_parse_action(
+                lambda tks: [
+                    _unpack_binary_field(
+                        tks[0][1:-1].encode("latin-1"), tensor_kind, length=length
+                    )
+                ]
+            )
+            | (
+                Literal("{").suppress() + tensor_kind.parser() + Literal("}").suppress()
+            ).set_parse_action(lambda tks: [[tks[0]] * length])
         )
 
-    count = common.integer.add_parse_action(count_parse_action)
+    count = common.integer.copy().add_parse_action(count_parse_action)
+
+    return (
+        Opt(Literal("List") + Literal("<") + str(tensor_kind) + Literal(">")).suppress()
+        + Opt(count).suppress()
+        + list_
+    )
+
 
-    def list_parse_action(
-        tks: ParseResults,
-    ) -> list[list[float]] | list[list[list[float]]]:
-        values = [
-            float(v)
-            for v in re.sub(ignore.re, " ", tks[0])
-            .replace("(", " ")
-            .replace(")", " ")
-            .split()
-        ]
+def _dict_of(
+    keyword: ParserElement, data: ParserElement, *, located: bool = False
+) -> ParserElement:
+    dict_ = Forward()
 
-        if size == 1:
-            return [values]
+    keyword_entry = keyword + (dict_ | (data + Literal(";").suppress()))
 
-        return [[values[i : i + size] for i in range(0, len(values), size)]]
+    if located:
+        keyword_entry = Located(keyword_entry)
 
-    list_.add_parse_action(list_parse_action)
+    dict_ <<= (
+        Literal("{").suppress()
+        + Dict(Group(keyword_entry)[...], asdict=not located)
+        + Literal("}").suppress()
+    )
 
-    return (count.suppress() + list_) | (
-        common.integer + Literal("{").suppress() + tensor + Literal("}").suppress()
-    ).set_parse_action(lambda tks: [[tks[1]] * tks[0]])
+    return dict_
 
 
 def _keyword_entry_of(
     keyword: ParserElement,
-    data_entries: ParserElement,
+    data: ParserElement,
     *,
     located: bool = False,
 ) -> ParserElement:
-    subdict = Forward()
-
     keyword_entry = keyword + (
-        (Literal("{").suppress() + subdict + Literal("}").suppress())
-        | (data_entries + Literal(";").suppress())
+        _dict_of(keyword, data, located=located) | (data + Literal(";").suppress())
     )
 
     if located:
         keyword_entry = Located(keyword_entry)
-
-    subdict <<= Dict(Group(keyword_entry)[...], asdict=not located)
-
-    return keyword_entry
-
-
-def _unpack_binary_field(
-    tks: ParseResults,
-    *,
-    elsize: int = 1,
-) -> Sequence[Sequence[float] | Sequence[Sequence[float]]]:
-    float_size = len(tks[0]) // elsize
-
-    arr = array.array("f" if float_size == 4 else "d", "".join(tks).encode("latin-1"))
-
-    values: Sequence[float] | Sequence[Sequence[float]]
-
-    if elsize != 1:
-        values = [arr[i : i + elsize].tolist() for i in range(0, len(arr), elsize)]
     else:
-        values = arr.tolist()
+        keyword_entry = keyword_entry.copy().set_parse_action(lambda tks: tuple(tks))
 
-    return [values]
+    return keyword_entry
 
 
 # https://github.com/pyparsing/pyparsing/pull/584
@@ -179,12 +273,11 @@ _SWITCH = (
 _DIMENSIONS = (
     Literal("[").suppress() + common.number[0, 7] + Literal("]").suppress()
 ).set_parse_action(lambda tks: DimensionSet(*tks))
-_TENSOR = common.ieee_float | (
-    Literal("(").suppress()
-    + Group(
-        common.ieee_float[3] | common.ieee_float[6] | common.ieee_float[9], aslist=True
-    )
-    + Literal(")").suppress()
+_TENSOR = (
+    Tensor.SCALAR.parser()
+    | Tensor.VECTOR.parser()
+    | Tensor.SYMM_TENSOR.parser()
+    | Tensor.TENSOR.parser()
 )
 _IDENTIFIER = Combine(
     Word(_IDENTCHARS, _IDENTBODYCHARS, exclude_chars="()")
@@ -194,150 +287,43 @@ _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
     lambda tks: Dimensioned(*reversed(tks.as_list()))
 )
 _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
-    Keyword("nonuniform", _IDENTBODYCHARS).suppress()
-    + (
-        Literal("List").suppress()
-        + Literal("<").suppress()
-        + (
-            (
-                Literal("scalar").suppress()
-                + Literal(">").suppress()
-                + (
-                    _counted_tensor_list(size=1, ignore=_COMMENT)
-                    | (
-                        (
-                            (
-                                counted_array(
-                                    CharsNotIn(exact=8),
-                                    common.integer + Literal("(").suppress(),
-                                )
-                            )
-                            | (
-                                counted_array(
-                                    CharsNotIn(exact=4),
-                                    common.integer + Literal("(").suppress(),
-                                )
-                            )
-                        )
-                        + Literal(")").suppress()
-                    ).set_parse_action(_unpack_binary_field)
-                )
-            )
-            | (
-                Literal("vector").suppress()
-                + Literal(">").suppress()
-                + (
-                    _counted_tensor_list(size=3, ignore=_COMMENT)
-                    | (
-                        (
-                            (
-                                counted_array(
-                                    CharsNotIn(exact=8 * 3),
-                                    common.integer + Literal("(").suppress(),
-                                )
-                            )
-                            | (
-                                counted_array(
-                                    CharsNotIn(exact=4 * 3),
-                                    common.integer + Literal("(").suppress(),
-                                )
-                            )
-                        )
-                        + Literal(")").suppress()
-                    ).set_parse_action(lambda tks: _unpack_binary_field(tks, elsize=3))
-                )
-            )
-            | (
-                Literal("symmTensor").suppress()
-                + Literal(">").suppress()
-                + (
-                    _counted_tensor_list(size=6, ignore=_COMMENT)
-                    | (
-                        (
-                            (
-                                counted_array(
-                                    CharsNotIn(exact=8 * 6),
-                                    common.integer + Literal("(").suppress(),
-                                )
-                            )
-                            | (
-                                counted_array(
-                                    CharsNotIn(exact=4 * 6),
-                                    common.integer + Literal("(").suppress(),
-                                )
-                            )
-                        )
-                        + Literal(")").suppress()
-                    ).set_parse_action(lambda tks: _unpack_binary_field(tks, elsize=6))
-                )
-            )
-            | (
-                Literal("tensor").suppress()
-                + Literal(">").suppress()
-                + (
-                    _counted_tensor_list(size=9, ignore=_COMMENT)
-                    | (
-                        (
-                            (
-                                counted_array(
-                                    CharsNotIn(exact=8 * 9),
-                                    common.integer + Literal("(").suppress(),
-                                )
-                            )
-                            | (
-                                counted_array(
-                                    CharsNotIn(exact=4 * 9),
-                                    common.integer + Literal("(").suppress(),
-                                )
-                            )
-                        )
-                        + Literal(")").suppress()
-                    ).set_parse_action(lambda tks: _unpack_binary_field(tks, elsize=9))
-                )
-            )
-        )
-    )
+    Keyword("nonuniform", _IDENTBODYCHARS).suppress() + _tensor_list(ignore=_COMMENT)
 )
-_TOKEN = QuotedString('"', unquote_results=False) | _IDENTIFIER
+TOKEN = dbl_quoted_string | _IDENTIFIER
 DATA = Forward()
-KEYWORD = (
-    _TOKEN
-    | _list_of(_IDENTIFIER)
-    .set_parse_action(lambda tks: "(" + " ".join(tks[0]) + ")")
-    .ignore(_COMMENT)
-    .parse_with_tabs()
-)
-_KEYWORD_ENTRY = Dict(Group(_keyword_entry_of(KEYWORD, DATA)), asdict=True)
+_KEYWORD_ENTRY = _keyword_entry_of(TOKEN | _list_of(_IDENTIFIER), DATA)
+_DICT = _dict_of(TOKEN, DATA)
 _DATA_ENTRY = Forward()
-_LIST_ENTRY = _KEYWORD_ENTRY | _DATA_ENTRY
+_LIST_ENTRY = _DICT | _KEYWORD_ENTRY | _DATA_ENTRY
 _LIST = _list_of(_LIST_ENTRY)
 _NUMBER = common.signed_integer ^ common.ieee_float
-_DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | _TOKEN
+_DATA_ENTRY <<= _FIELD | _LIST | _DIMENSIONED | _DIMENSIONS | _NUMBER | _SWITCH | TOKEN
 
 DATA <<= (
     _DATA_ENTRY[1, ...]
-    .set_parse_action(lambda tks: tuple(tks) if len(tks) > 1 else [tks[0]])
+    .set_parse_action(lambda tks: [tuple(tks)] if len(tks) > 1 else [tks[0]])
     .ignore(_COMMENT)
     .parse_with_tabs()
 )
 
+_LOCATED_DICTIONARY = Group(
+    _keyword_entry_of(TOKEN, Opt(DATA, default=""), located=True)
+)[...]
+_LOCATED_DATA = Group(Located(DATA.copy().add_parse_action(lambda tks: ["", tks[0]])))
+
 _FILE = (
-    Dict(
-        Group(_keyword_entry_of(KEYWORD, Opt(DATA, default=""), located=True))[...]
-        + Opt(Group(Located(DATA.copy().add_parse_action(lambda tks: ["", tks[0]]))))
-        + Group(_keyword_entry_of(KEYWORD, Opt(DATA, default=""), located=True))[...]
-    )
+    Dict(_LOCATED_DICTIONARY + Opt(_LOCATED_DATA) + _LOCATED_DICTIONARY)
    .ignore(_COMMENT)
    .ignore(Literal("#include") + ... + LineEnd())  # type: ignore [no-untyped-call]
    .parse_with_tabs()
 )
 
 
-class Parsed(Mapping[Tuple[str, ...], Union[DataEntry, EllipsisType]]):
+class Parsed(Mapping[Tuple[str, ...], Union[Data, EllipsisType]]):
     def __init__(self, contents: bytes) -> None:
         self._parsed: MutableMapping[
             tuple[str, ...],
-            tuple[int, DataEntry | EllipsisType, int],
+            tuple[int, Data | EllipsisType, int],
         ] = {}
         for parse_result in _FILE.parse_string(
             contents.decode("latin-1"), parse_all=True
@@ -350,10 +336,10 @@ class Parsed(Mapping[Tuple[str, ...], Union[DataEntry, EllipsisType]]):
     @staticmethod
     def _flatten_result(
         parse_result: ParseResults, *, _keywords: tuple[str, ...] = ()
-    ) -> Mapping[tuple[str, ...], tuple[int, DataEntry | EllipsisType, int]]:
+    ) -> Mapping[tuple[str, ...], tuple[int, Data | EllipsisType, int]]:
        ret: MutableMapping[
            tuple[str, ...],
-            tuple[int, DataEntry | EllipsisType, int],
+            tuple[int, Data | EllipsisType, int],
        ] = {}
        start = parse_result.locn_start
        assert isinstance(start, int)
@@ -380,14 +366,14 @@ class Parsed(Mapping[Tuple[str, ...], Union[DataEntry, EllipsisType]]):
         ret[(*_keywords, keyword)] = (start, d, end)
         return ret
 
-    def __getitem__(self, keywords: tuple[str, ...]) -> DataEntry | EllipsisType:
+    def __getitem__(self, keywords: tuple[str, ...]) -> Data | EllipsisType:
         _, data, _ = self._parsed[keywords]
         return data
 
     def put(
         self,
         keywords: tuple[str, ...],
-        data: DataEntry | EllipsisType,
+        data: Data | EllipsisType,
         content: bytes,
     ) -> None:
         start, end = self.entry_location(keywords, missing_ok=True)
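
The new `Tensor` enum replaces four nearly identical copies of the scalar/vector/symmTensor/tensor grammar with one parameterized regex. A standalone sketch (not foamlib's actual code) of the pattern construction it encodes, assuming whitespace-only separators (i.e. no comments to skip):

    import re

    # Float pattern as in Tensor.pattern(): optional sign, decimal or
    # scientific notation, case-insensitive nan/inf
    FLOAT = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"

    def tensor_pattern(size: int) -> str:
        """Pattern for an ASCII tensor with `size` components."""
        if size == 1:
            return FLOAT
        # parenthesized: size - 1 floats each followed by whitespace, then one more
        return rf"\((?:\s+)?(?:{FLOAT}\s+){{{size - 1}}}{FLOAT}(?:\s+)?\)"

    assert re.fullmatch(tensor_pattern(1), "-1.5e-3")                # scalar
    assert re.fullmatch(tensor_pattern(3), "(1 2.5 NaN)")            # vector
    assert re.fullmatch(tensor_pattern(9), "(1 2 3 4 5 6 7 8 9)")    # tensor
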
foamlib/_files/_serialization.py CHANGED
@@ -11,8 +11,8 @@ if sys.version_info >= (3, 9):
 else:
     from typing import Mapping, Sequence
 
-from ._parsing import DATA, KEYWORD
-from ._types import Data, DataEntry, Dimensioned, DimensionSet
+from ._parsing import DATA, TOKEN
+from ._types import Data, Dimensioned, DimensionSet, Entry
 from ._util import is_sequence
 
 try:
@@ -34,14 +34,14 @@ class Kind(Enum):
 
 
 @overload
-def normalize(data: DataEntry, *, kind: Kind = Kind.DEFAULT) -> DataEntry: ...
+def normalize(data: Data, *, kind: Kind = Kind.DEFAULT) -> Data: ...
 
 
 @overload
-def normalize(data: Data, *, kind: Kind = Kind.DEFAULT) -> Data: ...
+def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry: ...
 
 
-def normalize(data: Data, *, kind: Kind = Kind.DEFAULT) -> Data:
+def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry:
     if numpy and isinstance(data, np.ndarray):
         ret = data.tolist()
         assert isinstance(ret, list)
@@ -59,10 +59,11 @@ def normalize(data: Data, *, kind: Kind = Kind.DEFAULT) -> Data:
         data = cast(Sequence[float], data)
         return DimensionSet(*data)
 
-    if is_sequence(data) and (kind == Kind.SINGLE_ENTRY or not isinstance(data, tuple)):
-        if len(data) == 1 and isinstance(data[0], Mapping) and len(data[0]) > 1:
-            return [normalize({k: v}) for k, v in data[0].items()]
+    if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
+        k, v = data
+        return (normalize(k, kind=Kind.KEYWORD), normalize(v))
 
+    if is_sequence(data) and (kind == Kind.SINGLE_ENTRY or not isinstance(data, tuple)):
         return [normalize(d, kind=Kind.SINGLE_ENTRY) for d in data]
 
     if isinstance(data, Dimensioned):
@@ -72,11 +73,11 @@ def normalize(data: Data, *, kind: Kind = Kind.DEFAULT) -> Data:
 
     if isinstance(data, str):
         if kind == Kind.KEYWORD:
-            data = KEYWORD.parse_string(data, parse_all=True)[0]
+            data = TOKEN.parse_string(data, parse_all=True)[0]
             assert isinstance(data, str)
             return data
 
-        return cast(DataEntry, DATA.parse_string(data, parse_all=True)[0])
+        return cast(Data, DATA.parse_string(data, parse_all=True)[0])
 
     if isinstance(
         data,
@@ -89,26 +90,25 @@ def normalize(data: Data, *, kind: Kind = Kind.DEFAULT) -> Data:
 
 
 def dumps(
-    data: Data,
+    data: Entry,
     *,
     kind: Kind = Kind.DEFAULT,
 ) -> bytes:
     data = normalize(data, kind=kind)
 
     if isinstance(data, Mapping):
-        entries = []
-        for k, v in data.items():
-            value = normalize(v)
-            if isinstance(value, Mapping):
-                entries.append(
-                    dumps(k, kind=Kind.KEYWORD) + b" {" + dumps(value) + b"}"
-                )
-            elif not value:
-                entries.append(dumps(k, kind=Kind.KEYWORD) + b";")
-            else:
-                entries.append(dumps(k, kind=Kind.KEYWORD) + b" " + dumps(value) + b";")
+        return (
+            b"{"
+            + b" ".join(dumps((k, v), kind=Kind.SINGLE_ENTRY) for k, v in data.items())
+            + b"}"
+        )
 
-        return b" ".join(entries)
+    if isinstance(data, tuple) and kind == Kind.SINGLE_ENTRY and len(data) == 2:
+        k, v = data
+        ret = dumps(k, kind=Kind.KEYWORD) + b" " + dumps(v)
+        if not isinstance(v, Mapping):
+            ret += b";"
+        return ret
 
     if isinstance(data, DimensionSet):
         return b"[" + b" ".join(dumps(v) for v in data) + b"]"
@@ -119,10 +119,12 @@ def dumps(
         Kind.SINGLE_PRECISION_BINARY_FIELD,
     ) and (
         isinstance(data, (int, float))
-        or is_sequence(data)
-        and data
-        and isinstance(data[0], (int, float))
-        and len(data) in (3, 6, 9)
+        or (
+            is_sequence(data)
+            and data
+            and isinstance(data[0], (int, float))
+            and len(data) in (3, 6, 9)
+        )
     ):
         return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
 
@@ -185,7 +187,7 @@ def dumps(
     if isinstance(data, tuple):
         return b" ".join(dumps(v) for v in data)
 
-    if is_sequence(data):
+    if is_sequence(data) and not isinstance(data, tuple):
         return b"(" + b" ".join(dumps(v, kind=Kind.SINGLE_ENTRY) for v in data) + b")"
 
     if data is True:
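
`dumps` now routes dictionaries through the same `(keyword, value)` tuple form that `normalize` produces, emitting the surrounding braces itself. A reduced sketch of the new control flow (dicts, 2-tuples and scalars only; the real function also handles fields, dimensions, binary data, etc.):

    def dumps_sketch(data) -> bytes:
        if isinstance(data, dict):
            # a mapping becomes a braced, space-joined sequence of its entries
            entries = b" ".join(dumps_sketch((k, v)) for k, v in data.items())
            return b"{" + entries + b"}"
        if isinstance(data, tuple) and len(data) == 2:
            # a (keyword, value) pair becomes "keyword value;" unless the value
            # is a sub-dictionary, which brings its own braces
            k, v = data
            ret = dumps_sketch(k) + b" " + dumps_sketch(v)
            if not isinstance(v, dict):
                ret += b";"
            return ret
        return str(data).encode()

    assert dumps_sketch({"nu": 1e-05}) == b"{nu 1e-05;}"
    assert dumps_sketch({"inlet": {"type": "fixedValue"}}) == b"{inlet {type fixedValue;}}"
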
foamlib/_files/_types.py CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations
 
 import sys
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Dict, NamedTuple, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Dict, NamedTuple, Optional, Union
 
 if TYPE_CHECKING:
     import numpy as np
@@ -29,8 +29,7 @@ class DimensionSet(NamedTuple):
 Tensor = Union[
     float,
     Sequence[float],
-    "np.ndarray[Tuple[()], np.dtype[np.generic]]",
-    "np.ndarray[Tuple[int], np.dtype[np.generic]]",
+    "np.ndarray[tuple[()] | tuple[int], np.dtype[np.float64 | np.int_]]",
 ]
 
 
@@ -46,33 +45,35 @@ class Dimensioned:
 
 
 Field = Union[
-    Tensor, Sequence[Tensor], "np.ndarray[Tuple[int, int], np.dtype[np.generic]]"
+    Tensor,
+    Sequence[Tensor],
+    "np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.int_]]",
 ]
 
-DataEntry = Union[
+Data = Union[
     str,
     int,
     float,
     bool,
     Dimensioned,
     DimensionSet,
-    Sequence["Data"],
+    Sequence["Entry"],
     Tensor,
     Field,
 ]
 
-Data = Union[
-    DataEntry,
-    Mapping[str, "Data"],
+Entry = Union[
+    Data,
+    Mapping[str, "Entry"],
 ]
 """
 A value that can be stored in an OpenFOAM file.
 """
 
-MutableData = Union[
-    DataEntry,
-    MutableMapping[str, "MutableData"],
+MutableEntry = Union[
+    Data,
+    MutableMapping[str, "MutableEntry"],
 ]
 
-Dict_ = Dict[str, Union["Data", "Dict_"]]
-File = Dict[Optional[str], Union["Data", "Dict_"]]
+Dict_ = Dict[str, Union["Entry", "Dict_"]]
+File = Dict[Optional[str], Union["Entry", "Dict_"]]
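
After the rename the alias hierarchy reads top-down: `Data` (formerly `DataEntry`) covers plain values, `Entry` (formerly `Data`) adds nested mappings, and `MutableEntry` is the mutable variant that `FoamFile` exposes. A simplified, self-contained restatement (`Dimensioned`, `DimensionSet`, `Tensor` and `Field` omitted for brevity):

    from typing import Mapping, MutableMapping, Sequence, Union

    Data = Union[str, int, float, bool, Sequence["Entry"]]
    Entry = Union[Data, Mapping[str, "Entry"]]
    MutableEntry = Union[Data, MutableMapping[str, "MutableEntry"]]

    # An OpenFOAM sub-dictionary expressed as an Entry:
    entry: Entry = {"inlet": {"type": "fixedValue", "value": 0}}
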
foamlib/_files/_util.py CHANGED
@@ -14,10 +14,10 @@ else:
     from typing_extensions import TypeGuard
 
 if TYPE_CHECKING:
-    from ._types import Data
+    from ._types import Entry
 
 
 def is_sequence(
-    value: Data,
-) -> TypeGuard[Sequence[Data]]:
+    value: Entry,
+) -> TypeGuard[Sequence[Entry]]:
     return isinstance(value, Sequence) and not isinstance(value, str)
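
Since `is_sequence` is a `TypeGuard`, widening its parameter from `Data` to `Entry` widens what type checkers narrow at every call site. A self-contained illustration of the guard's one subtlety, namely that `str` is itself a `Sequence` and must be excluded:

    from collections.abc import Sequence
    from typing import Any, TypeGuard  # TypeGuard is in typing from Python 3.10

    def is_sequence(value: Any) -> TypeGuard[Sequence[Any]]:
        return isinstance(value, Sequence) and not isinstance(value, str)

    def head(value: Any) -> Any:
        if is_sequence(value):
            return value[0]  # narrowed to Sequence[Any] here
        return value

    assert head([1, 2, 3]) == 1
    assert head("abc") == "abc"  # strings are not treated as sequences
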
foamlib-0.8.0.dist-info/METADATA CHANGED
@@ -1,15 +1,15 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: foamlib
-Version: 0.7.2
+Version: 0.8.0
 Summary: A Python interface for interacting with OpenFOAM
-Author-email: "Gabriel S. Gerlero" <ggerlero@cimec.unl.edu.ar>
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib
 Project-URL: Documentation, https://foamlib.readthedocs.io
+Author-email: "Gabriel S. Gerlero" <ggerlero@cimec.unl.edu.ar>
 Classifier: Development Status :: 4 - Beta
 Classifier: Framework :: AsyncIO
-Classifier: Intended Audience :: Science/Research
 Classifier: Intended Audience :: Developers
+Classifier: Intended Audience :: Science/Research
 Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
 Classifier: Operating System :: POSIX
 Classifier: Programming Language :: Python :: 3 :: Only
@@ -24,34 +24,43 @@ Classifier: Topic :: Scientific/Engineering
 Classifier: Topic :: Software Development
 Classifier: Typing :: Typed
 Requires-Python: >=3.7
-Description-Content-Type: text/markdown
-License-File: LICENSE.txt
-Requires-Dist: aioshutil <2,>=1
-Requires-Dist: pyparsing <4,>=3
-Requires-Dist: typing-extensions <5,>=4 ; python_version < "3.11"
+Requires-Dist: aioshutil<2,>=1
+Requires-Dist: pyparsing<4,>=3
+Requires-Dist: typing-extensions<5,>=4; python_version < '3.11'
 Provides-Extra: dev
-Requires-Dist: foamlib[numpy] ; extra == 'dev'
-Requires-Dist: foamlib[lint] ; extra == 'dev'
-Requires-Dist: foamlib[test] ; extra == 'dev'
-Requires-Dist: foamlib[typing] ; extra == 'dev'
-Requires-Dist: foamlib[docs] ; extra == 'dev'
+Requires-Dist: mypy<2,>=1; extra == 'dev'
+Requires-Dist: numpy<3,>=1; extra == 'dev'
+Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'dev'
+Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'dev'
+Requires-Dist: pytest-cov; extra == 'dev'
+Requires-Dist: pytest<9,>=7; extra == 'dev'
+Requires-Dist: ruff; extra == 'dev'
+Requires-Dist: sphinx-rtd-theme; extra == 'dev'
+Requires-Dist: sphinx<9,>=5; extra == 'dev'
 Provides-Extra: docs
-Requires-Dist: foamlib[numpy] ; extra == 'docs'
-Requires-Dist: sphinx <9,>=5 ; extra == 'docs'
-Requires-Dist: sphinx-rtd-theme ; extra == 'docs'
+Requires-Dist: numpy<3,>=1; extra == 'docs'
+Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'docs'
+Requires-Dist: sphinx-rtd-theme; extra == 'docs'
+Requires-Dist: sphinx<9,>=5; extra == 'docs'
 Provides-Extra: lint
-Requires-Dist: ruff ; extra == 'lint'
+Requires-Dist: ruff; extra == 'lint'
 Provides-Extra: numpy
-Requires-Dist: numpy <3,>=1 ; extra == 'numpy'
-Requires-Dist: numpy <3,>=1.25.0 ; (python_version >= "3.10") and extra == 'numpy'
+Requires-Dist: numpy<3,>=1; extra == 'numpy'
+Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'numpy'
 Provides-Extra: test
-Requires-Dist: foamlib[numpy] ; extra == 'test'
-Requires-Dist: pytest <9,>=7 ; extra == 'test'
-Requires-Dist: pytest-asyncio <0.25,>=0.21 ; extra == 'test'
-Requires-Dist: pytest-cov ; extra == 'test'
+Requires-Dist: numpy<3,>=1; extra == 'test'
+Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'test'
+Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'test'
+Requires-Dist: pytest-cov; extra == 'test'
+Requires-Dist: pytest<9,>=7; extra == 'test'
 Provides-Extra: typing
-Requires-Dist: foamlib[test] ; extra == 'typing'
-Requires-Dist: mypy <2,>=1 ; extra == 'typing'
+Requires-Dist: mypy<2,>=1; extra == 'typing'
+Requires-Dist: numpy<3,>=1; extra == 'typing'
+Requires-Dist: numpy<3,>=1.25.0; (python_version >= '3.10') and extra == 'typing'
+Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'typing'
+Requires-Dist: pytest-cov; extra == 'typing'
+Requires-Dist: pytest<9,>=7; extra == 'typing'
+Description-Content-Type: text/markdown
 
 [<img alt="foamlib" src="https://github.com/gerlero/foamlib/raw/main/logo.png" height="65">](https://github.com/gerlero/foamlib)
 
foamlib-0.8.0.dist-info/RECORD ADDED
@@ -0,0 +1,21 @@
+foamlib/__init__.py,sha256=iN_p0PYnIJNOiwBl-JcsqwLyxvZMD2qW6X7alo-YOO4,452
+foamlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+foamlib/_cases/__init__.py,sha256=_A1TTHuQfS9FH2_33lSEyLtOJZGFHZBco1tWJCVOHks,358
+foamlib/_cases/_async.py,sha256=i6g4EBHqvI-1PkdrxsRto2ynW7sxsOga2bSYk1XVG1U,7795
+foamlib/_cases/_base.py,sha256=37oBbM3NM-hpG7dKewZvyJNtqSAogMurcbmX-wLIgMU,6727
+foamlib/_cases/_run.py,sha256=lveqKZium_qK_eTxYE8jOjwx0eiIoolCBbi56-zLw1o,14420
+foamlib/_cases/_slurm.py,sha256=kj4wqgr3foMyAoUkoHOZODRBmVqH1B9KqAIEEjM8ZBg,2328
+foamlib/_cases/_subprocess.py,sha256=6BlBRxknj2-BFcGkx7oVcuL63_utSaY1Axmsc1qV9j8,3887
+foamlib/_cases/_sync.py,sha256=2BJXB7Nzldb4OgPukqupgYqdceUGkI2mYhhtGPWEBrc,5901
+foamlib/_cases/_util.py,sha256=tK4SM5WT3eEgGsFLnidIySbom1qowBAua9z13gipKJk,1518
+foamlib/_files/__init__.py,sha256=q1vkjXnjnSZvo45jPAICpWeF2LZv5V6xfzAR6S8fS5A,96
+foamlib/_files/_files.py,sha256=Afkv9X8Om-AxIe9Ojqbz203_69okdlMNIlSeEAMnkzY,15832
+foamlib/_files/_io.py,sha256=BGbbm6HKxL2ka0YMCmHqZQZ1R4PPQlkvWWb4FHMAS8k,2217
+foamlib/_files/_parsing.py,sha256=Go-gwu5HAZF__iF29l_EiPWouBIhjeQG8e6P9vM2cXY,13833
+foamlib/_files/_serialization.py,sha256=gc0ybQFKU68Ytdv1Uwa1kA8GgunI69XnIch259K4ing,5826
+foamlib/_files/_types.py,sha256=jvXegp5vKLMJpN8I3jTpAhnT2WQ5FJ5WVBcqw5pRjdQ,1666
+foamlib/_files/_util.py,sha256=lkoSJHXjd6MvDxx39ZF75mhPq-_QX9AjrruVcQ7I9WI,496
+foamlib-0.8.0.dist-info/METADATA,sha256=RZN9GJjprIVwmk39qVXBNJq3yr9Sy9LhuVjHAIPcHfc,8499
+foamlib-0.8.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
+foamlib-0.8.0.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
+foamlib-0.8.0.dist-info/RECORD,,
foamlib-0.8.0.dist-info/WHEEL CHANGED
@@ -1,5 +1,4 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.5.0)
+Generator: hatchling 1.26.3
 Root-Is-Purelib: true
 Tag: py3-none-any
-
foamlib-0.7.2.dist-info/RECORD DELETED
@@ -1,22 +0,0 @@
-foamlib/__init__.py,sha256=YrFnCARXwvjW-QZzK9cS_ivto2gvTEJwdSVU5ZfCBkM,452
-foamlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-foamlib/_cases/__init__.py,sha256=wTUHcUgU1CBgpu0cUMtksQ5VKG6B8CFu9xc3dWwsQuo,358
-foamlib/_cases/_async.py,sha256=i6g4EBHqvI-1PkdrxsRto2ynW7sxsOga2bSYk1XVG1U,7795
-foamlib/_cases/_base.py,sha256=FKfZxP7HspWfSswQ6yZ5bGJRdZUlupQxj2tDqngXPmc,6785
-foamlib/_cases/_run.py,sha256=lveqKZium_qK_eTxYE8jOjwx0eiIoolCBbi56-zLw1o,14420
-foamlib/_cases/_slurm.py,sha256=kj4wqgr3foMyAoUkoHOZODRBmVqH1B9KqAIEEjM8ZBg,2328
-foamlib/_cases/_subprocess.py,sha256=6BlBRxknj2-BFcGkx7oVcuL63_utSaY1Axmsc1qV9j8,3887
-foamlib/_cases/_sync.py,sha256=2BJXB7Nzldb4OgPukqupgYqdceUGkI2mYhhtGPWEBrc,5901
-foamlib/_cases/_util.py,sha256=lhVca3ERY0zwYjDam6W2QMROt0yX5vAF-9_DS5RuMbM,1547
-foamlib/_files/__init__.py,sha256=GDkYkF3F-ADhkCRT3j9dQQHPP5LyJJYb8TaBbZTQ6fo,96
-foamlib/_files/_files.py,sha256=LVLMeP9Zt9esuVGhntnjBA4_u_NhiX5xkm09Qemcle8,15846
-foamlib/_files/_io.py,sha256=IQLqoqnA1TpHf21NbUho2wsYWevyqC6MKo-wfpaObUU,2226
-foamlib/_files/_parsing.py,sha256=D1l7j_KldD6h-iJvFT6c0CiwctWvUxeGJxh-J93zDs8,15430
-foamlib/_files/_serialization.py,sha256=0vqJxltjscqp16mIvd0iKXbeRMbq3a7uLG2INWxzCBg,5861
-foamlib/_files/_types.py,sha256=eY06nox8cZe6FixUQigvZRV9Fc7gwehJxuQz_wCCnqo,1678
-foamlib/_files/_util.py,sha256=O9t2W26XDs63cVLroW8rZ35Puas20NhnKr_gLMlZixI,493
-foamlib-0.7.2.dist-info/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
-foamlib-0.7.2.dist-info/METADATA,sha256=dK_DN2ZLU49org-G_XI4kdNDg551I8b68d52Q4cAePU,7957
-foamlib-0.7.2.dist-info/WHEEL,sha256=R06PA3UVYHThwHvxuRWMqaGcr-PuniXahwjmQRFMEkY,91
-foamlib-0.7.2.dist-info/top_level.txt,sha256=ZdVYtetXGwPwyfL-WhlhbTFQGAwKX5P_gXxtH9JYFPI,8
-foamlib-0.7.2.dist-info/RECORD,,
foamlib-0.7.2.dist-info/top_level.txt DELETED
@@ -1 +0,0 @@
-foamlib