foamlib-0.8.1-py3-none-any.whl → foamlib-0.8.3-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in the public registry, and is provided for informational purposes only.
foamlib/__init__.py CHANGED
@@ -1,6 +1,6 @@
 """A Python interface for interacting with OpenFOAM."""
 
-__version__ = "0.8.1"
+__version__ = "0.8.3"
 
 from ._cases import (
     AsyncFoamCase,
foamlib/_cases/_async.py CHANGED
@@ -106,7 +106,7 @@ class AsyncFoamCase(FoamCaseRunBase):
     async def _rmtree(
         path: os.PathLike[str] | str, *, ignore_errors: bool = False
     ) -> None:
-        await aioshutil.rmtree(path, ignore_errors=ignore_errors)
+        await aioshutil.rmtree(path, ignore_errors=ignore_errors)  # type: ignore [call-arg]
 
     @staticmethod
     async def _copytree(
foamlib/_files/_files.py CHANGED
@@ -260,6 +260,11 @@ class FoamFile(
                     elif shape[1] == 9:
                         self.class_ = "volTensorField"
 
+            if kind == Kind.ASCII_FIELD and self.class_.endswith("scalarField"):
+                kind = Kind.SCALAR_ASCII_FIELD
+            elif kind == Kind.BINARY_FIELD and self.class_.endswith("scalarField"):
+                kind = Kind.SCALAR_BINARY_FIELD
+
             parsed = self._get_parsed(missing_ok=True)
 
             start, end = parsed.entry_location(keywords, missing_ok=True)
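
Note on the hunk above: when the file's class_ ends in "scalarField", writing a field now switches to the new SCALAR_* kinds, so a 1-D array is serialized as a list of scalars rather than being mistaken for a single uniform vector or tensor. A minimal usage sketch (case path and values are invented for illustration):

    import numpy as np
    from foamlib import FoamFile

    p = FoamFile("myCase/0/p")          # assumed to be a volScalarField file
    p["internalField"] = np.zeros(100)  # 1-D array -> written as a scalar field
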
foamlib/_files/_parsing.py CHANGED
@@ -2,7 +2,6 @@ from __future__ import annotations
 
 import re
 import sys
-from enum import Enum, auto
 from typing import Tuple, Union, cast
 
 if sys.version_info >= (3, 9):
@@ -37,65 +36,31 @@ from pyparsing import (
     printables,
 )
 
-from ._types import Data, Dimensioned, DimensionSet, File
-
-
-class _Tensor(Enum):
-    SCALAR = auto()
-    VECTOR = auto()
-    SYMM_TENSOR = auto()
-    TENSOR = auto()
-
-    @property
-    def shape(self) -> tuple[()] | tuple[int]:
-        if self == _Tensor.SCALAR:
-            return ()
-        if self == _Tensor.VECTOR:
-            return (3,)
-        if self == _Tensor.SYMM_TENSOR:
-            return (6,)
-        if self == _Tensor.TENSOR:
-            return (9,)
-        raise NotImplementedError
-
-    @property
-    def size(self) -> int:
-        return {
-            _Tensor.SCALAR: 1,
-            _Tensor.VECTOR: 3,
-            _Tensor.SYMM_TENSOR: 6,
-            _Tensor.TENSOR: 9,
-        }[self]
-
-    def pattern(self, *, ignore: Regex | None = None) -> str:
-        float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
-
-        if self == _Tensor.SCALAR:
-            return float_pattern
-
-        ignore_pattern = (
-            rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
-        )
+from ._types import Data, Dimensioned, DimensionSet, File, TensorKind
 
-        return rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{self.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
 
-    def parser(self) -> ParserElement:
-        if self == _Tensor.SCALAR:
-            return common.ieee_float
+def _tensor(tensor_kind: TensorKind, *, ignore: Regex | None = None) -> Regex:
+    float_pattern = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
 
-        return (
-            Literal("(").suppress()
-            + Group(common.ieee_float[self.size], aslist=True)
-            + Literal(")").suppress()
-        ).add_parse_action(lambda tks: np.array(tks[0], dtype=float))
+    if tensor_kind == TensorKind.SCALAR:
+        ret = Regex(float_pattern)
+        ret.add_parse_action(lambda tks: [float(tks[0])])
+        return ret
+
+    ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
 
-    def __str__(self) -> str:
-        return {
-            _Tensor.SCALAR: "scalar",
-            _Tensor.VECTOR: "vector",
-            _Tensor.SYMM_TENSOR: "symmTensor",
-            _Tensor.TENSOR: "tensor",
-        }[self]
+    ret = Regex(
+        rf"\((?:{ignore_pattern})?(?:{float_pattern}{ignore_pattern}){{{tensor_kind.size - 1}}}{float_pattern}(?:{ignore_pattern})?\)"
+    )
+    ret.add_parse_action(
+        lambda tks: np.fromstring(
+            re.sub(ignore.re, " ", tks[0][1:-1])
+            if ignore is not None
+            else tks[0][1:-1],
+            sep=" ",
+        )
+    )
+    return ret
 
 
 def _list_of(entry: ParserElement) -> ParserElement:
@@ -118,43 +83,35 @@ def _list_of(entry: ParserElement) -> ParserElement:
 
 
 def _parse_ascii_field(
-    s: str, tensor_kind: _Tensor, *, ignore: Regex | None
+    s: str, tensor_kind: TensorKind, *, ignore: Regex | None
 ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64]]:
     if ignore is not None:
         s = re.sub(ignore.re, " ", s)
     s = s.replace("(", " ").replace(")", " ")
 
-    return np.fromstring(s, dtype=float, sep=" ").reshape(-1, *tensor_kind.shape)
+    return np.fromstring(s, sep=" ").reshape(-1, *tensor_kind.shape)  # type: ignore [return-value]
 
 
 def _unpack_binary_field(
-    b: bytes, tensor_kind: _Tensor, *, length: int
+    b: bytes, tensor_kind: TensorKind, *, length: int
 ) -> np.ndarray[tuple[int] | tuple[int, int], np.dtype[np.float64 | np.float32]]:
     float_size = len(b) / tensor_kind.size / length
     assert float_size in (4, 8)
 
     dtype = np.float32 if float_size == 4 else float
-    return np.frombuffer(b, dtype=dtype).reshape(-1, *tensor_kind.shape)
+    return np.frombuffer(b, dtype=dtype).reshape(-1, *tensor_kind.shape)  # type: ignore [return-value]
 
 
 def _tensor_list(
-    tensor_kind: _Tensor | None = None, *, ignore: Regex | None = None
+    tensor_kind: TensorKind, *, ignore: Regex | None = None
 ) -> ParserElement:
-    if tensor_kind is None:
-        return (
-            _tensor_list(_Tensor.SCALAR, ignore=ignore)
-            | _tensor_list(_Tensor.VECTOR, ignore=ignore)
-            | _tensor_list(_Tensor.SYMM_TENSOR, ignore=ignore)
-            | _tensor_list(_Tensor.TENSOR, ignore=ignore)
-        )
-
-    tensor_pattern = tensor_kind.pattern(ignore=ignore)
+    tensor = _tensor(tensor_kind, ignore=ignore)
     ignore_pattern = rf"(?:\s|{ignore.re.pattern})+" if ignore is not None else r"\s+"
 
     list_ = Forward()
 
     list_ <<= Regex(
-        rf"\((?:{ignore_pattern})?(?:{tensor_pattern}{ignore_pattern})*{tensor_pattern}(?:{ignore_pattern})?\)"
+        rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern})*{tensor.re.pattern}(?:{ignore_pattern})?\)"
     ).add_parse_action(
         lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
     )
@@ -166,7 +123,7 @@ def _tensor_list(
 
         list_ <<= (
            Regex(
-                rf"\((?:{ignore_pattern})?(?:{tensor_pattern}{ignore_pattern}){{{length - 1}}}{tensor_pattern}(?:{ignore_pattern})?\)"
+                rf"\((?:{ignore_pattern})?(?:{tensor.re.pattern}{ignore_pattern}){{{length - 1}}}{tensor.re.pattern}(?:{ignore_pattern})?\)"
            ).add_parse_action(
                lambda tks: [_parse_ascii_field(tks[0], tensor_kind, ignore=ignore)]
            )
@@ -180,7 +137,7 @@ def _tensor_list(
                ]
            )
            | (
-                Literal("{").suppress() + tensor_kind.parser() + Literal("}").suppress()
+                Literal("{").suppress() + tensor + Literal("}").suppress()
            ).add_parse_action(
                lambda tks: [np.full((length, *tensor_kind.shape), tks[0], dtype=float)]
            )
@@ -261,20 +218,27 @@ _DIMENSIONS = (
     Literal("[").suppress() + common.number[0, 7] + Literal("]").suppress()
 ).set_parse_action(lambda tks: DimensionSet(*tks))
 _TENSOR = (
-    _Tensor.SCALAR.parser()
-    | _Tensor.VECTOR.parser()
-    | _Tensor.SYMM_TENSOR.parser()
-    | _Tensor.TENSOR.parser()
+    _tensor(TensorKind.SCALAR)
+    | _tensor(TensorKind.VECTOR)
+    | _tensor(TensorKind.SYMM_TENSOR)
+    | _tensor(TensorKind.TENSOR)
 )
-_IDENTIFIER = Combine(
+_IDENTIFIER = Forward()
+_IDENTIFIER <<= Combine(
     Word(_IDENTCHARS, _IDENTBODYCHARS, exclude_chars="()")
-    + Opt(Literal("(") + Word(_IDENTBODYCHARS, exclude_chars="()") + Literal(")"))
+    + Opt(Literal("(") + _IDENTIFIER + Literal(")"))
 )
 _DIMENSIONED = (Opt(_IDENTIFIER) + _DIMENSIONS + _TENSOR).set_parse_action(
     lambda tks: Dimensioned(*reversed(tks.as_list()))
 )
 _FIELD = (Keyword("uniform", _IDENTBODYCHARS).suppress() + _TENSOR) | (
-    Keyword("nonuniform", _IDENTBODYCHARS).suppress() + _tensor_list(ignore=_COMMENT)
+    Keyword("nonuniform", _IDENTBODYCHARS).suppress()
+    + (
+        _tensor_list(TensorKind.SCALAR, ignore=_COMMENT)
+        | _tensor_list(TensorKind.VECTOR, ignore=_COMMENT)
+        | _tensor_list(TensorKind.SYMM_TENSOR, ignore=_COMMENT)
+        | _tensor_list(TensorKind.TENSOR, ignore=_COMMENT)
+    )
 )
 _TOKEN = dbl_quoted_string | _IDENTIFIER
 _DATA = Forward()
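
In the rewrite above, the removed _Tensor enum's pattern()/parser() pair is replaced by a single _tensor() helper that returns one pyparsing Regex per TensorKind. A rough, self-contained illustration of what the generated pattern accepts for a vector (float pattern copied from the hunk; whitespace-only separators, i.e. no comment handling):

    import re

    float_pat = r"(?i:[+-]?(?:(?:\d+\.?\d*(?:e[+-]?\d+)?)|nan|inf(?:inity)?))"
    # a vector is "(" plus three whitespace-separated floats plus ")"
    vector_pat = rf"\((?:\s+)?(?:{float_pat}\s+){{2}}{float_pat}(?:\s+)?\)"

    bool(re.fullmatch(vector_pat, "(0 1.5e-3 -2)"))  # True
    bool(re.fullmatch(vector_pat, "(0 1)"))          # False: three components required
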
foamlib/_files/_serialization.py CHANGED
@@ -19,7 +19,9 @@ class Kind(Enum):
     DEFAULT = auto()
     SINGLE_ENTRY = auto()
     ASCII_FIELD = auto()
+    SCALAR_ASCII_FIELD = auto()
     BINARY_FIELD = auto()
+    SCALAR_BINARY_FIELD = auto()
     DIMENSIONS = auto()
 
 
@@ -32,7 +34,12 @@ def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry: ...
 
 
 def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry:
-    if kind in (Kind.ASCII_FIELD, Kind.BINARY_FIELD):
+    if kind in (
+        Kind.ASCII_FIELD,
+        Kind.SCALAR_ASCII_FIELD,
+        Kind.BINARY_FIELD,
+        Kind.SCALAR_BINARY_FIELD,
+    ):
         if is_sequence(data):
             try:
                 arr = np.asarray(data)
@@ -43,7 +50,7 @@ def normalize(data: Entry, *, kind: Kind = Kind.DEFAULT) -> Entry:
                     arr = arr.astype(float)
 
                 if arr.ndim == 1 or (arr.ndim == 2 and arr.shape[1] in (3, 6, 9)):
-                    return arr
+                    return arr  # type: ignore [return-value]
 
             return data
 
@@ -112,11 +119,17 @@ def dumps(
     if isinstance(data, DimensionSet):
         return b"[" + b" ".join(dumps(v) for v in data) + b"]"
 
-    if kind in (Kind.ASCII_FIELD, Kind.BINARY_FIELD) and (
-        isinstance(data, (int, float, np.ndarray))
-    ):
+    if kind in (
+        Kind.ASCII_FIELD,
+        Kind.SCALAR_ASCII_FIELD,
+        Kind.BINARY_FIELD,
+        Kind.SCALAR_BINARY_FIELD,
+    ) and (isinstance(data, (int, float, np.ndarray))):
         shape = np.shape(data)
-        if shape in ((), (3,), (6,), (9,)):
+        if not shape or (
+            kind not in (Kind.SCALAR_ASCII_FIELD, Kind.SCALAR_BINARY_FIELD)
+            and shape in ((3,), (6,), (9,))
+        ):
             return b"uniform " + dumps(data, kind=Kind.SINGLE_ENTRY)
 
         assert isinstance(data, np.ndarray)
@@ -137,10 +150,10 @@ def dumps(
         else:
             return dumps(data)
 
-        if kind == Kind.BINARY_FIELD:
+        if kind in (Kind.BINARY_FIELD, Kind.SCALAR_BINARY_FIELD):
             contents = b"(" + data.tobytes() + b")"
         else:
-            assert kind == Kind.ASCII_FIELD
+            assert kind in (Kind.ASCII_FIELD, Kind.SCALAR_ASCII_FIELD)
             contents = dumps(data, kind=Kind.SINGLE_ENTRY)
 
         return b"nonuniform List<" + tensor_kind + b"> " + dumps(len(data)) + contents
foamlib/_files/_types.py CHANGED
@@ -2,7 +2,8 @@ from __future__ import annotations
 
 import sys
 from dataclasses import dataclass
-from typing import Dict, NamedTuple, Optional, Tuple, Union
+from enum import Enum
+from typing import Dict, NamedTuple, Optional, Union
 
 import numpy as np
 
@@ -33,10 +34,43 @@ class DimensionSet(NamedTuple):
 Tensor = Union[
     float,
     Sequence[float],
-    "np.ndarray[tuple[()] | Tuple[int], np.dtype[np.float64]]",
+    "np.ndarray[tuple[()] | tuple[int], np.dtype[np.float64]]",
 ]
 
 
+class TensorKind(Enum):
+    SCALAR = ()
+    VECTOR = (3,)
+    SYMM_TENSOR = (6,)
+    TENSOR = (9,)
+
+    @property
+    def shape(self) -> tuple[()] | tuple[int]:
+        shape: tuple[()] | tuple[int] = self.value
+        return shape
+
+    @property
+    def size(self) -> int:
+        return int(np.prod(self.shape))
+
+    def __str__(self) -> str:
+        return {
+            TensorKind.SCALAR: "scalar",
+            TensorKind.VECTOR: "vector",
+            TensorKind.SYMM_TENSOR: "symmTensor",
+            TensorKind.TENSOR: "tensor",
+        }[self]
+
+    @staticmethod
+    def from_shape(shape: tuple[int, ...]) -> TensorKind:
+        for kind in TensorKind:
+            if kind.shape == shape:
+                return kind
+
+        msg = f"No tensor kind for shape {shape!r}"
+        raise ValueError(msg)
+
+
 @dataclass
 class Dimensioned:
     value: Tensor = 0
@@ -45,7 +79,7 @@ class Dimensioned:
 
     def __post_init__(self) -> None:
         if is_sequence(self.value):
-            self.value = np.asarray(self.value, dtype=float)
+            self.value = np.asarray(self.value, dtype=float)  # type: ignore [assignment]
         else:
            assert isinstance(self.value, (int, float, np.ndarray))
            self.value = float(self.value)
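
TensorKind, added above, stores each kind's shape as the enum value, so shape, size, and the OpenFOAM type name are all derived from it. A small sketch of its behavior (import path is the private module shown in this diff):

    from foamlib._files._types import TensorKind

    TensorKind.VECTOR.shape        # (3,)
    TensorKind.SYMM_TENSOR.size    # 6
    str(TensorKind.TENSOR)         # "tensor"
    TensorKind.from_shape(())      # TensorKind.SCALAR
    TensorKind.from_shape((2,))    # raises ValueError
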
{foamlib-0.8.1.dist-info → foamlib-0.8.3.dist-info}/METADATA RENAMED
@@ -1,11 +1,12 @@
-Metadata-Version: 2.3
+Metadata-Version: 2.4
 Name: foamlib
-Version: 0.8.1
+Version: 0.8.3
 Summary: A Python interface for interacting with OpenFOAM
 Project-URL: Homepage, https://github.com/gerlero/foamlib
 Project-URL: Repository, https://github.com/gerlero/foamlib
 Project-URL: Documentation, https://foamlib.readthedocs.io
 Author-email: "Gabriel S. Gerlero" <ggerlero@cimec.unl.edu.ar>
+License-File: LICENSE.txt
 Classifier: Development Status :: 4 - Beta
 Classifier: Framework :: AsyncIO
 Classifier: Intended Audience :: Developers
@@ -27,11 +28,11 @@ Requires-Python: >=3.7
 Requires-Dist: aioshutil<2,>=1
 Requires-Dist: numpy<3,>=1
 Requires-Dist: numpy<3,>=1.25.0; python_version >= '3.10'
-Requires-Dist: pyparsing<4,>=3
+Requires-Dist: pyparsing<4,>=3.1.2
 Requires-Dist: typing-extensions<5,>=4; python_version < '3.11'
 Provides-Extra: dev
 Requires-Dist: mypy<2,>=1; extra == 'dev'
-Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'dev'
+Requires-Dist: pytest-asyncio<0.26,>=0.21; extra == 'dev'
 Requires-Dist: pytest-cov; extra == 'dev'
 Requires-Dist: pytest<9,>=7; extra == 'dev'
 Requires-Dist: ruff; extra == 'dev'
@@ -43,12 +44,12 @@ Requires-Dist: sphinx<9,>=5; extra == 'docs'
 Provides-Extra: lint
 Requires-Dist: ruff; extra == 'lint'
 Provides-Extra: test
-Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'test'
+Requires-Dist: pytest-asyncio<0.26,>=0.21; extra == 'test'
 Requires-Dist: pytest-cov; extra == 'test'
 Requires-Dist: pytest<9,>=7; extra == 'test'
 Provides-Extra: typing
 Requires-Dist: mypy<2,>=1; extra == 'typing'
-Requires-Dist: pytest-asyncio<0.25,>=0.21; extra == 'typing'
+Requires-Dist: pytest-asyncio<0.26,>=0.21; extra == 'typing'
 Requires-Dist: pytest-cov; extra == 'typing'
 Requires-Dist: pytest<9,>=7; extra == 'typing'
 Description-Content-Type: text/markdown
{foamlib-0.8.1.dist-info → foamlib-0.8.3.dist-info}/RECORD RENAMED
@@ -1,7 +1,7 @@
-foamlib/__init__.py,sha256=TATCOFe7Qz20bMXgF1iX2nZmLzME7Wtg3gIocDfQTZk,452
+foamlib/__init__.py,sha256=ZODAE7gaDlcc4N4EKtrB1AyZz5tuAbw9rgkfSM-WKew,452
 foamlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 foamlib/_cases/__init__.py,sha256=_A1TTHuQfS9FH2_33lSEyLtOJZGFHZBco1tWJCVOHks,358
-foamlib/_cases/_async.py,sha256=i6g4EBHqvI-1PkdrxsRto2ynW7sxsOga2bSYk1XVG1U,7795
+foamlib/_cases/_async.py,sha256=onECxRLQCF7Kd-GeuLqH_Xv3gbMMhKOSbFaUG5Ttgmk,7822
 foamlib/_cases/_base.py,sha256=37oBbM3NM-hpG7dKewZvyJNtqSAogMurcbmX-wLIgMU,6727
 foamlib/_cases/_run.py,sha256=lveqKZium_qK_eTxYE8jOjwx0eiIoolCBbi56-zLw1o,14420
 foamlib/_cases/_slurm.py,sha256=kj4wqgr3foMyAoUkoHOZODRBmVqH1B9KqAIEEjM8ZBg,2328
@@ -9,12 +9,12 @@ foamlib/_cases/_subprocess.py,sha256=6BlBRxknj2-BFcGkx7oVcuL63_utSaY1Axmsc1qV9j8
 foamlib/_cases/_sync.py,sha256=2BJXB7Nzldb4OgPukqupgYqdceUGkI2mYhhtGPWEBrc,5901
 foamlib/_cases/_util.py,sha256=tK4SM5WT3eEgGsFLnidIySbom1qowBAua9z13gipKJk,1518
 foamlib/_files/__init__.py,sha256=q1vkjXnjnSZvo45jPAICpWeF2LZv5V6xfzAR6S8fS5A,96
-foamlib/_files/_files.py,sha256=hVzsAhUFukF7sGLUNp1wFUkzIMpRfrpKCKObLj8MKhA,15070
+foamlib/_files/_files.py,sha256=5sxtHQuN25w1gszZBpSMOaF--KOJy2HLmM6CScLof9s,15331
 foamlib/_files/_io.py,sha256=BGbbm6HKxL2ka0YMCmHqZQZ1R4PPQlkvWWb4FHMAS8k,2217
-foamlib/_files/_parsing.py,sha256=iKtreUueJ3NrE8a2cIpUf4zF7z3o4pJtKLufx0mnb3A,13986
-foamlib/_files/_serialization.py,sha256=Vuq9Ityb55a7kheGCqeuMHyzddb4mXozglMsc90_3Cc,4746
-foamlib/_files/_types.py,sha256=3l6MCwyK1wAv3B5xJsyF9Q4Ndgt-BF8zCd1N7_ZBxPM,2554
-foamlib-0.8.1.dist-info/METADATA,sha256=izsNijMcEBsVrrgyWoLzx80Gi6LoJ5PAIM4zNVngxwk,7938
-foamlib-0.8.1.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
-foamlib-0.8.1.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
-foamlib-0.8.1.dist-info/RECORD,,
+foamlib/_files/_parsing.py,sha256=1lwXdcsYrHXONbNbLYkl9wvwFBCcBt3-eVJUVcL3pIM,13055
+foamlib/_files/_serialization.py,sha256=Tp3Sx5Kav4Bh1K98FAYiN0O7hBVizu8eP-sDC_ZDmfc,5176
+foamlib/_files/_types.py,sha256=m-fFjJnS4sFSavDsijlXpAfEhnbh10RBumSHAT0GOgQ,3408
+foamlib-0.8.3.dist-info/METADATA,sha256=wJxGTASnWb3zZmfB0UWPdytUbVs13sJ8TogpaQJ8Pik,7968
+foamlib-0.8.3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+foamlib-0.8.3.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
+foamlib-0.8.3.dist-info/RECORD,,
{foamlib-0.8.1.dist-info → foamlib-0.8.3.dist-info}/WHEEL RENAMED
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: hatchling 1.26.3
+Generator: hatchling 1.27.0
 Root-Is-Purelib: true
 Tag: py3-none-any