foamlib 0.9.6-py3-none-any.whl → 1.0.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
foamlib/__init__.py CHANGED
@@ -1,6 +1,6 @@
  """A Python interface for interacting with OpenFOAM."""

- __version__ = "0.9.6"
+ __version__ = "1.0.0"

  from ._cases import (
      AsyncFoamCase,
foamlib/_files/_files.py CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations

  import sys
  from copy import deepcopy
- from typing import Any, Optional, Tuple, Union, cast
+ from typing import Any, Optional, Tuple, Union, cast, overload

  if sys.version_info >= (3, 8):
      from typing import Literal
@@ -65,7 +65,7 @@ def _tensor_kind_for_field(
  class FoamFile(
      MutableMapping[
          Optional[Union[str, Tuple[str, ...]]],
-         Union[Data, MutableSubDict],
+         Union[Data, StandaloneData, MutableSubDict],
      ],
      FoamFileIO,
  ):
@@ -156,7 +156,7 @@ class FoamFile(
              self._keywords = _keywords

          def __getitem__(self, keyword: str) -> Data | FoamFile.SubDict:
-             return self._file[(*self._keywords, keyword)]
+             return self._file[(*self._keywords, keyword)]  # type: ignore [return-value]

          def __setitem__(
              self,
@@ -270,10 +270,21 @@ class FoamFile(
      def object_(self, value: str) -> None:
          self["FoamFile", "object"] = value

+     @overload  # type: ignore [override]
+     def __getitem__(self, keywords: None | tuple[()]) -> StandaloneData: ...
+
+     @overload
+     def __getitem__(self, keywords: str) -> Data | FoamFile.SubDict: ...
+
+     @overload
+     def __getitem__(
+         self, keywords: tuple[str, ...]
+     ) -> Data | StandaloneData | FoamFile.SubDict: ...
+
      def __getitem__(
          self, keywords: str | tuple[str, ...] | None
-     ) -> Data | FoamFile.SubDict:
-         if not keywords:
+     ) -> Data | StandaloneData | FoamFile.SubDict:
+         if keywords is None:
              keywords = ()
          elif not isinstance(keywords, tuple):
              keywords = (keywords,)
@@ -288,10 +299,27 @@ class FoamFile(
              return FoamFile.SubDict(self, keywords)
          return deepcopy(value)

+     @overload  # type: ignore [override]
      def __setitem__(
-         self, keywords: str | tuple[str, ...] | None, data: DataLike | SubDictLike
+         self, keywords: None | tuple[()], data: StandaloneDataLike
+     ) -> None: ...
+
+     @overload
+     def __setitem__(self, keywords: str, data: DataLike | SubDictLike) -> None: ...
+
+     @overload
+     def __setitem__(
+         self,
+         keywords: tuple[str, ...],
+         data: DataLike | StandaloneDataLike | SubDictLike,
+     ) -> None: ...
+
+     def __setitem__(
+         self,
+         keywords: str | tuple[str, ...] | None,
+         data: DataLike | StandaloneDataLike | SubDictLike,
      ) -> None:
-         if not keywords:
+         if keywords is None:
              keywords = ()
          elif not isinstance(keywords, tuple):
              keywords = (keywords,)
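
With these overloads, indexing a `FoamFile` with `None` (or the empty tuple) is typed as standalone data, i.e. the keyword-less content of files such as `points` or `faces`, while string keywords keep returning ordinary entries or sub-dictionaries. A minimal usage sketch under that reading; the case paths and keywords are illustrative only:

```python
from foamlib import FoamFile

# Keyword access: Data or a FoamFile.SubDict (unchanged behaviour).
control = FoamFile("case/system/controlDict")
end_time = control["endTime"]

# None / () now selects the standalone (keyword-less) data of a file,
# typed as StandaloneData -- e.g. the point list of a polyMesh.
points = FoamFile("case/constant/polyMesh/points")[None]
FoamFile("case/constant/polyMesh/points")[None] = points
```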
@@ -416,7 +444,7 @@ class FoamFile(
          )

      def __delitem__(self, keywords: str | tuple[str, ...] | None) -> None:
-         if not keywords:
+         if keywords is None:
              keywords = ()
          elif not isinstance(keywords, tuple):
              keywords = (keywords,)
@@ -433,7 +461,7 @@ class FoamFile(
          yield from (k for k in self._iter() if k != "FoamFile")

      def __contains__(self, keywords: object) -> bool:
-         if not keywords:
+         if keywords is None:
              keywords = ()
          elif not isinstance(keywords, tuple):
              keywords = (keywords,)
@@ -481,7 +509,7 @@ class FoamFile(
          :param include_header: Whether to include the "FoamFile" header in the output.
              If `True`, the header will be included if it is present in the input object.
          """
-         ret = loads(s)
+         ret = loads(s, keywords=())

          if not include_header and isinstance(ret, Mapping) and "FoamFile" in ret:
              del ret["FoamFile"]
@@ -507,15 +535,23 @@ class FoamFile(
              If ``True``, a header will be included if it is not already present in the
              input object.
          """
-         header: SubDict | None
+         header: SubDictLike | None
          if isinstance(file, Mapping):
-             header = file.get("FoamFile", None)  # type: ignore [assignment]
+             h = file.get("FoamFile", None)
+             assert h is None or isinstance(h, Mapping)
+             header = h

              entries: list[bytes] = []
              for k, v in file.items():
                  if k is not None:
+                     v = cast("Union[Data, SubDict]", v)
                      entries.append(
-                         dumps((k, v), keywords=(), header=header, tuple_is_entry=True)  # type: ignore [arg-type]
+                         dumps(
+                             (k, v),
+                             keywords=(),
+                             header=header,
+                             tuple_is_keyword_entry=True,
+                         )
                      )
                  else:
                      assert not isinstance(v, Mapping)
@@ -629,10 +665,21 @@ class FoamFieldFile(FoamFile):
      def value(self) -> None:
          del self["value"]

+     @overload  # type: ignore [override]
+     def __getitem__(self, keywords: None | tuple[()]) -> StandaloneData: ...
+
+     @overload
+     def __getitem__(self, keywords: str) -> Data | FoamFieldFile.SubDict: ...
+
+     @overload
+     def __getitem__(
+         self, keywords: tuple[str, ...]
+     ) -> Data | StandaloneData | FoamFieldFile.SubDict: ...
+
      def __getitem__(
          self, keywords: str | tuple[str, ...] | None
-     ) -> Data | FoamFile.SubDict:
-         if not keywords:
+     ) -> Data | StandaloneData | FoamFile.SubDict:
+         if keywords is None:
              keywords = ()
          elif not isinstance(keywords, tuple):
              keywords = (keywords,)
foamlib/_files/_parsing.py CHANGED
@@ -2,7 +2,7 @@ from __future__ import annotations

  import re
  import sys
- from typing import TYPE_CHECKING, Tuple, Union, cast
+ from typing import TYPE_CHECKING, Tuple, Union, cast, overload

  if sys.version_info >= (3, 9):
      from collections.abc import Iterator, Mapping, MutableMapping, Sequence
@@ -38,7 +38,7 @@ from pyparsing import (
      printables,
  )

- from ._types import Data, Dimensioned, DimensionSet, File
+ from ._types import Data, Dimensioned, DimensionSet, File, StandaloneData, SubDict

  if TYPE_CHECKING:
      from numpy.typing import DTypeLike
@@ -190,6 +190,60 @@ def _binary_numeric_list(
      ).add_parse_action(to_array)


+ def _ascii_face_list(*, ignore: Regex | None = None) -> ParserElement:
+     element_pattern = r"(?:-?\d+)"
+     spacing_pattern = (
+         rf"(?:(?:\s|{ignore.re.pattern})+)" if ignore is not None else r"(?:\s+)"
+     )
+
+     element_pattern = rf"(?:(?:3{spacing_pattern}?\((?:{element_pattern}{spacing_pattern}){{2}}{element_pattern}{spacing_pattern}?\))|(?:4{spacing_pattern}?\((?:{element_pattern}{spacing_pattern}){{3}}{element_pattern}{spacing_pattern}?\)))"
+
+     list_ = Forward()
+
+     def process_count(tks: ParseResults) -> None:
+         nonlocal list_
+         if not tks:
+             count = None
+         else:
+             (count,) = tks
+             assert isinstance(count, int)
+
+         if count is None:
+             list_pattern = rf"\({spacing_pattern}?(?:{element_pattern}{spacing_pattern})*{element_pattern}{spacing_pattern}?\)"
+
+         elif count == 0:
+             list_ <<= NoMatch()
+             return
+
+         else:
+             list_pattern = rf"\({spacing_pattern}?(?:{element_pattern}{spacing_pattern}){{{count - 1}}}{element_pattern}{spacing_pattern}?\)"
+
+         list_ <<= Regex(list_pattern).add_parse_action(to_face_list)
+
+     def to_face_list(
+         tks: ParseResults,
+     ) -> list[list[np.ndarray[tuple[int], np.dtype[np.int64]]]]:
+         (s,) = tks
+         assert s.startswith("(")
+         assert s.endswith(")")
+         if ignore is not None:
+             s = re.sub(ignore.re, " ", s)
+         s = s.replace("(", " ").replace(")", " ")
+
+         raw = np.fromstring(s, sep=" ", dtype=int)
+
+         values: list[np.ndarray[tuple[int], np.dtype[np.int64]]] = []
+         i = 0
+         while i < raw.size:
+             assert raw[i] in (3, 4)
+             values.append(raw[i + 1 : i + raw[i] + 1])  # type: ignore[arg-type]
+             i += raw[i] + 1
+
+         return [values]
+
+     return Opt(common.integer).add_parse_action(process_count).suppress() + list_
+
+
  def _list_of(entry: ParserElement) -> ParserElement:
      return (
          (
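
For context, the new `_ascii_face_list` parser targets OpenFOAM face lists, where each face is written as a vertex count (3 or 4) followed by the vertex labels in parentheses. The sketch below is a standalone illustration of the decoding step performed by `to_face_list`, not a call into the library:

```python
import numpy as np

# Input such as "(3(0 1 2) 4(0 1 2 3))" is reduced to a flat integer stream
# once the parentheses are stripped, then split using each leading count.
raw = np.fromstring(" 3 0 1 2 4 0 1 2 3 ", sep=" ", dtype=int)

faces, i = [], 0
while i < raw.size:
    assert raw[i] in (3, 4)              # triangle or quad
    faces.append(raw[i + 1 : i + raw[i] + 1])
    i += raw[i] + 1

# faces -> [array([0, 1, 2]), array([0, 1, 2, 3])]
```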
@@ -370,7 +424,7 @@ _KEYWORD_ENTRY = _keyword_entry_of(
      directive=_DIRECTIVE,
      data_entry=_DATA_ENTRY,
  )
- _DICT = _dict_of(_TOKEN, _DATA)
+ _DICT = _dict_of(_TOKEN, Opt(_DATA, default=""))
  _LIST_ENTRY = _DICT | _KEYWORD_ENTRY | _DATA_ENTRY
  _LIST = _list_of(_LIST_ENTRY)
  _NUMBER = (
@@ -391,9 +445,14 @@ _DATA <<= _DATA_ENTRY[1, ...].set_parse_action(

  _STANDALONE_DATA = (
      _ascii_numeric_list(dtype=int, ignore=_COMMENT)
+     | _ascii_face_list(ignore=_COMMENT)
      | _ascii_numeric_list(dtype=float, nested=3, ignore=_COMMENT)
-     | _binary_numeric_list(dtype=np.int64)
-     | _binary_numeric_list(dtype=np.int32)
+     | (
+         _binary_numeric_list(dtype=np.int64) + Opt(_binary_numeric_list(dtype=np.int64))
+     ).add_parse_action(lambda tks: tuple(tks) if len(tks) > 1 else tks[0])
+     | (
+         _binary_numeric_list(dtype=np.int32) + Opt(_binary_numeric_list(dtype=np.int32))
+     ).add_parse_action(lambda tks: tuple(tks) if len(tks) > 1 else tks[0])
      | _binary_numeric_list(dtype=np.float64, nested=3)
      | _binary_numeric_list(dtype=np.float32, nested=3)
      | _DATA
@@ -406,17 +465,35 @@ _FILE = (
      .parse_with_tabs()
  )

+ _DATA_OR_DICT = (_DATA | _DICT).ignore(_COMMENT).parse_with_tabs()
+
+
+ @overload
+ def loads(s: bytes | str, *, keywords: tuple[()]) -> File | StandaloneData: ...

- def loads(s: bytes | str) -> File | Data:
+
+ @overload
+ def loads(
+     s: bytes | str, *, keywords: tuple[str, ...] | None = None
+ ) -> File | StandaloneData | Data | SubDict: ...
+
+
+ def loads(
+     s: bytes | str, *, keywords: tuple[str, ...] | None = None
+ ) -> File | StandaloneData | Data | SubDict:
      if isinstance(s, bytes):
          s = s.decode("latin-1")

-     file = _FILE.parse_string(s, parse_all=True).as_dict()
+     if keywords == ():
+         data = _FILE.parse_string(s, parse_all=True).as_dict()

-     if len(file) == 1 and None in file:
-         return file[None]  # type: ignore[no-any-return]
+         if len(data) == 1 and None in data:
+             data = data[None]
+
+     else:
+         data = _DATA_OR_DICT.parse_string(s, parse_all=True)[0]

-     return file
+     return data


  _LOCATED_KEYWORD_ENTRIES = Group(
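
The new `keywords` argument selects the grammar: `keywords=()` keeps the old whole-file behaviour (a `File` mapping or standalone data), while any other value parses a single entry or dictionary body via `_DATA_OR_DICT`, which is how `normalize_data` now round-trips string values. A rough sketch of the intent, using this internal helper (return values are only described, not asserted):

```python
from foamlib._files._parsing import loads  # internal helper, not public API

# Whole file or standalone data, as FoamFile.loads() now requests it:
parsed_file = loads(b"x 1;\ny (1 2 3);\n", keywords=())

# Entry-level parsing for a string that appears under a keyword:
entry = loads("uniform 0", keywords=("internalField",))
```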
@@ -441,11 +518,11 @@ _LOCATED_FILE = (
  )


- class Parsed(Mapping[Tuple[str, ...], Union[Data, EllipsisType]]):
+ class Parsed(Mapping[Tuple[str, ...], Union[Data, StandaloneData, EllipsisType]]):
      def __init__(self, contents: bytes) -> None:
          self._parsed: MutableMapping[
              tuple[str, ...],
-             tuple[int, Data | EllipsisType, int],
+             tuple[int, Data | StandaloneData | EllipsisType, int],
          ] = {}
          for parse_result in _LOCATED_FILE.parse_string(
              contents.decode("latin-1"), parse_all=True
@@ -458,10 +535,12 @@ class Parsed(Mapping[Tuple[str, ...], Union[Data, EllipsisType]]):
      @staticmethod
      def _flatten_result(
          parse_result: ParseResults, *, _keywords: tuple[str, ...] = ()
-     ) -> Mapping[tuple[str, ...], tuple[int, Data | EllipsisType, int]]:
+     ) -> Mapping[
+         tuple[str, ...], tuple[int, Data | StandaloneData | EllipsisType, int]
+     ]:
          ret: MutableMapping[
              tuple[str, ...],
-             tuple[int, Data | EllipsisType, int],
+             tuple[int, Data | StandaloneData | EllipsisType, int],
          ] = {}
          start = parse_result.locn_start
          assert isinstance(start, int)
@@ -487,14 +566,16 @@ class Parsed(Mapping[Tuple[str, ...], Union[Data, EllipsisType]]):
              ret[(*_keywords, keyword)] = (start, d, end)
          return ret

-     def __getitem__(self, keywords: tuple[str, ...]) -> Data | EllipsisType:
+     def __getitem__(
+         self, keywords: tuple[str, ...]
+     ) -> Data | StandaloneData | EllipsisType:
          _, data, _ = self._parsed[keywords]
          return data

      def put(
          self,
          keywords: tuple[str, ...],
-         data: Data | EllipsisType,
+         data: Data | StandaloneData | EllipsisType,
          content: bytes,
      ) -> None:
          start, end = self.entry_location(keywords, missing_ok=True)
foamlib/_files/_serialization.py CHANGED
@@ -16,6 +16,7 @@ from ._types import (
      DataLike,
      Dimensioned,
      DimensionSet,
+     KeywordEntryLike,
      StandaloneData,
      StandaloneDataLike,
      SubDict,
@@ -87,7 +88,7 @@ def normalize_data(
          if arr.ndim == 1 or (arr.ndim == 2 and arr.shape[1] in (3, 6, 9)):
              return arr  # type: ignore [return-value]

-         return [normalize_data(d) for d in data]  # type: ignore [arg-type, misc]
+         return [normalize_data(d) for d in data]  # type: ignore [arg-type, return-value]

      if isinstance(data, int):
          return float(data)
@@ -114,7 +115,7 @@ def normalize_data(
          assert not isinstance(k, Mapping)
          return (  # type: ignore [return-value]
              normalize_keyword(k),  # type: ignore [arg-type]
-             normalize_data(v) if not isinstance(v, Mapping) else v,  # type: ignore [arg-type, misc]
+             normalize_data(v) if not isinstance(v, Mapping) else v,  # type: ignore [arg-type]
          )

      if (
@@ -122,13 +123,13 @@ def normalize_data(
          and not isinstance(data, DimensionSet)
          and not isinstance(data, tuple)
      ):
-         return [normalize_data(d) for d in data]  # type: ignore [arg-type, misc]
+         return [normalize_data(d) for d in data]  # type: ignore [arg-type, return-value]

      if isinstance(data, tuple) and not isinstance(data, DimensionSet):
-         return tuple(normalize_data(d) for d in data)  # type: ignore [misc]
+         return tuple(normalize_data(d, keywords=keywords) for d in data)  # type: ignore [misc]

      if isinstance(data, str):
-         s = loads(data)
+         s = loads(data, keywords=keywords)
          if isinstance(s, (str, tuple, bool)):
              return s

@@ -152,11 +153,11 @@ def normalize_keyword(data: DataLike) -> Data:


  def dumps(
-     data: DataLike | StandaloneDataLike | SubDictLike,
+     data: DataLike | StandaloneDataLike | KeywordEntryLike | SubDictLike,
      *,
      keywords: tuple[str, ...] | None = None,
      header: SubDictLike | None = None,
-     tuple_is_entry: bool = False,
+     tuple_is_keyword_entry: bool = False,
  ) -> bytes:
      data = normalize_data(data, keywords=keywords)  # type: ignore [arg-type, misc]

@@ -167,7 +168,7 @@ def dumps(
              dumps(
                  (k, v),
                  keywords=keywords,
-                 tuple_is_entry=True,
+                 tuple_is_keyword_entry=True,
              )
              for k, v in data.items()
          )
@@ -245,7 +246,7 @@ def dumps(
          return dumps(data.dimensions) + b" " + dumps(data.value)

      if isinstance(data, tuple):
-         if tuple_is_entry:
+         if tuple_is_keyword_entry:
              k, v = data
              ret = b"\n" if isinstance(k, str) and k[0] == "#" else b""
              ret += dumps(k)
@@ -263,10 +264,12 @@ def dumps(
                  ret += b";"
              return ret

-         return b" ".join(dumps(v) for v in data)
+         return b" ".join(dumps(v, keywords=keywords, header=header) for v in data)

      if is_sequence(data):
-         return b"(" + b" ".join(dumps(v, tuple_is_entry=True) for v in data) + b")"  # type: ignore [arg-type]
+         return (
+             b"(" + b" ".join(dumps(v, tuple_is_keyword_entry=True) for v in data) + b")"
+         )

      if data is True:
          return b"yes"
foamlib/_files/_types.py CHANGED
@@ -198,6 +198,8 @@ FieldLike = Union[
      Sequence[TensorLike],
  ]

+ KeywordEntry = Tuple["DataEntry", Union["DataEntry", "SubDict"]]
+ KeywordEntryLike = Tuple["DataEntryLike", Union["DataEntryLike", "SubDictLike"]]

  DataEntry = Union[
      str,
@@ -206,16 +208,15 @@ DataEntry = Union[
      bool,
      Dimensioned,
      DimensionSet,
-     List[Union["DataEntry", Tuple["DataEntry", Union["DataEntry", "SubDict"]]]],
+     List[Union["DataEntry", KeywordEntry]],
      Field,
  ]
-
  DataEntryLike = Union[
      DataEntry,
      Sequence[
          Union[
              "DataEntryLike",
-             Tuple["DataEntryLike", Union["DataEntryLike", "SubDictLike"]],
+             "KeywordEntryLike",
          ]
      ],
      FieldLike,
@@ -225,7 +226,6 @@ Data = Union[
      DataEntry,
      Tuple[DataEntry, ...],
  ]
-
  DataLike = Union[
      DataEntryLike,
      Tuple["DataEntryLike", ...],
@@ -234,13 +234,19 @@ DataLike = Union[
  StandaloneData = Union[
      Data,
      "np.ndarray[tuple[int], np.dtype[np.int64 | np.int32]]",
-     "np.ndarray[tuple[int], np.dtype[np.float64 | np.float32]]",
+     "np.ndarray[tuple[int, int], np.dtype[np.float64 | np.float32]]",
+     List["np.ndarray[tuple[int], np.dtype[np.int64 | np.int32]]"],
+     Tuple[
+         "np.ndarray[tuple[int], np.dtype[np.int64 | np.int32]]",
+         "np.ndarray[tuple[int], np.dtype[np.int64 | np.int32]]",
+     ],
  ]
-
  StandaloneDataLike = Union[
+     StandaloneData,
      DataLike,
-     "np.ndarray[tuple[int], np.dtype[np.int64 | np.int32]]",
-     "np.ndarray[tuple[int], np.dtype[np.float64 | np.float32]]",
+     Sequence["np.ndarray[tuple[int], np.dtype[np.int64 | np.int32]]"],
+     Sequence[Sequence[int]],
+     Tuple[Sequence[int], Sequence[int]],
  ]

@@ -259,5 +265,6 @@ SubDict = Dict[str, Union[Data, "SubDict"]]
  SubDictLike = Mapping[str, Union[DataLike, "SubDictLike"]]
  MutableSubDict = MutableMapping[str, Union[Data, "MutableSubDict"]]

- File = Dict[Optional[str], Union[StandaloneData, Data, "SubDict"]]
- FileLike = Mapping[Optional[str], Union[StandaloneDataLike, DataLike, "FileLike"]]
+
+ File = Dict[Optional[str], Union[StandaloneData, Data, SubDict]]
+ FileLike = Mapping[Optional[str], Union[StandaloneDataLike, DataLike, SubDictLike]]
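
For orientation, some values that fit the widened `StandaloneData` alias after this change; the pairing of two integer arrays mirrors the optional second binary list added to `_STANDALONE_DATA` above, and the concrete numbers are made up:

```python
import numpy as np

labels = np.array([0, 1, 2, 3])                          # 1-D integer array
points = np.zeros((4, 3))                                # float array, now typed as 2-D
faces = [np.array([0, 1, 2]), np.array([0, 1, 2, 3])]    # list of integer index arrays
pair = (np.array([0, 3, 7]), np.array([0, 1, 2, 3]))     # tuple of two integer arrays
```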
foamlib-0.9.6.dist-info/METADATA → foamlib-1.0.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: foamlib
- Version: 0.9.6
+ Version: 1.0.0
  Summary: A Python interface for interacting with OpenFOAM
  Project-URL: Homepage, https://github.com/gerlero/foamlib
  Project-URL: Repository, https://github.com/gerlero/foamlib
foamlib-0.9.6.dist-info/RECORD → foamlib-1.0.0.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
- foamlib/__init__.py,sha256=vCYvM91T-aVKjhzaBbxMdeEvOdQ_0dFOLV5vLi7yMoU,452
+ foamlib/__init__.py,sha256=SRh8WyeRM2a0qihkJBUyUH1n5DEfiMnAhzMNmbbKB3Y,452
  foamlib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  foamlib/_cases/__init__.py,sha256=_A1TTHuQfS9FH2_33lSEyLtOJZGFHZBco1tWJCVOHks,358
  foamlib/_cases/_async.py,sha256=1NuBaKa7NC-320SFNYW7JWZ5rAi344br_SoEdl64dmo,11797
@@ -9,12 +9,12 @@ foamlib/_cases/_subprocess.py,sha256=VHV2SuOLqa711an6kCuvN6UlIkeh4qqFfdrpNoKzQps
  foamlib/_cases/_sync.py,sha256=lsgJV2dMAAmmsiJMtzqy1bhW3yAZQOUMXh3h8jNqyes,9799
  foamlib/_cases/_util.py,sha256=QCizfbuJdOCeF9ogU2R-y-iWX5kfaOA4U2W68t6QlOM,2544
  foamlib/_files/__init__.py,sha256=q1vkjXnjnSZvo45jPAICpWeF2LZv5V6xfzAR6S8fS5A,96
- foamlib/_files/_files.py,sha256=HwK7ptuxmbpymXaAr6Lsd1g-qc6trGZSC1Ch1wVHQFQ,22863
+ foamlib/_files/_files.py,sha256=uMCn4kNdVJBbcEl7sTSDn9bpc6JUZtNUBbyio7oMqSg,24346
  foamlib/_files/_io.py,sha256=BGbbm6HKxL2ka0YMCmHqZQZ1R4PPQlkvWWb4FHMAS8k,2217
- foamlib/_files/_parsing.py,sha256=AOrAEhX418ExfuX-_gKezQPLpu6B-fZmAVpJ1EVMWGw,17718
- foamlib/_files/_serialization.py,sha256=P7u2EUx0OwvfVYinp46CpzdjGYGXJVN0-xXXuOYogfA,8020
- foamlib/_files/_types.py,sha256=eNVxuK_NDRqh0mrTcseuAD3lqn4VEBGVUYhXn-T1zEU,7884
- foamlib-0.9.6.dist-info/METADATA,sha256=V2i6Gb4Yl07tXbwKGNLNUyi3LN6ozr4cl79CUWeuK-w,8701
- foamlib-0.9.6.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- foamlib-0.9.6.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
- foamlib-0.9.6.dist-info/RECORD,,
+ foamlib/_files/_parsing.py,sha256=zLRXwv9PEil-vlIr1QiIEw8bhanRQ_vbVIEdTHv4bdI,20534
+ foamlib/_files/_serialization.py,sha256=kQfPfuTXtc9jryQdieCbAX0-8_Oz__vY_kr7uH9f_rU,8172
+ foamlib/_files/_types.py,sha256=7reA_TjRjCFV3waQVaGaYWURFoN8u92ao-NH9rESiAk,8202
+ foamlib-1.0.0.dist-info/METADATA,sha256=Hfbzls1UGXMQQBRiTvHwAiCeKt5mxMwjqpMpmvG6l7M,8701
+ foamlib-1.0.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ foamlib-1.0.0.dist-info/licenses/LICENSE.txt,sha256=5Dte9TUnLZzPRs4NQzl-Jc2-Ljd-t_v0ZR5Ng5r0UsY,35131
+ foamlib-1.0.0.dist-info/RECORD,,