anemoi-utils 0.4.22__py3-none-any.whl → 0.4.24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of anemoi-utils might be problematic; consult the package's advisory page in the registry for more details.

anemoi/utils/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
17
17
  __version_tuple__: VERSION_TUPLE
18
18
  version_tuple: VERSION_TUPLE
19
19
 
20
- __version__ = version = '0.4.22'
21
- __version_tuple__ = version_tuple = (0, 4, 22)
20
+ __version__ = version = '0.4.24'
21
+ __version_tuple__ = version_tuple = (0, 4, 24)
@@ -47,8 +47,11 @@ def lookup_git_repo(path: str) -> Optional[Any]:
47
47
  Repo, optional
48
48
  The git repository if found, otherwise None.
49
49
  """
50
- from git import InvalidGitRepositoryError
51
- from git import Repo
50
+ try:
51
+ from git import InvalidGitRepositoryError
52
+ from git import Repo
53
+ except ImportError:
54
+ return None
52
55
 
53
56
  while path != "/":
54
57
  try:
anemoi/utils/remote/s3.py CHANGED
@@ -1,10 +1,13 @@
1
- # (C) Copyright 2024 European Centre for Medium-Range Weather Forecasts.
1
+ # (C) Copyright 2024-2025 Anemoi contributors.
2
+ #
2
3
  # This software is licensed under the terms of the Apache Licence Version 2.0
3
4
  # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0.
5
+ #
4
6
  # In applying this licence, ECMWF does not waive the privileges and immunities
5
7
  # granted to it by virtue of its status as an intergovernmental organisation
6
8
  # nor does it submit to any jurisdiction.
7
9
 
10
+
8
11
  """This module provides functions to upload, download, list and delete files and folders on S3.
9
12
  The functions of this package expect that the AWS credentials are set up in the environment
10
13
  typicaly by setting the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables or
@@ -21,6 +24,7 @@ the `~/.config/anemoi/settings.toml`
21
24
  or `~/.config/anemoi/settings-secrets.toml` files.
22
25
  """
23
26
 
27
+ import fnmatch
24
28
  import logging
25
29
  import os
26
30
  import threading
@@ -35,15 +39,15 @@ from ..humanize import bytes_to_human
35
39
  from . import BaseDownload
36
40
  from . import BaseUpload
37
41
 
38
- LOGGER = logging.getLogger(__name__)
39
-
42
+ LOG = logging.getLogger(__name__)
43
+ SECRETS = ["aws_access_key_id", "aws_secret_access_key"]
40
44
 
41
45
  # s3_clients are not thread-safe, so we need to create a new client for each thread
42
46
 
43
47
  thread_local = threading.local()
44
48
 
45
49
 
46
- def s3_client(bucket: str, region: str = None) -> Any:
50
+ def s3_client(bucket: str, *, region: str = None, service: str = "s3") -> Any:
47
51
  """Get an S3 client for the specified bucket and region.
48
52
 
49
53
  Parameters
@@ -52,6 +56,8 @@ def s3_client(bucket: str, region: str = None) -> Any:
52
56
  The name of the S3 bucket.
53
57
  region : str, optional
54
58
  The AWS region of the S3 bucket.
59
+ service : str, optional
60
+ The AWS service to use, default is "s3".
55
61
 
56
62
  Returns
57
63
  -------
@@ -65,14 +71,16 @@ def s3_client(bucket: str, region: str = None) -> Any:
65
71
  if not hasattr(thread_local, "s3_clients"):
66
72
  thread_local.s3_clients = {}
67
73
 
68
- key = f"{bucket}-{region}"
69
-
70
- boto3_config = dict(max_pool_connections=25)
74
+ key = f"{bucket}-{region}-{service}"
71
75
 
72
76
  if key in thread_local.s3_clients:
73
77
  return thread_local.s3_clients[key]
74
78
 
75
- boto3_config = dict(max_pool_connections=25)
79
+ boto3_config = dict(
80
+ max_pool_connections=25,
81
+ request_checksum_calculation="when_required",
82
+ response_checksum_validation="when_required",
83
+ )
76
84
 
77
85
  if region:
78
86
  # This is using AWS
@@ -91,17 +99,27 @@ def s3_client(bucket: str, region: str = None) -> Any:
91
99
  # We may be accessing a different S3 compatible service
92
100
  # Use anemoi.config to get the configuration
93
101
 
94
- options = {}
95
- config = load_config(secrets=["aws_access_key_id", "aws_secret_access_key"])
102
+ region = "unknown-region"
103
+
104
+ options = {"region_name": region}
105
+ config = load_config(secrets=SECRETS)
96
106
 
97
107
  cfg = config.get("object-storage", {})
108
+ candidate = None
98
109
  for k, v in cfg.items():
99
110
  if isinstance(v, (str, int, float, bool)):
100
111
  options[k] = v
101
112
 
102
- for k, v in cfg.get(bucket, {}).items():
103
- if isinstance(v, (str, int, float, bool)):
104
- options[k] = v
113
+ if isinstance(v, dict):
114
+ if fnmatch.fnmatch(bucket, k):
115
+ if candidate is not None:
116
+ raise ValueError(f"Multiple object storage configurations match {bucket}: {candidate} and {k}")
117
+ candidate = k
118
+
119
+ if candidate is not None:
120
+ for k, v in cfg.get(candidate, {}).items():
121
+ if isinstance(v, (str, int, float, bool)):
122
+ options[k] = v
105
123
 
106
124
  type = options.pop("type", "s3")
107
125
  if type != "s3":
@@ -110,11 +128,27 @@ def s3_client(bucket: str, region: str = None) -> Any:
110
128
  if "config" in options:
111
129
  boto3_config.update(options["config"])
112
130
  del options["config"]
113
- from botocore.client import Config
114
131
 
115
132
  options["config"] = Config(**boto3_config)
116
133
 
117
- thread_local.s3_clients[key] = boto3.client("s3", **options)
134
+ def _(options):
135
+
136
+ def __(k, v):
137
+ if k in SECRETS:
138
+ return "***"
139
+ return v
140
+
141
+ if isinstance(options, dict):
142
+ return {k: __(k, v) for k, v in options.items()}
143
+
144
+ if isinstance(options, list):
145
+ return [_(o) for o in options]
146
+
147
+ return options
148
+
149
+ LOG.info(f"Using S3 options: {_(options)}")
150
+
151
+ thread_local.s3_clients[key] = boto3.client(service, **options)
118
152
 
119
153
  return thread_local.s3_clients[key]
120
154
 
@@ -162,7 +196,14 @@ class S3Upload(BaseUpload):
162
196
  # delete(target)
163
197
 
164
198
  def _transfer_file(
165
- self, source: str, target: str, overwrite: bool, resume: bool, verbosity: int, threads: int, config: dict = None
199
+ self,
200
+ source: str,
201
+ target: str,
202
+ overwrite: bool,
203
+ resume: bool,
204
+ verbosity: int,
205
+ threads: int,
206
+ config: dict = None,
166
207
  ) -> int:
167
208
  """Transfer a file to S3.
168
209
 
@@ -203,7 +244,7 @@ class S3Upload(BaseUpload):
203
244
  size = os.path.getsize(source)
204
245
 
205
246
  if verbosity > 0:
206
- LOGGER.info(f"{self.action} {source} to {target} ({bytes_to_human(size)})")
247
+ LOG.info(f"{self.action} {source} to {target} ({bytes_to_human(size)})")
207
248
 
208
249
  try:
209
250
  results = s3.head_object(Bucket=bucket, Key=key)
@@ -215,7 +256,7 @@ class S3Upload(BaseUpload):
215
256
 
216
257
  if remote_size is not None:
217
258
  if remote_size != size:
218
- LOGGER.warning(
259
+ LOG.warning(
219
260
  f"{target} already exists, but with different size, re-uploading (remote={remote_size}, local={size})"
220
261
  )
221
262
  elif resume:
@@ -227,7 +268,13 @@ class S3Upload(BaseUpload):
227
268
 
228
269
  if verbosity > 0:
229
270
  with tqdm.tqdm(total=size, unit="B", unit_scale=True, unit_divisor=1024, leave=False) as pbar:
230
- s3.upload_file(source, bucket, key, Callback=lambda x: pbar.update(x), Config=config)
271
+ s3.upload_file(
272
+ source,
273
+ bucket,
274
+ key,
275
+ Callback=lambda x: pbar.update(x),
276
+ Config=config,
277
+ )
231
278
  else:
232
279
  s3.upload_file(source, bucket, key, Config=config)
233
280
 
@@ -326,7 +373,14 @@ class S3Download(BaseDownload):
326
373
  return s3_object["Size"]
327
374
 
328
375
  def _transfer_file(
329
- self, source: str, target: str, overwrite: bool, resume: bool, verbosity: int, threads: int, config: dict = None
376
+ self,
377
+ source: str,
378
+ target: str,
379
+ overwrite: bool,
380
+ resume: bool,
381
+ verbosity: int,
382
+ threads: int,
383
+ config: dict = None,
330
384
  ) -> int:
331
385
  """Transfer a file from S3 to the local filesystem.
332
386
 
@@ -375,7 +429,7 @@ class S3Download(BaseDownload):
375
429
  size = int(response["ContentLength"])
376
430
 
377
431
  if verbosity > 0:
378
- LOGGER.info(f"{self.action} {source} to {target} ({bytes_to_human(size)})")
432
+ LOG.info(f"{self.action} {source} to {target} ({bytes_to_human(size)})")
379
433
 
380
434
  if overwrite:
381
435
  resume = False
@@ -384,7 +438,7 @@ class S3Download(BaseDownload):
384
438
  if os.path.exists(target):
385
439
  local_size = os.path.getsize(target)
386
440
  if local_size != size:
387
- LOGGER.warning(
441
+ LOG.warning(
388
442
  f"{target} already with different size, re-downloading (remote={size}, local={local_size})"
389
443
  )
390
444
  else:
@@ -397,7 +451,13 @@ class S3Download(BaseDownload):
397
451
 
398
452
  if verbosity > 0:
399
453
  with tqdm.tqdm(total=size, unit="B", unit_scale=True, unit_divisor=1024, leave=False) as pbar:
400
- s3.download_file(bucket, key, target, Callback=lambda x: pbar.update(x), Config=config)
454
+ s3.download_file(
455
+ bucket,
456
+ key,
457
+ target,
458
+ Callback=lambda x: pbar.update(x),
459
+ Config=config,
460
+ )
401
461
  else:
402
462
  s3.download_file(bucket, key, target, Config=config)
403
463
 
@@ -433,7 +493,7 @@ def _list_objects(target: str, batch: bool = False) -> Iterable:
433
493
  yield from objects
434
494
 
435
495
 
436
- def _delete_folder(target: str) -> None:
496
+ def delete_folder(target: str) -> None:
437
497
  """Delete a folder from S3.
438
498
 
439
499
  Parameters
@@ -446,13 +506,13 @@ def _delete_folder(target: str) -> None:
446
506
 
447
507
  total = 0
448
508
  for batch in _list_objects(target, batch=True):
449
- LOGGER.info(f"Deleting {len(batch):,} objects from {target}")
509
+ LOG.info(f"Deleting {len(batch):,} objects from {target}")
450
510
  s3.delete_objects(Bucket=bucket, Delete={"Objects": [{"Key": o["Key"]} for o in batch]})
451
511
  total += len(batch)
452
- LOGGER.info(f"Deleted {len(batch):,} objects (total={total:,})")
512
+ LOG.info(f"Deleted {len(batch):,} objects (total={total:,})")
453
513
 
454
514
 
455
- def _delete_file(target: str) -> None:
515
+ def delete_file(target: str) -> None:
456
516
  """Delete a file from S3.
457
517
 
458
518
  Parameters
@@ -474,12 +534,12 @@ def _delete_file(target: str) -> None:
474
534
  exits = False
475
535
 
476
536
  if not exits:
477
- LOGGER.warning(f"{target} does not exist. Did you mean to delete a folder? Then add a trailing '/'")
537
+ LOG.warning(f"{target} does not exist. Did you mean to delete a folder? Then add a trailing '/'")
478
538
  return
479
539
 
480
- LOGGER.info(f"Deleting {target}")
540
+ LOG.info(f"Deleting {target}")
481
541
  s3.delete_object(Bucket=bucket, Key=key)
482
- LOGGER.info(f"{target} is deleted")
542
+ LOG.info(f"{target} is deleted")
483
543
 
484
544
 
485
545
  def delete(target: str) -> None:
@@ -494,9 +554,9 @@ def delete(target: str) -> None:
494
554
  assert target.startswith("s3://")
495
555
 
496
556
  if target.endswith("/"):
497
- _delete_folder(target)
557
+ delete_folder(target)
498
558
  else:
499
- _delete_file(target)
559
+ delete_file(target)
500
560
 
501
561
 
502
562
  def list_folder(folder: str) -> Iterable:
@@ -524,7 +584,9 @@ def list_folder(folder: str) -> Iterable:
524
584
 
525
585
  for page in paginator.paginate(Bucket=bucket, Prefix=prefix, Delimiter="/"):
526
586
  if "CommonPrefixes" in page:
527
- yield from [folder + _["Prefix"] for _ in page.get("CommonPrefixes")]
587
+ yield from [folder + _["Prefix"] for _ in page.get("CommonPrefixes") if _["Prefix"] != "/"]
588
+ if "Contents" in page:
589
+ yield from [folder + _["Key"] for _ in page.get("Contents")]
528
590
 
529
591
 
530
592
  def object_info(target: str) -> dict:
@@ -548,7 +610,33 @@ def object_info(target: str) -> dict:
548
610
  return s3.head_object(Bucket=bucket, Key=key)
549
611
  except s3.exceptions.ClientError as e:
550
612
  if e.response["Error"]["Code"] == "404":
551
- raise ValueError(f"{target} does not exist")
613
+ raise FileNotFoundError(f"{target} does not exist")
614
+ raise
615
+
616
+
617
+ def object_exists(target: str) -> bool:
618
+ """Check if an object exists.
619
+
620
+ Parameters
621
+ ----------
622
+ target : str
623
+ The URL of a file or a folder on S3. The URL should start with 's3://'.
624
+
625
+ Returns
626
+ -------
627
+ bool
628
+ True if the object exists, False otherwise.
629
+ """
630
+
631
+ _, _, bucket, key = target.split("/", 3)
632
+ s3 = s3_client(bucket)
633
+
634
+ try:
635
+ s3.head_object(Bucket=bucket, Key=key)
636
+ return True
637
+ except s3.exceptions.ClientError as e:
638
+ if e.response["Error"]["Code"] == "404":
639
+ return False
552
640
  raise
553
641
 
554
642
 
@@ -567,7 +655,7 @@ def object_acl(target: str) -> dict:
567
655
  """
568
656
 
569
657
  _, _, bucket, key = target.split("/", 3)
570
- s3 = s3_client()
658
+ s3 = s3_client(bucket)
571
659
 
572
660
  return s3.get_object_acl(Bucket=bucket, Key=key)
573
661
 
@@ -610,3 +698,29 @@ def upload(source: str, target: str, *args, **kwargs) -> None:
610
698
 
611
699
  assert target.startswith("s3://"), f"target {target} should start with 's3://'"
612
700
  return transfer(source, target, *args, **kwargs)
701
+
702
+
703
+ def quotas(target: str) -> dict:
704
+ """Get the quotas for an S3 bucket.
705
+
706
+ Parameters
707
+ ----------
708
+ target : str
709
+ The URL of a file or a folder on S3. The URL should start with 's3://'.
710
+
711
+ Returns
712
+ -------
713
+ dict
714
+ A dictionary with the quotas for the bucket.
715
+ """
716
+ from botocore.exceptions import ClientError
717
+
718
+ _, _, bucket, _ = target.split("/", 3)
719
+ s3 = s3_client(bucket, service="service-quotas")
720
+
721
+ try:
722
+ return s3.list_service_quotas(ServiceCode="ec2")
723
+ except ClientError as e:
724
+ if e.response["Error"]["Code"] == "404":
725
+ raise ValueError(f"{target} does not exist")
726
+ raise
anemoi/utils/testing.py CHANGED
@@ -261,12 +261,12 @@ def _run_slow_tests() -> bool:
261
261
  @lru_cache(maxsize=None)
262
262
  def _offline() -> bool:
263
263
  """Check if we are offline."""
264
-
265
- import socket
264
+ from urllib import request
266
265
 
267
266
  try:
268
- socket.create_connection(("anemoi.ecmwf.int", 443), timeout=5)
269
- except OSError:
267
+ request.urlopen("https://anemoi.ecmwf.int", timeout=1)
268
+ return False
269
+ except request.URLError:
270
270
  return True
271
271
 
272
272
  return False
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: anemoi-utils
3
- Version: 0.4.22
3
+ Version: 0.4.24
4
4
  Summary: A package to hold various functions to support training of ML models on ECMWF data.
5
5
  Author-email: "European Centre for Medium-Range Weather Forecasts (ECMWF)" <software.support@ecmwf.int>
6
6
  License: Apache License
@@ -252,7 +252,7 @@ Provides-Extra: provenance
252
252
  Requires-Dist: gitpython; extra == "provenance"
253
253
  Requires-Dist: nvsmi; extra == "provenance"
254
254
  Provides-Extra: s3
255
- Requires-Dist: boto3<1.36; extra == "s3"
255
+ Requires-Dist: boto3>1.36; extra == "s3"
256
256
  Provides-Extra: tests
257
257
  Requires-Dist: pytest; extra == "tests"
258
258
  Provides-Extra: text
@@ -1,6 +1,6 @@
1
1
  anemoi/utils/__init__.py,sha256=uVhpF-VjIl_4mMywOVtgTutgsdIsqz-xdkwxeMhzuag,730
2
2
  anemoi/utils/__main__.py,sha256=6LlE4MYrPvqqrykxXh7XMi50UZteUY59NeM8P9Zs2dU,910
3
- anemoi/utils/_version.py,sha256=ncPL58_Lzy93I55YUff3cwMUk7Kbpd4bhXC7ujoaqGw,513
3
+ anemoi/utils/_version.py,sha256=eqPTx1mit5QVY_CIZPeIYTsnPPxmLZEDtDv2l58DdBE,513
4
4
  anemoi/utils/caching.py,sha256=rXbeAmpBcMbbfN4EVblaHWKicsrtx1otER84FEBtz98,6183
5
5
  anemoi/utils/checkpoints.py,sha256=N4WpAZXa4etrpSEKhHqUUtG2-x9w3FJMHcLO-dDAXPY,9600
6
6
  anemoi/utils/cli.py,sha256=IyZfnSw0u0yYnrjOrzvm2RuuKvDk4cVb8pf8BkaChgA,6209
@@ -13,13 +13,13 @@ anemoi/utils/grids.py,sha256=uYgkU_KIg8FTUiuKV0Pho2swMMeXcSQ9CQe0MFlRr_I,5262
13
13
  anemoi/utils/hindcasts.py,sha256=iYVIxSNFL2HJcc_k1abCFLkpJFGHT8WKRIR4wcAwA3s,2144
14
14
  anemoi/utils/humanize.py,sha256=pjnFJAKHbEAOfcvn8c48kt-8eFy6FGW_U2ruJvfamrA,25189
15
15
  anemoi/utils/logs.py,sha256=naTgrmPwWHD4eekFttXftS4gtcAGYHpCqG4iwYprNDA,1804
16
- anemoi/utils/provenance.py,sha256=xC6mTstF7f_asqtPSrulC7c34xjOSuAxWhkwc3yKhHg,14629
16
+ anemoi/utils/provenance.py,sha256=iTsn4r-VPq2D8tSHPSuAIqG077_opkqMT42G03DRWJg,14690
17
17
  anemoi/utils/registry.py,sha256=e3nOIRyMYQ-mpEvaHAv5tuvMYNbkJ5yz94ns7BnvkjM,9717
18
18
  anemoi/utils/rules.py,sha256=VspUoPmw7tijrs6l_wl4vDjr_zVQsFjx9ITiBSvxgc8,6972
19
19
  anemoi/utils/s3.py,sha256=xMT48kbcelcjjqsaU567WI3oZ5eqo88Rlgyx5ECszAU,4074
20
20
  anemoi/utils/sanitise.py,sha256=ZYGdSX6qihQANr3pHZjbKnoapnzP1KcrWdW1Ul1mOGk,3668
21
21
  anemoi/utils/sanitize.py,sha256=43ZKDcfVpeXSsJ9TFEc9aZnD6oe2cUh151XnDspM98M,462
22
- anemoi/utils/testing.py,sha256=psfHfluNqXa-cXSDF4xD7xIQCVPvBGDNCWWZ-yVh_24,7750
22
+ anemoi/utils/testing.py,sha256=kwgAgLh3exYOTZSaX4xcPFjiMOyQDz-vcAlPcJqMiZk,7784
23
23
  anemoi/utils/text.py,sha256=HkzIvi24obDceFLpJEwBJ9PmPrJUkQN2TrElJ-A87gU,14441
24
24
  anemoi/utils/timer.py,sha256=_leKMYza2faM7JKlGE7LCNy13rbdPnwaCF7PSrI_NmI,3895
25
25
  anemoi/utils/commands/__init__.py,sha256=5u_6EwdqYczIAgJfCwRSyQAYFEqh2ZuHHT57g9g7sdI,808
@@ -29,13 +29,13 @@ anemoi/utils/mars/__init__.py,sha256=b-Lc3L1TAQd9ODs0Z1YSJzgZCO1K_M3DSgx_yd2qXvM
29
29
  anemoi/utils/mars/mars.yaml,sha256=R0dujp75lLA4wCWhPeOQnzJ45WZAYLT8gpx509cBFlc,66
30
30
  anemoi/utils/mars/requests.py,sha256=VFMHBVAAl0_2lOcMBa1lvaKHctN0lDJsI6_U4BucGew,1142
31
31
  anemoi/utils/remote/__init__.py,sha256=swPWHQoh-B6Xq9R489tPw0FykMue7f-bJ8enneFYSYE,20776
32
- anemoi/utils/remote/s3.py,sha256=spQ8l0rwQjLZh9dZu5cOsYIvNwKihQfCJ6YsFYegeqI,17339
32
+ anemoi/utils/remote/s3.py,sha256=lhLxwBU-AKERzYURXJ-cOlaccEYmjoXqlvX3zYu5h-8,19977
33
33
  anemoi/utils/remote/ssh.py,sha256=xNtsawh8okytCKRehkRCVExbHZj-CRUQNormEHglfuw,8088
34
34
  anemoi/utils/schemas/__init__.py,sha256=nkinKlsPLPXEjfTYQT1mpKC4cvs-14w_zBkDRxakwxw,698
35
35
  anemoi/utils/schemas/errors.py,sha256=lgOXzVTYzAE0qWQf3OZ42vCWixv8lilSqLLhzARBmvI,1831
36
- anemoi_utils-0.4.22.dist-info/licenses/LICENSE,sha256=8HznKF1Vi2IvfLsKNE5A2iVyiri3pRjRPvPC9kxs6qk,11354
37
- anemoi_utils-0.4.22.dist-info/METADATA,sha256=DOdoppZLABKmpmK5DAgYtEl5BoKs7zMkWmquG6qFfgo,15439
38
- anemoi_utils-0.4.22.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
39
- anemoi_utils-0.4.22.dist-info/entry_points.txt,sha256=LENOkn88xzFQo-V59AKoA_F_cfYQTJYtrNTtf37YgHY,60
40
- anemoi_utils-0.4.22.dist-info/top_level.txt,sha256=DYn8VPs-fNwr7fNH9XIBqeXIwiYYd2E2k5-dUFFqUz0,7
41
- anemoi_utils-0.4.22.dist-info/RECORD,,
36
+ anemoi_utils-0.4.24.dist-info/licenses/LICENSE,sha256=8HznKF1Vi2IvfLsKNE5A2iVyiri3pRjRPvPC9kxs6qk,11354
37
+ anemoi_utils-0.4.24.dist-info/METADATA,sha256=EsIP40bMdXsSEAp1v4iFkKMrzdVaE4Cw892SLNlzuWs,15439
38
+ anemoi_utils-0.4.24.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
39
+ anemoi_utils-0.4.24.dist-info/entry_points.txt,sha256=LENOkn88xzFQo-V59AKoA_F_cfYQTJYtrNTtf37YgHY,60
40
+ anemoi_utils-0.4.24.dist-info/top_level.txt,sha256=DYn8VPs-fNwr7fNH9XIBqeXIwiYYd2E2k5-dUFFqUz0,7
41
+ anemoi_utils-0.4.24.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (78.1.0)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5