anemoi-utils 0.4.4-py3-none-any.whl → 0.4.6-py3-none-any.whl

This diff compares the contents of package versions publicly released to a supported registry, and is provided for informational purposes only.

Potentially problematic release.


This version of anemoi-utils might be problematic.

anemoi/utils/s3.py CHANGED
@@ -7,554 +7,57 @@
 # granted to it by virtue of its status as an intergovernmental organisation
 # nor does it submit to any jurisdiction.

+import warnings

-"""This module provides functions to upload, download, list and delete files and folders on S3.
-The functions of this package expect that the AWS credentials are set up in the environment
-typicaly by setting the `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` environment variables or
-by creating a `~/.aws/credentials` file. It is also possible to set the `endpoint_url` in the same file
-to use a different S3 compatible service::
+from .remote import transfer
+from .remote.s3 import delete as delete_
+from .remote.s3 import s3_client as s3_client_

-    [default]
-    endpoint_url = https://some-storage.somewhere.world
-    aws_access_key_id = xxxxxxxxxxxxxxxxxxxxxxxx
-    aws_secret_access_key = xxxxxxxxxxxxxxxxxxxxxxxx
+warnings.warn(
+    "The anemoi.utils.s3 module is deprecated and will be removed in a future release. "
+    "Please use the 'anemoi.utils.remote' or 'anemoi.utils.remote.s3' module instead.",
+    DeprecationWarning,
+    stacklevel=2,
+)

-Alternatively, the `endpoint_url`, and keys can be set in one of
-the `~/.config/anemoi/settings.toml`
-or `~/.config/anemoi/settings-secrets.toml` files.

-"""
-
-import concurrent.futures
-import logging
-import os
-import threading
-from copy import deepcopy
-
-import tqdm
-
-from .config import load_config
-from .humanize import bytes_to_human
-
-LOGGER = logging.getLogger(__name__)
-
-
-# s3_clients are not thread-safe, so we need to create a new client for each thread
-
-thread_local = threading.local()
-
-
-def s3_client(bucket, region=None):
-    import boto3
-    from botocore import UNSIGNED
-    from botocore.client import Config
-
-    if not hasattr(thread_local, "s3_clients"):
-        thread_local.s3_clients = {}
-
-    key = f"{bucket}-{region}"
-
-    boto3_config = dict(max_pool_connections=25)
-
-    if key in thread_local.s3_clients:
-        return thread_local.s3_clients[key]
-
-    boto3_config = dict(max_pool_connections=25)
-
-    if region:
-        # This is using AWS
-
-        options = {"region_name": region}
-
-        # Anonymous access
-        if not (
-            os.path.exists(os.path.expanduser("~/.aws/credentials"))
-            or ("AWS_ACCESS_KEY_ID" in os.environ and "AWS_SECRET_ACCESS_KEY" in os.environ)
-        ):
-            boto3_config["signature_version"] = UNSIGNED
-
-    else:
-
-        # We may be accessing a different S3 compatible service
-        # Use anemoi.config to get the configuration
-
-        options = {}
-        config = load_config(secrets=["aws_access_key_id", "aws_secret_access_key"])
-
-        cfg = config.get("object-storage", {})
-        for k, v in cfg.items():
-            if isinstance(v, (str, int, float, bool)):
-                options[k] = v
-
-        for k, v in cfg.get(bucket, {}).items():
-            if isinstance(v, (str, int, float, bool)):
-                options[k] = v
-
-        type = options.pop("type", "s3")
-        if type != "s3":
-            raise ValueError(f"Unsupported object storage type {type}")
-
-        if "config" in options:
-            boto3_config.update(options["config"])
-            del options["config"]
-    from botocore.client import Config
-
-    options["config"] = Config(**boto3_config)
-
-    thread_local.s3_clients[key] = boto3.client("s3", **options)
-
-    return thread_local.s3_clients[key]
-
-
-def _ignore(number_of_files, total_size, total_transferred, transfering):
-    pass
-
-
-class Transfer:
-
-    def transfer_folder(self, *, source, target, overwrite=False, resume=False, verbosity=1, threads=1, progress=None):
-        assert verbosity == 1, verbosity
-
-        if progress is None:
-            progress = _ignore
-
-        # from boto3.s3.transfer import TransferConfig
-        # config = TransferConfig(use_threads=False)
-        config = None
-        with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
-            try:
-                if verbosity > 0:
-                    LOGGER.info(f"{self.action} {source} to {target}")
-
-                total_size = 0
-                total_transferred = 0
-
-                futures = []
-                for name in self.list_source(source):
-
-                    futures.append(
-                        executor.submit(
-                            self.transfer_file,
-                            source=self.source_path(name, source),
-                            target=self.target_path(name, source, target),
-                            overwrite=overwrite,
-                            resume=resume,
-                            verbosity=verbosity - 1,
-                            config=config,
-                        )
-                    )
-                    total_size += self.source_size(name)
-
-                    if len(futures) % 10000 == 0:
-
-                        progress(len(futures), total_size, 0, False)
-
-                        if verbosity > 0:
-                            LOGGER.info(f"Preparing transfer, {len(futures):,} files... ({bytes_to_human(total_size)})")
-                        done, _ = concurrent.futures.wait(
-                            futures,
-                            timeout=0.001,
-                            return_when=concurrent.futures.FIRST_EXCEPTION,
-                        )
-                        # Trigger exceptions if any
-                        for future in done:
-                            future.result()
-
-                number_of_files = len(futures)
-                progress(number_of_files, total_size, 0, True)
-
-                if verbosity > 0:
-                    LOGGER.info(f"{self.action} {number_of_files:,} files ({bytes_to_human(total_size)})")
-                    with tqdm.tqdm(total=total_size, unit="B", unit_scale=True, unit_divisor=1024) as pbar:
-                        for future in concurrent.futures.as_completed(futures):
-                            size = future.result()
-                            pbar.update(size)
-                            total_transferred += size
-                            progress(number_of_files, total_size, total_transferred, True)
-                else:
-                    for future in concurrent.futures.as_completed(futures):
-                        size = future.result()
-                        total_transferred += size
-                        progress(number_of_files, total_size, total_transferred, True)
-
-            except Exception:
-                executor.shutdown(wait=False, cancel_futures=True)
-                raise
-
-
-class Upload(Transfer):
-    action = "Uploading"
-
-    def list_source(self, source):
-        for root, _, files in os.walk(source):
-            for file in files:
-                yield os.path.join(root, file)
-
-    def source_path(self, local_path, source):
-        return local_path
-
-    def target_path(self, source_path, source, target):
-        relative_path = os.path.relpath(source_path, source)
-        s3_path = os.path.join(target, relative_path)
-        return s3_path
-
-    def source_size(self, local_path):
-        return os.path.getsize(local_path)
-
-    def transfer_file(self, source, target, overwrite, resume, verbosity, progress=None, config=None):
-        try:
-            return self._transfer_file(source, target, overwrite, resume, verbosity, config=config)
-        except Exception as e:
-            LOGGER.exception(f"Error transferring {source} to {target}")
-            LOGGER.error(e)
-            raise
-
-    def _transfer_file(self, source, target, overwrite, resume, verbosity, config=None):
-
-        from botocore.exceptions import ClientError
-
-        assert target.startswith("s3://")
-
-        _, _, bucket, key = target.split("/", 3)
-        s3 = s3_client(bucket)
-
-        size = os.path.getsize(source)
-
-        if verbosity > 0:
-            LOGGER.info(f"{self.action} {source} to {target} ({bytes_to_human(size)})")
-
-        try:
-            results = s3.head_object(Bucket=bucket, Key=key)
-            remote_size = int(results["ContentLength"])
-        except ClientError as e:
-            if e.response["Error"]["Code"] != "404":
-                raise
-            remote_size = None
-
-        if remote_size is not None:
-            if remote_size != size:
-                LOGGER.warning(
-                    f"{target} already exists, but with different size, re-uploading (remote={remote_size}, local={size})"
-                )
-            elif resume:
-                # LOGGER.info(f"{target} already exists, skipping")
-                return size
-
-        if remote_size is not None and not overwrite and not resume:
-            raise ValueError(f"{target} already exists, use 'overwrite' to replace or 'resume' to skip")
-
-        if verbosity > 0:
-            with tqdm.tqdm(total=size, unit="B", unit_scale=True, unit_divisor=1024, leave=False) as pbar:
-                s3.upload_file(source, bucket, key, Callback=lambda x: pbar.update(x), Config=config)
-        else:
-            s3.upload_file(source, bucket, key, Config=config)
-
-        return size
-
-
-class Download(Transfer):
-    action = "Downloading"
-
-    def list_source(self, source):
-        yield from _list_objects(source)
-
-    def source_path(self, s3_object, source):
-        _, _, bucket, _ = source.split("/", 3)
-        return f"s3://{bucket}/{s3_object['Key']}"
-
-    def target_path(self, s3_object, source, target):
-        _, _, _, folder = source.split("/", 3)
-        local_path = os.path.join(target, os.path.relpath(s3_object["Key"], folder))
-        os.makedirs(os.path.dirname(local_path), exist_ok=True)
-        return local_path
-
-    def source_size(self, s3_object):
-        return s3_object["Size"]
-
-    def transfer_file(self, source, target, overwrite, resume, verbosity, progress=None, config=None):
-        try:
-            return self._transfer_file(source, target, overwrite, resume, verbosity, config=config)
-        except Exception as e:
-            LOGGER.exception(f"Error transferring {source} to {target}")
-            LOGGER.error(e)
-            raise
-
-    def _transfer_file(self, source, target, overwrite, resume, verbosity, config=None):
-        # from boto3.s3.transfer import TransferConfig
-
-        _, _, bucket, key = source.split("/", 3)
-        s3 = s3_client(bucket)
-
-        try:
-            response = s3.head_object(Bucket=bucket, Key=key)
-        except s3.exceptions.ClientError as e:
-            if e.response["Error"]["Code"] == "404":
-                raise ValueError(f"{source} does not exist ({bucket}, {key})")
-            raise
-
-        size = int(response["ContentLength"])
-
-        if verbosity > 0:
-            LOGGER.info(f"Downloading {source} to {target} ({bytes_to_human(size)})")
-
-        if overwrite:
-            resume = False
-
-        if resume:
-            if os.path.exists(target):
-                local_size = os.path.getsize(target)
-                if local_size != size:
-                    LOGGER.warning(
-                        f"{target} already with different size, re-downloading (remote={size}, local={size})"
-                    )
-                else:
-                    # if verbosity > 0:
-                    #     LOGGER.info(f"{target} already exists, skipping")
-                    return size
-
-        if os.path.exists(target) and not overwrite:
-            raise ValueError(f"{target} already exists, use 'overwrite' to replace or 'resume' to skip")
-
-        if verbosity > 0:
-            with tqdm.tqdm(total=size, unit="B", unit_scale=True, unit_divisor=1024, leave=False) as pbar:
-                s3.download_file(bucket, key, target, Callback=lambda x: pbar.update(x), Config=config)
-        else:
-            s3.download_file(bucket, key, target, Config=config)
-
-        return size
+def s3_client(*args, **kwargs):
+    warnings.warn(
+        "The 's3_client' function (from anemoi.utils.s3 import s3_client) function is deprecated and will be removed in a future release. "
+        "Please use the 's3_client' function (from anemoi.utils.remote.s3 import s3_client) instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return s3_client_(*args, **kwargs)


 def upload(source, target, *, overwrite=False, resume=False, verbosity=1, progress=None, threads=1) -> None:
-    """Upload a file or a folder to S3.
-
-    Parameters
-    ----------
-    source : str
-        A path to a file or a folder to upload.
-    target : str
-        A URL to a file or a folder on S3. The url should start with 's3://'.
-    overwrite : bool, optional
-        If the data is alreay on S3 it will be overwritten, by default False
-    resume : bool, optional
-        If the data is alreay on S3 it will not be uploaded, unless the remote file
-        has a different size, by default False
-    verbosity : int, optional
-        The level of verbosity, by default 1
-    progress: callable, optional
-        A callable that will be called with the number of files, the total size of the files, the total size
-        transferred and a boolean indicating if the transfer has started. By default None
-    threads : int, optional
-        The number of threads to use when uploading a directory, by default 1
-    """
-
-    uploader = Upload()
-
-    if os.path.isdir(source):
-        uploader.transfer_folder(
-            source=source,
-            target=target,
-            overwrite=overwrite,
-            resume=resume,
-            verbosity=verbosity,
-            progress=progress,
-            threads=threads,
-        )
-    else:
-        uploader.transfer_file(
-            source=source,
-            target=target,
-            overwrite=overwrite,
-            resume=resume,
-            verbosity=verbosity,
-            progress=progress,
-        )
-
-
-def download(source, target, *, overwrite=False, resume=False, verbosity=1, progress=None, threads=1) -> None:
-    """Download a file or a folder from S3.
-
-    Parameters
-    ----------
-    source : str
-        The URL of a file or a folder on S3. The url should start with 's3://'. If the URL ends with a '/' it is
-        assumed to be a folder, otherwise it is assumed to be a file.
-    target : str
-        The local path where the file or folder will be downloaded.
-    overwrite : bool, optional
-        If false, files which have already been download will be skipped, unless their size
-        does not match their size on S3 , by default False
-    resume : bool, optional
-        If the data is alreay on local it will not be downloaded, unless the remote file
-        has a different size, by default False
-    verbosity : int, optional
-        The level of verbosity, by default 1
-    progress: callable, optional
-        A callable that will be called with the number of files, the total size of the files, the total size
-        transferred and a boolean indicating if the transfer has started. By default None
-    threads : int, optional
-        The number of threads to use when downloading a directory, by default 1
-    """
-    assert source.startswith("s3://")
-
-    downloader = Download()
-
-    if source.endswith("/"):
-        downloader.transfer_folder(
-            source=source,
-            target=target,
-            overwrite=overwrite,
-            resume=resume,
-            verbosity=verbosity,
-            progress=progress,
-            threads=threads,
-        )
-    else:
-        downloader.transfer_file(
-            source=source,
-            target=target,
-            overwrite=overwrite,
-            resume=resume,
-            verbosity=verbosity,
-            progress=progress,
-        )
-
-
-def _list_objects(target, batch=False):
-    _, _, bucket, prefix = target.split("/", 3)
-    s3 = s3_client(bucket)
-
-    paginator = s3.get_paginator("list_objects_v2")
-
-    for page in paginator.paginate(Bucket=bucket, Prefix=prefix):
-        if "Contents" in page:
-            objects = deepcopy(page["Contents"])
-            if batch:
-                yield objects
-            else:
-                yield from objects
-
-
-def _delete_folder(target) -> None:
-    _, _, bucket, _ = target.split("/", 3)
-    s3 = s3_client(bucket)
-
-    total = 0
-    for batch in _list_objects(target, batch=True):
-        LOGGER.info(f"Deleting {len(batch):,} objects from {target}")
-        s3.delete_objects(Bucket=bucket, Delete={"Objects": [{"Key": o["Key"]} for o in batch]})
-        total += len(batch)
-        LOGGER.info(f"Deleted {len(batch):,} objects (total={total:,})")
-
-
-def _delete_file(target) -> None:
-    from botocore.exceptions import ClientError
-
-    _, _, bucket, key = target.split("/", 3)
-    s3 = s3_client(bucket)
-
-    try:
-        s3.head_object(Bucket=bucket, Key=key)
-        exits = True
-    except ClientError as e:
-        if e.response["Error"]["Code"] != "404":
-            raise
-        exits = False
-
-    if not exits:
-        LOGGER.warning(f"{target} does not exist. Did you mean to delete a folder? Then add a trailing '/'")
-        return
-
-    LOGGER.info(f"Deleting {target}")
-    s3.delete_object(Bucket=bucket, Key=key)
-    LOGGER.info(f"{target} is deleted")
-
-
-def delete(target) -> None:
-    """Delete a file or a folder from S3.
-
-    Parameters
-    ----------
-    target : str
-        The URL of a file or a folder on S3. The url should start with 's3://'. If the URL ends with a '/' it is
-        assumed to be a folder, otherwise it is assumed to be a file.
-    """
-
-    assert target.startswith("s3://")
-
-    if target.endswith("/"):
-        _delete_folder(target)
-    else:
-        _delete_file(target)
-
-
-def list_folder(folder) -> list:
-    """List the sub folders in a folder on S3.
-
-    Parameters
-    ----------
-    folder : str
-        The URL of a folder on S3. The url should start with 's3://'.
-
-    Returns
-    -------
-    list
-        A list of the subfolders names in the folder.
-    """
-
-    assert folder.startswith("s3://")
-    if not folder.endswith("/"):
-        folder += "/"
-
-    _, _, bucket, prefix = folder.split("/", 3)
-
-    s3 = s3_client(bucket)
-    paginator = s3.get_paginator("list_objects_v2")
-
-    for page in paginator.paginate(Bucket=bucket, Prefix=prefix, Delimiter="/"):
-        if "CommonPrefixes" in page:
-            yield from [folder + _["Prefix"] for _ in page.get("CommonPrefixes")]
-
-
-def object_info(target) -> dict:
-    """Get information about an object on S3.
-
-    Parameters
-    ----------
-    target : str
-        The URL of a file or a folder on S3. The url should start with 's3://'.
-
-    Returns
-    -------
-    dict
-        A dictionary with information about the object.
-    """
-
-    _, _, bucket, key = target.split("/", 3)
-    s3 = s3_client(bucket)
-
-    try:
-        return s3.head_object(Bucket=bucket, Key=key)
-    except s3.exceptions.ClientError as e:
-        if e.response["Error"]["Code"] == "404":
-            raise ValueError(f"{target} does not exist")
-        raise
-
-
-def object_acl(target) -> dict:
-    """Get information about an object's ACL on S3.
-
-    Parameters
-    ----------
-    target : str
-        The URL of a file or a folder on S3. The url should start with 's3://'.
-
-    Returns
-    -------
-    dict
-        A dictionary with information about the object's ACL.
-    """
-
-    _, _, bucket, key = target.split("/", 3)
-    s3 = s3_client()
-
-    return s3.get_object_acl(Bucket=bucket, Key=key)
+    warnings.warn(
+        "The 'upload' function (from anemoi.utils.s3 import upload) function is deprecated and will be removed in a future release. "
+        "Please use the 'transfer' function (from anemoi.utils.remote import transfer) instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return transfer(
+        source, target, overwrite=overwrite, resume=resume, verbosity=verbosity, progress=progress, threads=threads
+    )
+
+
+def download(*args, **kwargs):
+    warnings.warn(
+        "The 'download' function (from anemoi.utils.s3 import download) function is deprecated and will be removed in a future release. "
+        "Please use the 'transfer' function (from anemoi.utils.remote import transfer) instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return transfer(*args, **kwargs)
+
+
+def delete(*args, **kwargs):
+    warnings.warn(
+        "The 'delete' function (from anemoi.utils.s3 import delete) function is deprecated and will be removed in a future release. "
+        "Please use the 'transfer' function (from anemoi.utils.remote.s3 import delete) instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    return delete_(*args, **kwargs)
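
In 0.4.6 the module above is reduced to a thin shim: anemoi/utils/s3.py re-exports from the new anemoi.utils.remote package and emits a DeprecationWarning on import and on every call. A minimal migration sketch, based on the replacements named in the warning messages (the bucket and paths below are hypothetical):

    # Before (still importable in 0.4.6, but warns):
    from anemoi.utils.s3 import upload, download

    upload("data/", "s3://my-bucket/data/")
    download("s3://my-bucket/data/", "data/")

    # After:
    from anemoi.utils.remote import transfer
    from anemoi.utils.remote.s3 import delete

    transfer("data/", "s3://my-bucket/data/", overwrite=True)  # upload
    transfer("s3://my-bucket/data/", "data/", resume=True)     # download
    delete("s3://my-bucket/data/")                             # trailing '/' deletes a folder

While migrating, escalating the warnings (python -W error::DeprecationWarning, or warnings.simplefilter("error", DeprecationWarning) in a test session) turns any remaining anemoi.utils.s3 imports into hard failures, which makes leftover call sites easy to find.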
anemoi_utils-0.4.4.dist-info/METADATA → anemoi_utils-0.4.6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: anemoi-utils
-Version: 0.4.4
+Version: 0.4.6
 Summary: A package to hold various functions to support training of ML models on ECMWF data.
 Author-email: "European Centre for Medium-Range Weather Forecasts (ECMWF)" <software.support@ecmwf.int>
 License: Apache License
@@ -219,6 +219,7 @@ Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Programming Language :: Python :: Implementation :: CPython
 Classifier: Programming Language :: Python :: Implementation :: PyPy
 Requires-Python: >=3.9
anemoi_utils-0.4.6.dist-info/RECORD ADDED
@@ -0,0 +1,32 @@
+anemoi/utils/__init__.py,sha256=0u0eIdu5-H1frf6V4KHpNmlh_SS-bJnxjzIejlsLqdw,702
+anemoi/utils/__main__.py,sha256=5NW2A3OgTimB4ptwYThivIRSeCrvabMuvnr8mmnVx0E,715
+anemoi/utils/_version.py,sha256=zpZoWJd6Ckmo0bRYgkjKcEXS2EIwkKGRgif5PN8gs6Y,411
+anemoi/utils/caching.py,sha256=0cznpvaaox14NSVi-Q3PqumfuGtXo0YNcEFwDPxvMZw,1948
+anemoi/utils/checkpoints.py,sha256=q8QqKlZ6qChjzEfq7KM1gVXuyqgsVRGIb4dJFtkGk58,7774
+anemoi/utils/cli.py,sha256=rmMP60VY3em99rQP6TCrKibMngWwVe5h_0GDcf16c5U,4117
+anemoi/utils/compatibility.py,sha256=0_nIcbdQbNMrS6AkqrBgJGJlSJXW8R23ncaZaDwdJ4c,2190
+anemoi/utils/config.py,sha256=Fpy4wzj3dahTwwO75Iet6zmQQFGmTvhXml6-EsTEvgk,9873
+anemoi/utils/dates.py,sha256=wwYD5_QI7EWY_jhpENNYtL5O7fjwYkzmqHkNoayvmrY,12452
+anemoi/utils/grib.py,sha256=zBICyOsYtR_9px1C5UDT6wL_D6kiIhUi_00kjFmas5c,3047
+anemoi/utils/hindcasts.py,sha256=TEYDmrZUajuhp_dfWeg6z5c6XfntE-mwugUQJyAgUco,1419
+anemoi/utils/humanize.py,sha256=tSQkiUHiDj3VYk-DeruHp9P79sJO1b0whsPBphqy9qA,16627
+anemoi/utils/provenance.py,sha256=SqOiNoY1y36Zec83Pjt7OhihbwxMyknscfmogHCuriA,10894
+anemoi/utils/registry.py,sha256=Iit_CfTGuoVffXkZA2A5mUXb4AdGIUX9TpnUqWT4HJ0,4291
+anemoi/utils/s3.py,sha256=UOEETko08hnIXeFy8u10eQbqpcape9d-L6IgsjFMe18,2473
+anemoi/utils/sanitise.py,sha256=MqEMLwVZ1jSemLDBoQXuJyXKIfyR0gzYi7DoITBcir8,2866
+anemoi/utils/sanitize.py,sha256=43ZKDcfVpeXSsJ9TFEc9aZnD6oe2cUh151XnDspM98M,462
+anemoi/utils/text.py,sha256=Xfr_3wvsjg7m-BwvdJVz1bV6f5KNMnGIIFRtXaiMfbs,10496
+anemoi/utils/timer.py,sha256=Twnr3GZu-n0WzgboELRKJWs87qyDYqy6Dwr9cQ_JG18,1803
+anemoi/utils/commands/__init__.py,sha256=O5W3yHZywRoAqmRUioAr3zMCh0hGVV18wZYGvc00ioM,698
+anemoi/utils/commands/config.py,sha256=zt4PFATYJ-zs0C5mpUlrQ4Fj5m1kM3CcsszUP1VBbzA,816
+anemoi/utils/mars/__init__.py,sha256=kvbu-gSaYI9jSNEzfQltrtHPVIameYGoLjOJKwI7x_U,1723
+anemoi/utils/mars/mars.yaml,sha256=R0dujp75lLA4wCWhPeOQnzJ45WZAYLT8gpx509cBFlc,66
+anemoi/utils/remote/__init__.py,sha256=nLoXHj0Jp3YrGSwb0xkGyeuquDolWiiSBtGPJJX8gbM,11481
+anemoi/utils/remote/s3.py,sha256=CNEYq8P7o7lSB1acUp_GPU8c-WrEa7Cduv3rGw4Kba0,11924
+anemoi/utils/remote/ssh.py,sha256=0Kj-W6U5pSK60pIiAiDUzqNPH7_dVnebrajWrew-Ye0,4546
+anemoi_utils-0.4.6.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+anemoi_utils-0.4.6.dist-info/METADATA,sha256=VkNVI8JM7Sqbllhp3afhI6CWZ6MD-X1YcMgDf7A6L40,15222
+anemoi_utils-0.4.6.dist-info/WHEEL,sha256=R06PA3UVYHThwHvxuRWMqaGcr-PuniXahwjmQRFMEkY,91
+anemoi_utils-0.4.6.dist-info/entry_points.txt,sha256=LENOkn88xzFQo-V59AKoA_F_cfYQTJYtrNTtf37YgHY,60
+anemoi_utils-0.4.6.dist-info/top_level.txt,sha256=DYn8VPs-fNwr7fNH9XIBqeXIwiYYd2E2k5-dUFFqUz0,7
+anemoi_utils-0.4.6.dist-info/RECORD,,
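
The new RECORD confirms where the implementation moved: anemoi/utils/remote/__init__.py, remote/s3.py and remote/ssh.py are added, while anemoi/utils/s3.py shrinks from 18,694 bytes (see the old RECORD below) to 2,473. Each RECORD entry has the form path,sha256=<digest>,size, where the digest is the urlsafe base64 of the file's SHA-256 hash with '=' padding stripped (PEP 376 / PEP 427). A short sketch to recompute an entry against an installed copy (record_hash is a hypothetical helper, not part of the package):

    import base64
    import hashlib
    from pathlib import Path

    def record_hash(path):
        # RECORD digest: urlsafe-b64(sha256(file contents)) with '=' padding removed
        digest = hashlib.sha256(Path(path).read_bytes()).digest()
        return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")

    # Compare with: anemoi/utils/s3.py,sha256=UOEETko08hnIXeFy8u10eQbqpcape9d-L6IgsjFMe18,2473
    print(record_hash("anemoi/utils/s3.py"))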
anemoi_utils-0.4.4.dist-info/WHEEL → anemoi_utils-0.4.6.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (75.3.0)
+Generator: setuptools (75.5.0)
 Root-Is-Purelib: true
 Tag: py3-none-any

@@ -1,28 +0,0 @@
1
- anemoi/utils/__init__.py,sha256=0u0eIdu5-H1frf6V4KHpNmlh_SS-bJnxjzIejlsLqdw,702
2
- anemoi/utils/__main__.py,sha256=cLA2PidDTOUHaDGzd0_E5iioKYNe-PSTv567Y2fuwQk,723
3
- anemoi/utils/_version.py,sha256=t0Mfy7vCENQWSYE4xRfluKBHgAYWK48fjSubsSGEQEI,411
4
- anemoi/utils/caching.py,sha256=0cznpvaaox14NSVi-Q3PqumfuGtXo0YNcEFwDPxvMZw,1948
5
- anemoi/utils/checkpoints.py,sha256=719HjvY8zyseQxwk-08rMB3X3vI_o26Sq_AiBFkZ8Fk,7802
6
- anemoi/utils/cli.py,sha256=rmMP60VY3em99rQP6TCrKibMngWwVe5h_0GDcf16c5U,4117
7
- anemoi/utils/config.py,sha256=Fpy4wzj3dahTwwO75Iet6zmQQFGmTvhXml6-EsTEvgk,9873
8
- anemoi/utils/dates.py,sha256=wwYD5_QI7EWY_jhpENNYtL5O7fjwYkzmqHkNoayvmrY,12452
9
- anemoi/utils/grib.py,sha256=zBICyOsYtR_9px1C5UDT6wL_D6kiIhUi_00kjFmas5c,3047
10
- anemoi/utils/hindcasts.py,sha256=OUOY2nDa3LBnzJ3ncgANzJDapouh82KgVyofDAu7K_Q,1426
11
- anemoi/utils/humanize.py,sha256=tSQkiUHiDj3VYk-DeruHp9P79sJO1b0whsPBphqy9qA,16627
12
- anemoi/utils/provenance.py,sha256=SqOiNoY1y36Zec83Pjt7OhihbwxMyknscfmogHCuriA,10894
13
- anemoi/utils/registry.py,sha256=m7jNJKTkMPOv_muUCn1RPeVW8D8DtggRIhY36RtcQfU,2847
14
- anemoi/utils/s3.py,sha256=LMljA5OoaVcgZcg_rmH-_LOX4uicMZl1FY64Bx4uOO8,18694
15
- anemoi/utils/sanitise.py,sha256=MqEMLwVZ1jSemLDBoQXuJyXKIfyR0gzYi7DoITBcir8,2866
16
- anemoi/utils/sanitize.py,sha256=43ZKDcfVpeXSsJ9TFEc9aZnD6oe2cUh151XnDspM98M,462
17
- anemoi/utils/text.py,sha256=Xfr_3wvsjg7m-BwvdJVz1bV6f5KNMnGIIFRtXaiMfbs,10496
18
- anemoi/utils/timer.py,sha256=Twnr3GZu-n0WzgboELRKJWs87qyDYqy6Dwr9cQ_JG18,1803
19
- anemoi/utils/commands/__init__.py,sha256=qAybFZPBBQs0dyx7dZ3X5JsLpE90pwrqt1vSV7cqEIw,706
20
- anemoi/utils/commands/config.py,sha256=cAt6yYF3rN1shr57c8wXsgMEvQMRN08l_fCdA1Ux9gE,839
21
- anemoi/utils/mars/__init__.py,sha256=RAeY8gJ7ZvsPlcIvrQ4fy9xVHs3SphTAPw_XJDtNIKo,1750
22
- anemoi/utils/mars/mars.yaml,sha256=R0dujp75lLA4wCWhPeOQnzJ45WZAYLT8gpx509cBFlc,66
23
- anemoi_utils-0.4.4.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
24
- anemoi_utils-0.4.4.dist-info/METADATA,sha256=9Tnb0OBqtTnOcJPBg3yRRYKLEgPW0FByS_GkAke-mGg,15171
25
- anemoi_utils-0.4.4.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
26
- anemoi_utils-0.4.4.dist-info/entry_points.txt,sha256=LENOkn88xzFQo-V59AKoA_F_cfYQTJYtrNTtf37YgHY,60
27
- anemoi_utils-0.4.4.dist-info/top_level.txt,sha256=DYn8VPs-fNwr7fNH9XIBqeXIwiYYd2E2k5-dUFFqUz0,7
28
- anemoi_utils-0.4.4.dist-info/RECORD,,