wandb 0.18.1__py3-none-win_amd64.whl → 0.18.3__py3-none-win_amd64.whl
- wandb/__init__.py +3 -3
- wandb/__init__.pyi +67 -12
- wandb/apis/internal.py +3 -0
- wandb/apis/public/api.py +128 -2
- wandb/apis/public/artifacts.py +11 -7
- wandb/apis/public/jobs.py +8 -0
- wandb/apis/public/runs.py +16 -5
- wandb/bin/nvidia_gpu_stats.exe +0 -0
- wandb/bin/wandb-core +0 -0
- wandb/cli/cli.py +0 -3
- wandb/errors/__init__.py +11 -40
- wandb/errors/errors.py +37 -0
- wandb/errors/warnings.py +2 -0
- wandb/integration/tensorboard/log.py +1 -1
- wandb/old/core.py +2 -80
- wandb/plot/bar.py +7 -4
- wandb/plot/confusion_matrix.py +5 -4
- wandb/plot/histogram.py +7 -4
- wandb/plot/line.py +7 -4
- wandb/proto/v3/wandb_internal_pb2.py +31 -21
- wandb/proto/v3/wandb_settings_pb2.py +2 -2
- wandb/proto/v4/wandb_internal_pb2.py +23 -21
- wandb/proto/v4/wandb_settings_pb2.py +2 -2
- wandb/proto/v5/wandb_internal_pb2.py +23 -21
- wandb/proto/v5/wandb_settings_pb2.py +2 -2
- wandb/sdk/artifacts/_validators.py +48 -3
- wandb/sdk/artifacts/artifact.py +160 -186
- wandb/sdk/artifacts/artifact_file_cache.py +13 -11
- wandb/sdk/artifacts/artifact_instance_cache.py +4 -2
- wandb/sdk/artifacts/artifact_manifest.py +13 -11
- wandb/sdk/artifacts/artifact_manifest_entry.py +24 -22
- wandb/sdk/artifacts/artifact_manifests/artifact_manifest_v1.py +9 -7
- wandb/sdk/artifacts/artifact_saver.py +27 -25
- wandb/sdk/artifacts/exceptions.py +26 -25
- wandb/sdk/artifacts/storage_handler.py +11 -9
- wandb/sdk/artifacts/storage_handlers/azure_handler.py +16 -14
- wandb/sdk/artifacts/storage_handlers/gcs_handler.py +15 -13
- wandb/sdk/artifacts/storage_handlers/http_handler.py +15 -14
- wandb/sdk/artifacts/storage_handlers/local_file_handler.py +10 -8
- wandb/sdk/artifacts/storage_handlers/multi_handler.py +14 -12
- wandb/sdk/artifacts/storage_handlers/s3_handler.py +19 -19
- wandb/sdk/artifacts/storage_handlers/tracking_handler.py +10 -8
- wandb/sdk/artifacts/storage_handlers/wb_artifact_handler.py +12 -10
- wandb/sdk/artifacts/storage_handlers/wb_local_artifact_handler.py +9 -7
- wandb/sdk/artifacts/storage_policies/wandb_storage_policy.py +31 -29
- wandb/sdk/artifacts/storage_policy.py +20 -20
- wandb/sdk/backend/backend.py +8 -26
- wandb/sdk/data_types/base_types/wb_value.py +1 -3
- wandb/sdk/data_types/video.py +2 -2
- wandb/sdk/interface/interface.py +0 -24
- wandb/sdk/interface/interface_shared.py +0 -12
- wandb/sdk/internal/handler.py +0 -10
- wandb/sdk/internal/internal_api.py +71 -0
- wandb/sdk/internal/sender.py +0 -43
- wandb/sdk/internal/tb_watcher.py +1 -1
- wandb/sdk/lib/_settings_toposort_generated.py +1 -0
- wandb/sdk/lib/hashutil.py +34 -12
- wandb/sdk/lib/service_connection.py +216 -0
- wandb/sdk/lib/service_token.py +94 -0
- wandb/sdk/lib/sock_client.py +7 -3
- wandb/sdk/service/server.py +2 -5
- wandb/sdk/service/service.py +0 -22
- wandb/sdk/wandb_init.py +33 -22
- wandb/sdk/wandb_run.py +45 -33
- wandb/sdk/wandb_settings.py +2 -0
- wandb/sdk/wandb_setup.py +25 -16
- wandb/sdk/wandb_sync.py +9 -3
- wandb/sdk/wandb_watch.py +31 -15
- wandb/util.py +8 -1
- {wandb-0.18.1.dist-info → wandb-0.18.3.dist-info}/METADATA +3 -2
- {wandb-0.18.1.dist-info → wandb-0.18.3.dist-info}/RECORD +75 -74
- wandb/sdk/internal/update.py +0 -113
- wandb/sdk/service/service_base.py +0 -50
- wandb/sdk/service/service_sock.py +0 -70
- wandb/sdk/wandb_manager.py +0 -232
- /wandb/{sdk/lib → plot}/viz.py +0 -0
- {wandb-0.18.1.dist-info → wandb-0.18.3.dist-info}/WHEEL +0 -0
- {wandb-0.18.1.dist-info → wandb-0.18.3.dist-info}/entry_points.txt +0 -0
- {wandb-0.18.1.dist-info → wandb-0.18.3.dist-info}/licenses/LICENSE +0 -0
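The largest single change in this release is the cleanup of wandb/sdk/artifacts/artifact.py shown below: the repeated inline guard calls (self._ensure_logged(...) and self._ensure_can_add()) are replaced by @ensure_logged and @ensure_not_finalized decorators imported from wandb.sdk.artifacts._validators, and the typing.Optional/List/Dict annotations are rewritten as PEP 604 unions and builtin generics under a new `from __future__ import annotations`. As a rough sketch of the decorator idea only (hypothetical names and exception type, not the actual wandb.sdk.artifacts._validators implementation):

from functools import wraps
from typing import Any, Callable, TypeVar

R = TypeVar("R")


class NotLoggedError(Exception):
    """Raised when a method needs the object to have been logged/saved first."""


def ensure_logged(method: Callable[..., R]) -> Callable[..., R]:
    # Centralizes the "is this artifact logged yet?" check so each method or
    # property no longer has to call a guard helper by hand.
    @wraps(method)
    def wrapper(self: Any, *args: Any, **kwargs: Any) -> R:
        if self.is_draft():  # the wrapped object defines what "draft" means
            raise NotLoggedError(f"{type(self).__name__}.{method.__name__}")
        return method(self, *args, **kwargs)

    return wrapper


class Example:
    def __init__(self, logged: bool = False) -> None:
        self._logged = logged

    def is_draft(self) -> bool:
        return not self._logged

    @property
    @ensure_logged
    def version(self) -> str:
        return "v0"

Applied to a property, the decorator sits under @property (as it does throughout the diff), so the check runs every time the attribute is read.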
wandb/sdk/artifacts/artifact.py
CHANGED
@@ -1,5 +1,7 @@
 """Artifact class."""
 
+from __future__ import annotations
+
 import atexit
 import concurrent.futures
 import contextlib
@@ -17,21 +19,7 @@ from copy import copy
 from datetime import datetime, timedelta
 from functools import partial
 from pathlib import PurePosixPath
-from typing import (
-    IO,
-    TYPE_CHECKING,
-    Any,
-    Dict,
-    Generator,
-    List,
-    Optional,
-    Sequence,
-    Set,
-    Tuple,
-    Type,
-    Union,
-    cast,
-)
+from typing import IO, TYPE_CHECKING, Any, Dict, Iterator, Sequence, Type, cast
 
 from wandb.sdk.artifacts.storage_handlers.gcs_handler import _GCSIsADirectoryError
 
@@ -50,7 +38,12 @@ from wandb.apis.normalize import normalize_exceptions
 from wandb.apis.public import ArtifactCollection, ArtifactFiles, RetryingClient, Run
 from wandb.data_types import WBValue
 from wandb.errors.term import termerror, termlog, termwarn
-from wandb.sdk.artifacts._validators import validate_aliases, validate_tags
+from wandb.sdk.artifacts._validators import (
+    ensure_logged,
+    ensure_not_finalized,
+    validate_aliases,
+    validate_tags,
+)
 from wandb.sdk.artifacts.artifact_download_logger import ArtifactDownloadLogger
 from wandb.sdk.artifacts.artifact_instance_cache import artifact_instance_cache
 from wandb.sdk.artifacts.artifact_manifest import ArtifactManifest
@@ -60,11 +53,7 @@ from wandb.sdk.artifacts.artifact_manifests.artifact_manifest_v1 import (
 )
 from wandb.sdk.artifacts.artifact_state import ArtifactState
 from wandb.sdk.artifacts.artifact_ttl import ArtifactTTL
-from wandb.sdk.artifacts.exceptions import (
-    ArtifactFinalizedError,
-    ArtifactNotLoggedError,
-    WaitTimeoutError,
-)
+from wandb.sdk.artifacts.exceptions import ArtifactNotLoggedError, WaitTimeoutError
 from wandb.sdk.artifacts.staging import get_staging_dir
 from wandb.sdk.artifacts.storage_layout import StorageLayout
 from wandb.sdk.artifacts.storage_policies import WANDB_STORAGE_POLICY
@@ -129,10 +118,10 @@ class Artifact:
         self,
         name: str,
         type: str,
-        description: Optional[str] = None,
-        metadata: Optional[Dict[str, Any]] = None,
+        description: str | None = None,
+        metadata: dict[str, Any] | None = None,
         incremental: bool = False,
-        use_as: Optional[str] = None,
+        use_as: str | None = None,
     ) -> None:
         if not re.match(r"^[a-zA-Z0-9_\-.]+$", name):
             raise ValueError(
@@ -148,55 +137,53 @@ class Artifact:
             termwarn("Using experimental arg `incremental`")
 
         # Internal.
-        self._client: Optional[RetryingClient] = None
+        self._client: RetryingClient | None = None
 
         storage_policy_cls = StoragePolicy.lookup_by_name(WANDB_STORAGE_POLICY)
         layout = StorageLayout.V1 if env.get_use_v1_artifacts() else StorageLayout.V2
         policy_config = {"storageLayout": layout}
         self._storage_policy = storage_policy_cls.from_config(config=policy_config)
 
-        self._tmp_dir: Optional[tempfile.TemporaryDirectory] = None
-        self._added_objs: Dict[
-            int, Tuple[WBValue, ArtifactManifestEntry]
-        ] = {}
-        self._added_local_paths: Dict[str, ArtifactManifestEntry] = {}
-        self._save_future: Optional[MessageFuture] = None
-        self._download_roots: Set[str] = set()
+        self._tmp_dir: tempfile.TemporaryDirectory | None = None
+        self._added_objs: dict[int, tuple[WBValue, ArtifactManifestEntry]] = {}
+        self._added_local_paths: dict[str, ArtifactManifestEntry] = {}
+        self._save_future: MessageFuture | None = None
+        self._download_roots: set[str] = set()
         # Set by new_draft(), otherwise the latest artifact will be used as the base.
-        self._base_id: Optional[str] = None
+        self._base_id: str | None = None
         # Properties.
-        self._id: Optional[str] = None
+        self._id: str | None = None
         self._client_id: str = runid.generate_id(128)
         self._sequence_client_id: str = runid.generate_id(128)
-        self._entity: Optional[str] = None
-        self._project: Optional[str] = None
+        self._entity: str | None = None
+        self._project: str | None = None
         self._name: str = name  # includes version after saving
-        self._version: Optional[str] = None
-        self._source_entity: Optional[str] = None
-        self._source_project: Optional[str] = None
+        self._version: str | None = None
+        self._source_entity: str | None = None
+        self._source_project: str | None = None
         self._source_name: str = name  # includes version after saving
-        self._source_version: Optional[str] = None
+        self._source_version: str | None = None
         self._type: str = type
-        self._description: Optional[str] = description
+        self._description: str | None = description
         self._metadata: dict = self._normalize_metadata(metadata)
-        self._ttl_duration_seconds: Optional[int] = None
+        self._ttl_duration_seconds: int | None = None
         self._ttl_is_inherited: bool = True
         self._ttl_changed: bool = False
-        self._aliases: List[str] = []
-        self._saved_aliases: List[str] = []
-        self._tags: List[str] = []
-        self._saved_tags: List[str] = []
-        self._distributed_id: Optional[str] = None
+        self._aliases: list[str] = []
+        self._saved_aliases: list[str] = []
+        self._tags: list[str] = []
+        self._saved_tags: list[str] = []
+        self._distributed_id: str | None = None
         self._incremental: bool = incremental
-        self._use_as: Optional[str] = use_as
+        self._use_as: str | None = use_as
         self._state: ArtifactState = ArtifactState.PENDING
-        self._manifest: Optional[ArtifactManifest] = ArtifactManifestV1(
+        self._manifest: ArtifactManifest | None = ArtifactManifestV1(
             self._storage_policy
         )
-        self._commit_hash: Optional[str] = None
-        self._file_count: Optional[int] = None
-        self._created_at: Optional[str] = None
-        self._updated_at: Optional[str] = None
+        self._commit_hash: str | None = None
+        self._file_count: int | None = None
+        self._created_at: str | None = None
+        self._updated_at: str | None = None
         self._final: bool = False
 
         # Cache.
@@ -206,7 +193,7 @@ class Artifact:
         return f"<Artifact {self.id or self.name}>"
 
     @classmethod
-    def _from_id(cls, artifact_id: str, client: RetryingClient) -> Optional["Artifact"]:
+    def _from_id(cls, artifact_id: str, client: RetryingClient) -> Artifact | None:
         artifact = artifact_instance_cache.get(artifact_id)
         if artifact is not None:
             return artifact
@@ -245,7 +232,7 @@ class Artifact:
     @classmethod
     def _from_name(
         cls, entity: str, project: str, name: str, client: RetryingClient
-    ) -> "Artifact":
+    ) -> Artifact:
         query = gql(
             """
             query ArtifactByName(
@@ -284,9 +271,9 @@ class Artifact:
         entity: str,
         project: str,
         name: str,
-        attrs: Dict[str, Any],
+        attrs: dict[str, Any],
         client: RetryingClient,
-    ) -> "Artifact":
+    ) -> Artifact:
         # Placeholder is required to skip validation.
         artifact = cls("placeholder", type="placeholder")
         artifact._client = client
@@ -352,7 +339,8 @@ class Artifact:
         artifact_instance_cache[artifact.id] = artifact
         return artifact
 
-    def new_draft(self) -> "Artifact":
+    @ensure_logged
+    def new_draft(self) -> Artifact:
         """Create a new draft artifact with the same content as this committed artifact.
 
         The artifact returned can be extended or modified and logged as a new version.
@@ -363,8 +351,6 @@ class Artifact:
         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
         """
-        self._ensure_logged("new_draft")
-
         # Name, _entity and _project are set to the *source* name/entity/project:
         # if this artifact is saved it must be saved to the source sequence.
         artifact = Artifact(self.source_name.split(":")[0], self.type)
@@ -389,7 +375,7 @@ class Artifact:
     # Properties (Python Class managed attributes).
 
     @property
-    def id(self) -> Optional[str]:
+    def id(self) -> str | None:
         """The artifact's ID."""
         if self.is_draft():
             return None
@@ -397,16 +383,16 @@ class Artifact:
         return self._id
 
     @property
+    @ensure_logged
     def entity(self) -> str:
         """The name of the entity of the secondary (portfolio) artifact collection."""
-        self._ensure_logged("entity")
         assert self._entity is not None
         return self._entity
 
     @property
+    @ensure_logged
     def project(self) -> str:
         """The name of the project of the secondary (portfolio) artifact collection."""
-        self._ensure_logged("project")
         assert self._project is not None
         return self._project
 
@@ -425,13 +411,14 @@ class Artifact:
         return f"{self.entity}/{self.project}/{self.name}"
 
     @property
+    @ensure_logged
    def version(self) -> str:
         """The artifact's version in its secondary (portfolio) collection."""
-        self._ensure_logged("version")
         assert self._version is not None
         return self._version
 
     @property
+    @ensure_logged
     def collection(self) -> ArtifactCollection:
         """The collection this artifact was retrieved from.
 
@@ -441,23 +428,22 @@ class Artifact:
         that an artifact version originated from. The collection
         that an artifact originates from is known as the source sequence.
         """
-        self._ensure_logged("collection")
         base_name = self.name.split(":")[0]
         return ArtifactCollection(
             self._client, self.entity, self.project, base_name, self.type
         )
 
     @property
+    @ensure_logged
     def source_entity(self) -> str:
         """The name of the entity of the primary (sequence) artifact collection."""
-        self._ensure_logged("source_entity")
         assert self._source_entity is not None
         return self._source_entity
 
     @property
+    @ensure_logged
     def source_project(self) -> str:
         """The name of the project of the primary (sequence) artifact collection."""
-        self._ensure_logged("source_project")
         assert self._source_project is not None
         return self._source_project
 
@@ -476,19 +462,19 @@ class Artifact:
         return f"{self.source_entity}/{self.source_project}/{self.source_name}"
 
     @property
+    @ensure_logged
     def source_version(self) -> str:
         """The artifact's version in its primary (sequence) collection.
 
         A string with the format "v{number}".
         """
-        self._ensure_logged("source_version")
         assert self._source_version is not None
         return self._source_version
 
     @property
+    @ensure_logged
     def source_collection(self) -> ArtifactCollection:
         """The artifact's primary (sequence) collection."""
-        self._ensure_logged("source_collection")
         base_name = self.source_name.split(":")[0]
         return ArtifactCollection(
             self._client, self.source_entity, self.source_project, base_name, self.type
@@ -500,12 +486,12 @@ class Artifact:
         return self._type
 
     @property
-    def description(self) -> Optional[str]:
+    def description(self) -> str | None:
         """A description of the artifact."""
         return self._description
 
     @description.setter
-    def description(self, description: Optional[str]) -> None:
+    def description(self, description: str | None) -> None:
         """Set the description of the artifact.
 
         For model or dataset Artifacts, add documentation for your
@@ -540,7 +526,7 @@ class Artifact:
         self._metadata = self._normalize_metadata(metadata)
 
     @property
-    def ttl(self) -> Optional[timedelta]:
+    def ttl(self) -> timedelta | None:
         """The time-to-live (TTL) policy of an artifact.
 
         Artifacts are deleted shortly after a TTL policy's duration passes.
@@ -554,13 +540,13 @@ class Artifact:
             ArtifactNotLoggedError: Unable to fetch inherited TTL if the artifact has not been logged or saved
         """
         if self._ttl_is_inherited and (self.is_draft() or self._ttl_changed):
-            raise ArtifactNotLoggedError(self, "ttl")
+            raise ArtifactNotLoggedError(f"{type(self).__name__}.ttl", self)
         if self._ttl_duration_seconds is None:
             return None
         return timedelta(seconds=self._ttl_duration_seconds)
 
     @ttl.setter
-    def ttl(self, ttl: Union[timedelta, ArtifactTTL, None]) -> None:
+    def ttl(self, ttl: timedelta | ArtifactTTL | None) -> None:
         """The time-to-live (TTL) policy of an artifact.
 
         Artifacts are deleted shortly after a TTL policy's duration passes.
@@ -595,7 +581,8 @@ class Artifact:
             self._ttl_duration_seconds = int(ttl.total_seconds())
 
     @property
-    def aliases(self) -> List[str]:
+    @ensure_logged
+    def aliases(self) -> list[str]:
         """List of one or more semantically-friendly references or identifying "nicknames" assigned to an artifact version.
 
         Aliases are mutable references that you can programmatically reference.
@@ -603,33 +590,32 @@ class Artifact:
         See [Create new artifact versions](https://docs.wandb.ai/guides/artifacts/create-a-new-artifact-version)
         for more information.
         """
-        self._ensure_logged("aliases")
         return self._aliases
 
     @aliases.setter
-    def aliases(self, aliases: List[str]) -> None:
+    @ensure_logged
+    def aliases(self, aliases: list[str]) -> None:
         """Set the aliases associated with this artifact."""
-        self._ensure_logged("aliases")
         self._aliases = validate_aliases(aliases)
 
     @property
-    def tags(self) -> List[str]:
+    @ensure_logged
+    def tags(self) -> list[str]:
         """List of one or more tags assigned to this artifact version."""
-        self._ensure_logged("tags")
         return self._tags
 
     @tags.setter
-    def tags(self, tags: List[str]) -> None:
+    @ensure_logged
+    def tags(self, tags: list[str]) -> None:
         """Set the tags associated with this artifact."""
-        self._ensure_logged("tags")
         self._tags = validate_tags(tags)
 
     @property
-    def distributed_id(self) -> Optional[str]:
+    def distributed_id(self) -> str | None:
         return self._distributed_id
 
     @distributed_id.setter
-    def distributed_id(self, distributed_id: Optional[str]) -> None:
+    def distributed_id(self, distributed_id: str | None) -> None:
         self._distributed_id = distributed_id
 
     @property
@@ -637,7 +623,7 @@ class Artifact:
         return self._incremental
 
     @property
-    def use_as(self) -> Optional[str]:
+    def use_as(self) -> str | None:
         return self._use_as
 
     @property
@@ -708,30 +694,30 @@ class Artifact:
         return total_size
 
     @property
+    @ensure_logged
     def commit_hash(self) -> str:
         """The hash returned when this artifact was committed."""
-        self._ensure_logged("commit_hash")
         assert self._commit_hash is not None
         return self._commit_hash
 
     @property
+    @ensure_logged
     def file_count(self) -> int:
         """The number of files (including references)."""
-        self._ensure_logged("file_count")
         assert self._file_count is not None
         return self._file_count
 
     @property
+    @ensure_logged
     def created_at(self) -> str:
         """Timestamp when the artifact was created."""
-        self._ensure_logged("created_at")
         assert self._created_at is not None
         return self._created_at
 
     @property
+    @ensure_logged
     def updated_at(self) -> str:
         """The time when the artifact was last updated."""
-        self._ensure_logged("updated_at")
         assert self._created_at is not None
         return self._updated_at or self._created_at
 
@@ -747,14 +733,6 @@ class Artifact:
         """
         self._final = True
 
-    def _ensure_can_add(self) -> None:
-        if self._final:
-            raise ArtifactFinalizedError(artifact=self)
-
-    def _ensure_logged(self, attr: Optional[str] = None) -> None:
-        if self.is_draft():
-            raise ArtifactNotLoggedError(self, attr)
-
     def is_draft(self) -> bool:
         """Check if artifact is not saved.
 
@@ -767,8 +745,8 @@ class Artifact:
 
     def save(
         self,
-        project: Optional[str] = None,
-        settings: Optional["wandb.Settings"] = None,
+        project: str | None = None,
+        settings: wandb.Settings | None = None,
     ) -> None:
         """Persist any changes made to the artifact.
 
@@ -807,12 +785,12 @@ class Artifact:
             wandb.run.log_artifact(self)
 
     def _set_save_future(
-        self, save_future: "MessageFuture", client: RetryingClient
+        self, save_future: MessageFuture, client: RetryingClient
     ) -> None:
         self._save_future = save_future
         self._client = client
 
-    def wait(self, timeout: Optional[int] = None) -> "Artifact":
+    def wait(self, timeout: int | None = None) -> Artifact:
         """If needed, wait for this artifact to finish logging.
 
         Arguments:
@@ -823,7 +801,7 @@ class Artifact:
         """
         if self.is_draft():
             if self._save_future is None:
-                raise ArtifactNotLoggedError(self, "wait")
+                raise ArtifactNotLoggedError(type(self).wait.__qualname__, self)
             result = self._save_future.get(timeout)
             if not result:
                 raise WaitTimeoutError(
@@ -1134,7 +1112,7 @@ class Artifact:
 
     # Adding, removing, getting entries.
 
-    def __getitem__(self, name: str) -> Optional[WBValue]:
+    def __getitem__(self, name: str) -> WBValue | None:
         """Get the WBValue object located at the artifact relative `name`.
 
         Arguments:
@@ -1148,7 +1126,7 @@ class Artifact:
         """
         return self.get(name)
 
-    def __setitem__(self, name: str, item:
+    def __setitem__(self, name: str, item: WBValue) -> ArtifactManifestEntry:
         """Add `item` to the artifact at path `name`.
 
         Arguments:
@@ -1165,9 +1143,10 @@ class Artifact:
         return self.add(item, name)
 
     @contextlib.contextmanager
+    @ensure_not_finalized
     def new_file(
-        self, name: str, mode: str = "w", encoding: Optional[str] = None
-    ) -> Generator[IO, None, None]:
+        self, name: str, mode: str = "w", encoding: str | None = None
+    ) -> Iterator[IO]:
         """Open a new temporary file and add it to the artifact.
 
         Arguments:
@@ -1183,7 +1162,6 @@ class Artifact:
             ArtifactFinalizedError: You cannot make changes to the current artifact
             version because it is finalized. Log a new artifact version instead.
         """
-        self._ensure_can_add()
         if self._tmp_dir is None:
             self._tmp_dir = tempfile.TemporaryDirectory()
         path = os.path.join(self._tmp_dir.name, name.lstrip("/"))
@@ -1203,13 +1181,14 @@ class Artifact:
 
         self.add_file(path, name=name, policy="immutable", skip_cache=True)
 
+    @ensure_not_finalized
     def add_file(
         self,
         local_path: str,
-        name: Optional[str] = None,
-        is_tmp: Optional[bool] = False,
-        skip_cache: Optional[bool] = False,
-        policy: Optional[Literal["mutable", "immutable"]] = "mutable",
+        name: str | None = None,
+        is_tmp: bool | None = False,
+        skip_cache: bool | None = False,
+        policy: Literal["mutable", "immutable"] | None = "mutable",
     ) -> ArtifactManifestEntry:
         """Add a local file to the artifact.
 
@@ -1220,9 +1199,9 @@ class Artifact:
             is_tmp: If true, then the file is renamed deterministically to avoid
                 collisions.
            skip_cache: If set to `True`, W&B will not copy files to the cache after uploading.
-            policy: "mutable" | "immutable". By default, "mutable"
-                "mutable": Create a temporary copy of the file to prevent corruption during upload.
-                "immutable": Disable protection, rely on the user not to delete or change the file.
+            policy: By default, set to "mutable". If set to "mutable", create a temporary copy of the
+                file to prevent corruption during upload. If set to "immutable", disable
+                protection and rely on the user not to delete or change the file.
 
         Returns:
             The added manifest entry
@@ -1232,7 +1211,6 @@ class Artifact:
             version because it is finalized. Log a new artifact version instead.
             ValueError: Policy must be "mutable" or "immutable"
         """
-        self._ensure_can_add()
         if not os.path.isfile(local_path):
             raise ValueError("Path is not a file: {}".format(local_path))
 
@@ -1249,12 +1227,13 @@ class Artifact:
             name, local_path, digest=digest, skip_cache=skip_cache, policy=policy
         )
 
+    @ensure_not_finalized
     def add_dir(
         self,
         local_path: str,
-        name: Optional[str] = None,
-        skip_cache: Optional[bool] = False,
-        policy: Optional[Literal["mutable", "immutable"]] = "mutable",
+        name: str | None = None,
+        skip_cache: bool | None = False,
+        policy: Literal["mutable", "immutable"] | None = "mutable",
     ) -> None:
         """Add a local directory to the artifact.
 
@@ -1273,7 +1252,6 @@ class Artifact:
             version because it is finalized. Log a new artifact version instead.
             ValueError: Policy must be "mutable" or "immutable"
         """
-        self._ensure_can_add()
         if not os.path.isdir(local_path):
             raise ValueError("Path is not a directory: {}".format(local_path))
 
@@ -1294,7 +1272,7 @@ class Artifact:
                     logical_path = os.path.join(name, logical_path)
                 paths.append((logical_path, physical_path))
 
-        def add_manifest_file(log_phy_path: Tuple[str, str]) -> None:
+        def add_manifest_file(log_phy_path: tuple[str, str]) -> None:
             logical_path, physical_path = log_phy_path
             self._add_local_file(
                 name=logical_path,
@@ -1311,12 +1289,13 @@ class Artifact:
 
         termlog("Done. %.1fs" % (time.time() - start_time), prefix=False)
 
+    @ensure_not_finalized
     def add_reference(
         self,
-        uri: Union[ArtifactManifestEntry, str],
-        name: Optional[StrPath] = None,
+        uri: ArtifactManifestEntry | str,
+        name: StrPath | None = None,
         checksum: bool = True,
-        max_objects: Optional[int] = None,
+        max_objects: int | None = None,
     ) -> Sequence[ArtifactManifestEntry]:
         """Add a reference denoted by a URI to the artifact.
 
@@ -1366,7 +1345,6 @@ class Artifact:
             ArtifactFinalizedError: You cannot make changes to the current artifact
             version because it is finalized. Log a new artifact version instead.
         """
-        self._ensure_can_add()
         if name is not None:
             name = LogicalPath(name)
 
@@ -1394,7 +1372,8 @@ class Artifact:
 
         return manifest_entries
 
-    def add(self, obj: WBValue, name: StrPath) -> ArtifactManifestEntry:
+    @ensure_not_finalized
+    def add(self, obj: WBValue, name: StrPath) -> ArtifactManifestEntry:
         """Add wandb.WBValue `obj` to the artifact.
 
         Arguments:
@@ -1410,7 +1389,6 @@ class Artifact:
             ArtifactFinalizedError: You cannot make changes to the current artifact
             version because it is finalized. Log a new artifact version instead.
         """
-        self._ensure_can_add()
         name = LogicalPath(name)
 
         # This is a "hack" to automatically rename tables added to
@@ -1497,9 +1475,9 @@ class Artifact:
         self,
         name: StrPath,
         path: StrPath,
-        digest: Optional[B64MD5] = None,
-        skip_cache: Optional[bool] = False,
-        policy: Optional[Literal["mutable", "immutable"]] = "mutable",
+        digest: B64MD5 | None = None,
+        skip_cache: bool | None = False,
+        policy: Literal["mutable", "immutable"] | None = "mutable",
     ) -> ArtifactManifestEntry:
         policy = policy or "mutable"
         if policy not in ["mutable", "immutable"]:
@@ -1526,7 +1504,8 @@ class Artifact:
         self._added_local_paths[os.fspath(path)] = entry
         return entry
 
-    def remove(self, item: Union[StrPath, ArtifactManifestEntry]) -> None:
+    @ensure_not_finalized
+    def remove(self, item: StrPath | ArtifactManifestEntry) -> None:
         """Remove an item from the artifact.
 
         Arguments:
@@ -1539,8 +1518,6 @@ class Artifact:
             version because it is finalized. Log a new artifact version instead.
             FileNotFoundError: If the item isn't found in the artifact.
         """
-        self._ensure_can_add()
-
         if isinstance(item, ArtifactManifestEntry):
             self.manifest.remove_entry(item)
             return
@@ -1565,6 +1542,7 @@ class Artifact:
         )
         return self.get_entry(name)
 
+    @ensure_logged
     def get_entry(self, name: StrPath) -> ArtifactManifestEntry:
         """Get the entry with the given name.
 
@@ -1578,8 +1556,6 @@ class Artifact:
             ArtifactNotLoggedError: if the artifact isn't logged or the run is offline.
             KeyError: if the artifact doesn't contain an entry with the given name.
         """
-        self._ensure_logged("get_entry")
-
         name = LogicalPath(name)
         entry = self.manifest.entries.get(name) or self._get_obj_entry(name)[0]
         if entry is None:
@@ -1587,7 +1563,8 @@ class Artifact:
         entry._parent_artifact = self
         return entry
 
-    def get(self, name: str) -> Optional[WBValue]:
+    @ensure_logged
+    def get(self, name: str) -> WBValue | None:
         """Get the WBValue object located at the artifact relative `name`.
 
         Arguments:
@@ -1599,8 +1576,6 @@ class Artifact:
         Raises:
             ArtifactNotLoggedError: if the artifact isn't logged or the run is offline
         """
-        self._ensure_logged("get")
-
         entry, wb_class = self._get_obj_entry(name)
         if entry is None or wb_class is None:
             return None
@@ -1635,7 +1610,7 @@ class Artifact:
             result._set_artifact_source(self, name)
         return result
 
-    def get_added_local_path_name(self, local_path: str) -> Optional[str]:
+    def get_added_local_path_name(self, local_path: str) -> str | None:
         """Get the artifact relative name of a file added by a local filesystem path.
 
         Arguments:
@@ -1651,7 +1626,7 @@ class Artifact:
 
     def _get_obj_entry(
         self, name: str
-    ) -> Tuple[Optional[ArtifactManifestEntry], Optional[Type[WBValue]]]:
+    ) -> tuple[ArtifactManifestEntry, Type[WBValue]] | tuple[None, None]:  # noqa: UP006  # `type` shadows `Artifact.type`
         """Return an object entry by name, handling any type suffixes.
 
         When objects are added with `.add(obj, name)`, the name is typically changed to
@@ -1671,12 +1646,13 @@ class Artifact:
 
     # Downloading.
 
+    @ensure_logged
     def download(
         self,
-        root: Optional[StrPath] = None,
+        root: StrPath | None = None,
         allow_missing_references: bool = False,
-        skip_cache: Optional[bool] = None,
-        path_prefix: Optional[StrPath] = None,
+        skip_cache: bool | None = None,
+        path_prefix: StrPath | None = None,
     ) -> FilePathStr:
         """Download the contents of the artifact to the specified root directory.
 
@@ -1701,8 +1677,6 @@ class Artifact:
             ArtifactNotLoggedError: If the artifact is not logged.
             RuntimeError: If the artifact is attempted to be downloaded in offline mode.
         """
-        self._ensure_logged("download")
-
         root = FilePathStr(str(root or self._default_root()))
         self._add_download_root(root)
 
@@ -1730,7 +1704,7 @@ class Artifact:
         root: str,
         allow_missing_references: bool = False,
         skip_cache: bool = False,
-        path_prefix: Optional[StrPath] = None,
+        path_prefix: StrPath | None = None,
     ) -> FilePathStr:
         import pathlib
 
@@ -1738,7 +1712,9 @@ class Artifact:
 
         if wandb.run is None:
             # ensure wandb-core is up and running
-            wl = wandb.setup()
+            from wandb.sdk import wandb_setup
+
+            wl = wandb_setup.setup()
             assert wl is not None
 
             stream_id = generate_id()
@@ -1752,11 +1728,17 @@ class Artifact:
             settings.files_dir.value = str(tmp_dir / "files")
             settings.run_id.value = stream_id
 
-            manager = wl._get_manager()
-            manager._inform_init(settings=settings, run_id=stream_id)
+            service = wl.service
+            assert service
+
+            service.inform_init(settings=settings, run_id=stream_id)
 
             mailbox = Mailbox()
-            backend = Backend(settings=wl.settings, manager=manager, mailbox=mailbox)
+            backend = Backend(
+                settings=wl.settings,
+                service=service,
+                mailbox=mailbox,
+            )
             backend.ensure_launched()
 
             assert backend.interface
@@ -1797,8 +1779,8 @@ class Artifact:
         self,
         root: str,
         allow_missing_references: bool = False,
-        skip_cache: Optional[bool] = None,
-        path_prefix: Optional[StrPath] = None,
+        skip_cache: bool | None = None,
+        path_prefix: StrPath | None = None,
     ) -> FilePathStr:
         nfiles = len(self.manifest.entries)
         size = sum(e.size or 0 for e in self.manifest.entries.values())
@@ -1815,9 +1797,9 @@ class Artifact:
 
         def _download_entry(
             entry: ArtifactManifestEntry,
-            api_key: Optional[str],
-            cookies: Optional[dict],
-            headers: Optional[dict],
+            api_key: str | None,
+            cookies: dict | None,
+            headers: dict | None,
         ) -> None:
             _thread_local_api_settings.api_key = api_key
             _thread_local_api_settings.cookies = cookies
@@ -1888,9 +1870,7 @@ class Artifact:
         retry_timedelta=timedelta(minutes=3),
         retryable_exceptions=(requests.RequestException),
     )
-    def _fetch_file_urls(
-        self, cursor: Optional[str], per_page: Optional[int] = 5000
-    ) -> Any:
+    def _fetch_file_urls(self, cursor: str | None, per_page: int | None = 5000) -> Any:
         query = gql(
             """
             query ArtifactFileURLs($id: ID!, $cursor: String, $perPage: Int) {
@@ -1919,7 +1899,8 @@ class Artifact:
         )
         return response["artifact"]["files"]
 
-    def checkout(self, root: Optional[str] = None) -> str:
+    @ensure_logged
+    def checkout(self, root: str | None = None) -> str:
         """Replace the specified root directory with the contents of the artifact.
 
         WARNING: This will delete all files in `root` that are not included in the
@@ -1934,8 +1915,6 @@ class Artifact:
         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
         """
-        self._ensure_logged("checkout")
-
         root = root or self._default_root(include_version=False)
 
         for dirpath, _, files in os.walk(root):
@@ -1950,7 +1929,8 @@ class Artifact:
 
         return self.download(root=root)
 
-    def verify(self, root: Optional[str] = None) -> None:
+    @ensure_logged
+    def verify(self, root: str | None = None) -> None:
         """Verify that the contents of an artifact match the manifest.
 
         All files in the directory are checksummed and the checksums are then
@@ -1964,8 +1944,6 @@ class Artifact:
             ArtifactNotLoggedError: If the artifact is not logged.
             ValueError: If the verification fails.
         """
-        self._ensure_logged("verify")
-
         root = root or self._default_root()
 
         for dirpath, _, files in os.walk(root):
@@ -1991,7 +1969,8 @@ class Artifact:
         if ref_count > 0:
             print("Warning: skipped verification of {} refs".format(ref_count))
 
-    def file(self, root: Optional[str] = None) -> StrPath:
+    @ensure_logged
+    def file(self, root: str | None = None) -> StrPath:
         """Download a single file artifact to the directory you specify with `root`.
 
         Arguments:
@@ -2005,8 +1984,6 @@ class Artifact:
             ArtifactNotLoggedError: If the artifact is not logged.
             ValueError: If the artifact contains more than one file.
         """
-        self._ensure_logged("file")
-
         if root is None:
             root = os.path.join(".", "artifacts", self.name)
 
@@ -2018,8 +1995,9 @@ class Artifact:
 
         return self.get_entry(list(self.manifest.entries)[0]).download(root)
 
+    @ensure_logged
     def files(
-        self, names: Optional[List[str]] = None, per_page: int = 50
+        self, names: list[str] | None = None, per_page: int = 50
     ) -> ArtifactFiles:
         """Iterate over all files stored in this artifact.
 
@@ -2034,7 +2012,6 @@ class Artifact:
         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
         """
-        self._ensure_logged("files")
         return ArtifactFiles(self._client, self, names, per_page)
 
     def _default_root(self, include_version: bool = True) -> FilePathStr:
@@ -2049,7 +2026,7 @@ class Artifact:
     def _add_download_root(self, dir_path: str) -> None:
         self._download_roots.add(os.path.abspath(dir_path))
 
-    def _local_path_to_name(self, file_path: str) -> Optional[str]:
+    def _local_path_to_name(self, file_path: str) -> str | None:
         """Convert a local file path to a path entry in the artifact."""
         abs_file_path = os.path.abspath(file_path)
         abs_file_parts = abs_file_path.split(os.sep)
@@ -2060,6 +2037,7 @@ class Artifact:
 
     # Others.
 
+    @ensure_logged
     def delete(self, delete_aliases: bool = False) -> None:
         """Delete an artifact and its files.
 
@@ -2075,7 +2053,6 @@ class Artifact:
         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
         """
-        self._ensure_logged("delete")
         if self.collection.is_sequence():
             self._delete(delete_aliases)
         else:
@@ -2107,7 +2084,7 @@ class Artifact:
         )
 
     @normalize_exceptions
-    def link(self, target_path: str, aliases: Optional[List[str]] = None) -> None:
+    def link(self, target_path: str, aliases: list[str] | None = None) -> None:
         """Link this artifact to a portfolio (a promoted collection of artifacts).
 
         Arguments:
@@ -2136,6 +2113,7 @@ class Artifact:
         else:
             wandb.run.link_artifact(self, target_path, aliases)
 
+    @ensure_logged
     def unlink(self) -> None:
         """Unlink this artifact if it is currently a member of a portfolio (a promoted collection of artifacts).
 
@@ -2143,8 +2121,6 @@ class Artifact:
             ArtifactNotLoggedError: If the artifact is not logged.
             ValueError: If the artifact is not linked, i.e. it is not a member of a portfolio collection.
         """
-        self._ensure_logged("unlink")
-
         # Fail early if this isn't a linked artifact to begin with
         if self.collection.is_sequence():
             raise ValueError(
@@ -2178,7 +2154,8 @@ class Artifact:
             },
         )
 
-    def used_by(self) -> List[Run]:
+    @ensure_logged
+    def used_by(self) -> list[Run]:
         """Get a list of the runs that have used this artifact.
 
         Returns:
@@ -2187,8 +2164,6 @@ class Artifact:
         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
         """
-        self._ensure_logged("used_by")
-
         query = gql(
             """
             query ArtifactUsedBy(
@@ -2225,7 +2200,8 @@ class Artifact:
             for edge in response.get("artifact", {}).get("usedBy", {}).get("edges", [])
         ]
 
-    def logged_by(self) -> Optional[Run]:
+    @ensure_logged
+    def logged_by(self) -> Run | None:
         """Get the W&B run that originally logged the artifact.
 
         Returns:
@@ -2234,8 +2210,6 @@ class Artifact:
         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
         """
-        self._ensure_logged("logged_by")
-
         query = gql(
             """
             query ArtifactCreatedBy(
@@ -2270,19 +2244,19 @@ class Artifact:
             creator["name"],
         )
 
-    def json_encode(self) -> Dict[str, Any]:
+    @ensure_logged
+    def json_encode(self) -> dict[str, Any]:
         """Returns the artifact encoded to the JSON format.
 
         Returns:
             A `dict` with `string` keys representing attributes of the artifact.
         """
-        self._ensure_logged("json_encode")
         return util.artifact_to_json(self)
 
     @staticmethod
     def _expected_type(
         entity_name: str, project_name: str, name: str, client: RetryingClient
-    ) -> Optional[str]:
+    ) -> str | None:
         """Returns the expected type for a given artifact name and project."""
         query = gql(
             """
@@ -2317,7 +2291,7 @@ class Artifact:
         ).get("name")
 
     @staticmethod
-    def _normalize_metadata(metadata: Optional[Dict[str, Any]]) -> Dict[str, Any]:
+    def _normalize_metadata(metadata: dict[str, Any] | None) -> dict[str, Any]:
         if metadata is None:
             return {}
         if not isinstance(metadata, dict):
@@ -2384,7 +2358,7 @@ class Artifact:
 
         return fragment
 
-    def _ttl_duration_seconds_to_gql(self) -> Optional[int]:
+    def _ttl_duration_seconds_to_gql(self) -> int | None:
         # Set artifact ttl value to ttl_duration_seconds if the user set a value
         # otherwise use ttl_status to indicate the backend INHERIT(-1) or DISABLED(-2) when the TTL is None
         # When ttl_change = None its a no op since nothing changed
@@ -2398,8 +2372,8 @@ class Artifact:
             return self._ttl_duration_seconds or DISABLED
 
     def _ttl_duration_seconds_from_gql(
-        self, gql_ttl_duration_seconds: Optional[int]
-    ) -> Optional[int]:
+        self, gql_ttl_duration_seconds: int | None
+    ) -> int | None:
         # If gql_ttl_duration_seconds is not positive, its indicating that TTL is DISABLED(-2)
         # gql_ttl_duration_seconds only returns None if the server is not compatible with setting Artifact TTLs
         if gql_ttl_duration_seconds and gql_ttl_duration_seconds > 0: