wandb 0.19.9__py3-none-any.whl → 0.19.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wandb/__init__.py +1 -1
- wandb/__init__.pyi +6 -3
- wandb/_pydantic/__init__.py +14 -8
- wandb/_pydantic/base.py +51 -36
- wandb/_pydantic/utils.py +73 -0
- wandb/_pydantic/v1_compat.py +79 -57
- wandb/apis/public/__init__.py +2 -2
- wandb/apis/public/api.py +684 -4
- wandb/apis/public/artifacts.py +377 -677
- wandb/apis/public/automations.py +69 -0
- wandb/apis/public/integrations.py +180 -0
- wandb/apis/public/projects.py +29 -0
- wandb/apis/public/registries/__init__.py +0 -0
- wandb/apis/public/registries/_freezable_list.py +179 -0
- wandb/apis/public/{registries.py → registries/registries_search.py} +22 -129
- wandb/apis/public/registries/registry.py +357 -0
- wandb/apis/public/registries/utils.py +140 -0
- wandb/apis/public/runs.py +58 -56
- wandb/apis/public/utils.py +107 -1
- wandb/automations/__init__.py +73 -0
- wandb/automations/_filters/__init__.py +40 -0
- wandb/automations/_filters/expressions.py +181 -0
- wandb/automations/_filters/operators.py +258 -0
- wandb/automations/_filters/run_metrics.py +332 -0
- wandb/automations/_generated/__init__.py +177 -0
- wandb/automations/_generated/create_automation.py +17 -0
- wandb/automations/_generated/create_generic_webhook_integration.py +43 -0
- wandb/automations/_generated/delete_automation.py +17 -0
- wandb/automations/_generated/enums.py +33 -0
- wandb/automations/_generated/fragments.py +358 -0
- wandb/automations/_generated/generic_webhook_integrations_by_entity.py +22 -0
- wandb/automations/_generated/get_automations.py +24 -0
- wandb/automations/_generated/get_automations_by_entity.py +26 -0
- wandb/automations/_generated/input_types.py +104 -0
- wandb/automations/_generated/integrations_by_entity.py +22 -0
- wandb/automations/_generated/operations.py +647 -0
- wandb/automations/_generated/slack_integrations_by_entity.py +22 -0
- wandb/automations/_generated/update_automation.py +17 -0
- wandb/automations/_utils.py +237 -0
- wandb/automations/_validators.py +165 -0
- wandb/automations/actions.py +220 -0
- wandb/automations/automations.py +87 -0
- wandb/automations/events.py +287 -0
- wandb/automations/integrations.py +45 -0
- wandb/automations/scopes.py +78 -0
- wandb/beta/workflows.py +9 -10
- wandb/bin/gpu_stats +0 -0
- wandb/cli/cli.py +3 -3
- wandb/env.py +11 -0
- wandb/integration/keras/keras.py +2 -1
- wandb/integration/langchain/wandb_tracer.py +2 -1
- wandb/jupyter.py +137 -118
- wandb/old/settings.py +4 -1
- wandb/old/summary.py +0 -2
- wandb/proto/v3/wandb_internal_pb2.py +297 -292
- wandb/proto/v3/wandb_settings_pb2.py +2 -2
- wandb/proto/v3/wandb_telemetry_pb2.py +10 -10
- wandb/proto/v4/wandb_internal_pb2.py +292 -292
- wandb/proto/v4/wandb_settings_pb2.py +2 -2
- wandb/proto/v4/wandb_telemetry_pb2.py +10 -10
- wandb/proto/v5/wandb_internal_pb2.py +292 -292
- wandb/proto/v5/wandb_settings_pb2.py +2 -2
- wandb/proto/v5/wandb_telemetry_pb2.py +10 -10
- wandb/proto/v6/wandb_base_pb2.py +41 -0
- wandb/proto/v6/wandb_internal_pb2.py +393 -0
- wandb/proto/v6/wandb_server_pb2.py +78 -0
- wandb/proto/v6/wandb_settings_pb2.py +58 -0
- wandb/proto/v6/wandb_telemetry_pb2.py +52 -0
- wandb/proto/wandb_base_pb2.py +2 -0
- wandb/proto/wandb_deprecated.py +8 -0
- wandb/proto/wandb_internal_pb2.py +3 -1
- wandb/proto/wandb_server_pb2.py +2 -0
- wandb/proto/wandb_settings_pb2.py +2 -0
- wandb/proto/wandb_telemetry_pb2.py +2 -0
- wandb/sdk/artifacts/_generated/__init__.py +289 -0
- wandb/sdk/artifacts/_generated/add_aliases.py +21 -0
- wandb/sdk/artifacts/_generated/artifact_collection_membership_files.py +43 -0
- wandb/sdk/artifacts/_generated/artifact_version_files.py +36 -0
- wandb/sdk/artifacts/_generated/create_artifact_collection_tag_assignments.py +36 -0
- wandb/sdk/artifacts/_generated/delete_aliases.py +21 -0
- wandb/sdk/artifacts/_generated/delete_artifact_collection_tag_assignments.py +25 -0
- wandb/sdk/artifacts/_generated/delete_artifact_portfolio.py +35 -0
- wandb/sdk/artifacts/_generated/delete_artifact_sequence.py +35 -0
- wandb/sdk/artifacts/_generated/enums.py +17 -0
- wandb/sdk/artifacts/_generated/fetch_linked_artifacts.py +67 -0
- wandb/sdk/artifacts/_generated/fragments.py +221 -0
- wandb/sdk/artifacts/_generated/input_types.py +28 -0
- wandb/sdk/artifacts/_generated/move_artifact_collection.py +35 -0
- wandb/sdk/artifacts/_generated/operations.py +611 -0
- wandb/sdk/artifacts/_generated/project_artifact_collection.py +101 -0
- wandb/sdk/artifacts/_generated/project_artifact_collections.py +33 -0
- wandb/sdk/artifacts/_generated/project_artifact_type.py +24 -0
- wandb/sdk/artifacts/_generated/project_artifact_types.py +24 -0
- wandb/sdk/artifacts/_generated/project_artifacts.py +42 -0
- wandb/sdk/artifacts/_generated/run_input_artifacts.py +51 -0
- wandb/sdk/artifacts/_generated/run_output_artifacts.py +51 -0
- wandb/sdk/artifacts/_generated/update_artifact.py +26 -0
- wandb/sdk/artifacts/_generated/update_artifact_portfolio.py +35 -0
- wandb/sdk/artifacts/_generated/update_artifact_sequence.py +35 -0
- wandb/sdk/artifacts/_graphql_fragments.py +57 -79
- wandb/sdk/artifacts/_validators.py +120 -1
- wandb/sdk/artifacts/artifact.py +419 -215
- wandb/sdk/artifacts/artifact_file_cache.py +4 -6
- wandb/sdk/artifacts/artifact_manifest_entry.py +13 -3
- wandb/sdk/artifacts/storage_handlers/azure_handler.py +1 -0
- wandb/sdk/artifacts/storage_policies/wandb_storage_policy.py +182 -1
- wandb/sdk/artifacts/storage_policy.py +3 -0
- wandb/sdk/data_types/base_types/media.py +2 -3
- wandb/sdk/data_types/base_types/wb_value.py +34 -11
- wandb/sdk/data_types/html.py +36 -9
- wandb/sdk/data_types/image.py +12 -12
- wandb/sdk/data_types/table.py +5 -0
- wandb/sdk/data_types/trace_tree.py +2 -0
- wandb/sdk/data_types/utils.py +1 -1
- wandb/sdk/data_types/video.py +59 -57
- wandb/sdk/interface/interface.py +4 -3
- wandb/sdk/internal/internal_api.py +21 -31
- wandb/sdk/internal/profiler.py +6 -5
- wandb/sdk/internal/run.py +13 -6
- wandb/sdk/internal/sender.py +5 -2
- wandb/sdk/launch/sweeps/utils.py +8 -0
- wandb/sdk/lib/apikey.py +25 -4
- wandb/sdk/lib/asyncio_compat.py +1 -1
- wandb/sdk/lib/deprecate.py +13 -22
- wandb/sdk/lib/disabled.py +2 -1
- wandb/sdk/lib/printer.py +37 -8
- wandb/sdk/lib/printer_asyncio.py +46 -0
- wandb/sdk/lib/redirect.py +10 -5
- wandb/sdk/projects/_generated/__init__.py +47 -0
- wandb/sdk/projects/_generated/delete_project.py +22 -0
- wandb/sdk/projects/_generated/enums.py +4 -0
- wandb/sdk/projects/_generated/fetch_registry.py +22 -0
- wandb/sdk/projects/_generated/fragments.py +41 -0
- wandb/sdk/projects/_generated/input_types.py +13 -0
- wandb/sdk/projects/_generated/operations.py +88 -0
- wandb/sdk/projects/_generated/rename_project.py +27 -0
- wandb/sdk/projects/_generated/upsert_registry_project.py +27 -0
- wandb/sdk/service/server_sock.py +19 -14
- wandb/sdk/service/service.py +18 -8
- wandb/sdk/service/streams.py +5 -0
- wandb/sdk/verify/verify.py +6 -3
- wandb/sdk/wandb_init.py +217 -70
- wandb/sdk/wandb_login.py +13 -4
- wandb/sdk/wandb_run.py +419 -295
- wandb/sdk/wandb_settings.py +27 -10
- wandb/sdk/wandb_setup.py +61 -0
- wandb/util.py +33 -29
- {wandb-0.19.9.dist-info → wandb-0.19.11.dist-info}/METADATA +5 -5
- {wandb-0.19.9.dist-info → wandb-0.19.11.dist-info}/RECORD +152 -82
- wandb/_globals.py +0 -19
- wandb/sdk/internal/_generated/base.py +0 -226
- wandb/sdk/internal/_generated/typing_compat.py +0 -14
- {wandb-0.19.9.dist-info → wandb-0.19.11.dist-info}/WHEEL +0 -0
- {wandb-0.19.9.dist-info → wandb-0.19.11.dist-info}/entry_points.txt +0 -0
- {wandb-0.19.9.dist-info → wandb-0.19.11.dist-info}/licenses/LICENSE +0 -0
wandb/sdk/artifacts/artifact.py
CHANGED
@@ -14,6 +14,7 @@ import shutil
 import stat
 import tempfile
 import time
+from collections import deque
 from copy import copy
 from dataclasses import dataclass
 from datetime import datetime, timedelta
@@ -28,15 +29,27 @@ import wandb
 from wandb import data_types, env, util
 from wandb.apis.normalize import normalize_exceptions
 from wandb.apis.public import ArtifactCollection, ArtifactFiles, RetryingClient, Run
+from wandb.apis.public.utils import gql_compat
 from wandb.data_types import WBValue
+from wandb.errors import CommError
 from wandb.errors.term import termerror, termlog, termwarn
 from wandb.proto import wandb_internal_pb2 as pb
-from wandb.
+from wandb.proto.wandb_deprecated import Deprecated
+from wandb.sdk import wandb_setup
+from wandb.sdk.artifacts._generated.fetch_linked_artifacts import FetchLinkedArtifacts
+from wandb.sdk.artifacts._generated.operations import FETCH_LINKED_ARTIFACTS_GQL
+from wandb.sdk.artifacts._graphql_fragments import (
+    _gql_artifact_fragment,
+    omit_artifact_fields,
+)
 from wandb.sdk.artifacts._validators import (
+    LINKED_ARTIFACT_COLLECTION_TYPE,
+    _LinkArtifactFields,
     ensure_logged,
     ensure_not_finalized,
     is_artifact_registry_project,
     validate_aliases,
+    validate_artifact_name,
     validate_tags,
 )
 from wandb.sdk.artifacts.artifact_download_logger import ArtifactDownloadLogger
@@ -59,12 +72,22 @@ from wandb.sdk.data_types._dtypes import TypeRegistry
 from wandb.sdk.internal.internal_api import Api as InternalApi
 from wandb.sdk.internal.thread_local_settings import _thread_local_api_settings
 from wandb.sdk.lib import filesystem, retry, runid, telemetry
-from wandb.sdk.lib.deprecate import
+from wandb.sdk.lib.deprecate import deprecate
 from wandb.sdk.lib.hashutil import B64MD5, b64_to_hex_id, md5_file_b64
 from wandb.sdk.lib.paths import FilePathStr, LogicalPath, StrPath, URIStr
 from wandb.sdk.lib.runid import generate_id
 from wandb.sdk.mailbox import MailboxHandle

+from ._generated import (
+    ADD_ALIASES_GQL,
+    DELETE_ALIASES_GQL,
+    UPDATE_ARTIFACT_GQL,
+    ArtifactAliasInput,
+    ArtifactCollectionAliasInput,
+    TagInput,
+    UpdateArtifact,
+)
+
 reset_path = util.vendor_setup()

 from wandb_gql import gql  # noqa: E402
@@ -110,6 +133,7 @@ class Artifact:
         incremental: Use `Artifact.new_draft()` method instead to modify an
             existing artifact.
         use_as: W&B Launch specific parameter. Not recommended for general use.
+        is_link: Boolean indication of if the artifact is a linked artifact(`True`) or source artifact(`False`).

     Returns:
         An `Artifact` object.
@@ -161,12 +185,14 @@ class Artifact:
         self._sequence_client_id: str = runid.generate_id(128)
         self._entity: str | None = None
         self._project: str | None = None
-        self._name: str = name  # includes version after saving
+        self._name: str = validate_artifact_name(name)  # includes version after saving
         self._version: str | None = None
         self._source_entity: str | None = None
         self._source_project: str | None = None
         self._source_name: str = name  # includes version after saving
         self._source_version: str | None = None
+        self._source_artifact: Artifact | None = None
+        self._is_link: bool = False
         self._type: str = type
         self._description: str | None = description
         self._metadata: dict = self._normalize_metadata(metadata)
@@ -189,6 +215,8 @@ class Artifact:
         self._created_at: str | None = None
         self._updated_at: str | None = None
         self._final: bool = False
+        self._history_step: int | None = None
+        self._linked_artifacts: list[Artifact] = []

         # Cache.
         artifact_instance_cache[self._client_id] = self
@@ -308,8 +336,13 @@ class Artifact:
         artifact_instance_cache[artifact.id] = artifact
         return artifact

+    # TODO: Eventually factor out is_link. Have to currently use it since some forms of fetching the artifact
+    # doesn't make it clear if the artifact is a link or not and have to manually set it.
     def _assign_attrs(
-        self,
+        self,
+        attrs: dict[str, Any],
+        aliases: list[str] | None = None,
+        is_link: bool | None = None,
     ) -> None:
         """Update this Artifact's attributes using the server response."""
         self._id = attrs["id"]
@@ -331,6 +364,17 @@ class Artifact:
         if self._name is None:
             self._name = self._source_name

+        # TODO: Refactor artifact query to fetch artifact via membership instead
+        # and get the collection type
+        if is_link is None:
+            self._is_link = (
+                self._entity != self._source_entity
+                or self._project != self._source_project
+                or self._name != self._source_name
+            )
+        else:
+            self._is_link = is_link
+
         self._type = attrs["artifactType"]["name"]
         self._description = attrs["description"]

@@ -380,12 +424,12 @@ class Artifact:
         self._aliases = other_aliases
         self._saved_aliases = copy(other_aliases)

-        tags = [obj["name"] for obj in attrs.get("tags"
+        tags = [obj["name"] for obj in (attrs.get("tags") or [])]
         self._tags = tags
         self._saved_tags = copy(tags)

         metadata_str = attrs["metadata"]
-        self.
+        self._metadata = self._normalize_metadata(
             json.loads(metadata_str) if metadata_str else {}
         )

@@ -409,6 +453,7 @@ class Artifact:
         self._file_count = attrs["fileCount"]
         self._created_at = attrs["createdAt"]
         self._updated_at = attrs["updatedAt"]
+        self._history_step = attrs.get("historyStep", None)

     @ensure_logged
     def new_draft(self) -> Artifact:
@@ -458,35 +503,48 @@ class Artifact:
     @property
     @ensure_logged
     def entity(self) -> str:
-        """The name of the entity
+        """The name of the entity that the artifact collection belongs to.
+
+        If the artifact is a link, the entity will be the entity of the linked artifact.
+        """
         assert self._entity is not None
         return self._entity

     @property
     @ensure_logged
     def project(self) -> str:
-        """The name of the project
+        """The name of the project that the artifact collection belongs to.
+
+        If the artifact is a link, the project will be the project of the linked artifact.
+        """
         assert self._project is not None
         return self._project

     @property
     def name(self) -> str:
-        """The artifact name and version
+        """The artifact name and version of the artifact.

-        A string with the format `{collection}:{alias}`.
-
+        A string with the format `{collection}:{alias}`. If fetched before an artifact is logged/saved, the name won't contain the alias.
+        If the artifact is a link, the name will be the name of the linked artifact.
         """
         return self._name

     @property
     def qualified_name(self) -> str:
-        """The entity/project/name of the
+        """The entity/project/name of the artifact.
+
+        If the artifact is a link, the qualified name will be the qualified name of the linked artifact path.
+        """
         return f"{self.entity}/{self.project}/{self.name}"

     @property
     @ensure_logged
     def version(self) -> str:
-        """The artifact's version
+        """The artifact's version.
+
+        A string with the format `v{number}`.
+        If the artifact is a link artifact, the version will be from the linked collection.
+        """
         assert self._version is not None
         return self._version

@@ -509,35 +567,35 @@ class Artifact:
     @property
     @ensure_logged
     def source_entity(self) -> str:
-        """The name of the entity of the
+        """The name of the entity of the source artifact."""
         assert self._source_entity is not None
         return self._source_entity

     @property
     @ensure_logged
     def source_project(self) -> str:
-        """The name of the project of the
+        """The name of the project of the source artifact."""
         assert self._source_project is not None
         return self._source_project

     @property
     def source_name(self) -> str:
-        """The artifact name and version
+        """The artifact name and version of the source artifact.

-        A string with the format `{
+        A string with the format `{source_collection}:{alias}`. Before the artifact is saved,
         contains only the name since the version is not yet known.
         """
         return self._source_name

     @property
     def source_qualified_name(self) -> str:
-        """The
+        """The source_entity/source_project/source_name of the source artifact."""
         return f"{self.source_entity}/{self.source_project}/{self.source_name}"

     @property
     @ensure_logged
     def source_version(self) -> str:
-        """The artifact's version
+        """The source artifact's version.

         A string with the format `v{number}`.
         """
@@ -547,12 +605,60 @@ class Artifact:
     @property
     @ensure_logged
     def source_collection(self) -> ArtifactCollection:
-        """The artifact's
+        """The artifact's source collection.
+
+        The source collection is the collection that the artifact was logged from.
+        """
         base_name = self.source_name.split(":")[0]
         return ArtifactCollection(
             self._client, self.source_entity, self.source_project, base_name, self.type
         )

+    @property
+    def is_link(self) -> bool:
+        """Boolean flag indicating if the artifact is a link artifact.
+
+        True: The artifact is a link artifact to a source artifact.
+        False: The artifact is a source artifact.
+        """
+        return self._is_link
+
+    @property
+    @ensure_logged
+    def linked_artifacts(self) -> list[Artifact]:
+        """Returns a list of all the linked artifacts of a source artifact.
+
+        If the artifact is a link artifact (`artifact.is_link == True`), it will return an empty list.
+        Limited to 500 results."""
+        if not self.is_link:
+            self._linked_artifacts = self._fetch_linked_artifacts()
+        return self._linked_artifacts
+
+    @property
+    @ensure_logged
+    def source_artifact(self) -> Artifact:
+        """Returns the source artifact. The source artifact is the original logged artifact.
+
+        If the artifact itself is a source artifact (`artifact.is_link == False`), it will return itself."""
+        if not self.is_link:
+            return self
+        if self._source_artifact is None:
+            try:
+                if self._client is None:
+                    raise ValueError("Client is not initialized")
+                artifact = self._from_name(
+                    entity=self.source_entity,
+                    project=self.source_project,
+                    name=self.source_name,
+                    client=self._client,
+                )
+                self._source_artifact = artifact
+            except Exception as e:
+                raise ValueError(
+                    f"Unable to fetch source artifact for linked artifact {self.name}"
+                ) from e
+        return self._source_artifact
+
     @property
     def type(self) -> str:
         """The artifact's type. Common types include `dataset` or `model`."""
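
The hunk above adds `is_link`, `linked_artifacts`, and `source_artifact` to the public `Artifact` API. A minimal sketch of how the new properties fit together; the artifact path below is a placeholder and an authenticated `wandb.Api()` client is assumed:

```python
import wandb

api = wandb.Api()
art = api.artifact("my-team/my-project/my-dataset:v0")  # placeholder path

if art.is_link:
    # A link (portfolio/registry member): resolve the original logged artifact.
    src = art.source_artifact
    print(f"link {art.qualified_name} -> source {src.qualified_name}")
else:
    # A source artifact: enumerate the links pointing at it (capped at 500).
    for link in art.linked_artifacts:
        print(f"linked as {link.qualified_name}")
```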
@@ -572,7 +678,7 @@ class Artifact:
         except AttributeError:
             return ""

-        if self.
+        if not self.is_link:
             return self._construct_standard_url(base_url)
         if is_artifact_registry_project(self.project):
             return self._construct_registry_url(base_url)
@@ -656,9 +762,15 @@ class Artifact:
         standardized team model or dataset card. In the W&B UI the
         description is rendered as markdown.

+        Editing the description will apply the changes to the source artifact and all linked artifacts associated with it.
+
         Args:
             description: Free text that offers a description of the artifact.
         """
+        if self.is_link:
+            wandb.termwarn(
+                "Editing the description of this linked artifact will edit the description for the source artifact and it's linked artifacts as well."
+            )
         self._description = description

     @property
@@ -677,10 +789,15 @@ class Artifact:
         the class distribution of a dataset.

         Note: There is currently a limit of 100 total keys.
+        Editing the metadata will apply the changes to the source artifact and all linked artifacts associated with it.

         Args:
             metadata: Structured data associated with the artifact.
         """
+        if self.is_link:
+            wandb.termwarn(
+                "Editing the metadata of this linked artifact will edit the metadata for the source artifact and it's linked artifacts as well."
+            )
         self._metadata = self._normalize_metadata(metadata)

     @property
@@ -721,6 +838,12 @@ class Artifact:
         if self.type == "wandb-history":
             raise ValueError("Cannot set artifact TTL for type wandb-history")

+        if self.is_link:
+            raise ValueError(
+                "Cannot set TTL for link artifact. "
+                "Unlink the artifact first then set the TTL for the source artifact"
+            )
+
         self._ttl_changed = True
         if isinstance(ttl, ArtifactTTL):
             if ttl == ArtifactTTL.INHERIT:
@@ -765,7 +888,14 @@ class Artifact:
     @tags.setter
     @ensure_logged
     def tags(self, tags: list[str]) -> None:
-        """Set the tags associated with this artifact.
+        """Set the tags associated with this artifact.
+
+        Editing tags will apply the changes to the source artifact and all linked artifacts associated with it.
+        """
+        if self.is_link:
+            wandb.termwarn(
+                "Editing tags will apply the changes to the source artifact and all linked artifacts associated with it."
+            )
         self._tags = validate_tags(tags)

     @property
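
Taken together, the setter hunks above mean that description, metadata, and tag edits made through a link artifact propagate to the source artifact and every other link (with a `termwarn`), while TTL must be set on the source. A hedged sketch; the artifact path and values are placeholders:

```python
from datetime import timedelta

import wandb

api = wandb.Api()
# Placeholder path pointing at a link artifact (a portfolio/registry member).
linked = api.artifact("my-team/my-portfolio/my-model:production")

# Each of these edits is applied to the source artifact and all of its links;
# wandb prints a warning because the handle is a link.
linked.description = "Validated on the May holdout set"
linked.metadata = {**linked.metadata, "reviewed": True}
linked.tags = [*linked.tags, "reviewed"]
linked.save()

# TTL is the exception: setting it on a link raises ValueError.
try:
    linked.ttl = timedelta(days=30)
except ValueError as err:
    print(err)
```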
@@ -886,6 +1016,26 @@ class Artifact:
         assert self._created_at is not None
         return self._updated_at or self._created_at

+    @property
+    @ensure_logged
+    def history_step(self) -> int | None:
+        """The nearest step at which history metrics were logged for the source run of the artifact.
+
+        Examples:
+            ```python
+            run = artifact.logged_by()
+            if run and (artifact.history_step is not None):
+                history = run.sample_history(
+                    min_step=artifact.history_step,
+                    max_step=artifact.history_step + 1,
+                    keys=["my_metric"],
+                )
+            ```
+        """
+        if self._history_step is None:
+            return None
+        return max(0, self._history_step - 1)
+
     # State management.

     def finalize(self) -> None:
@@ -931,7 +1081,12 @@ class Artifact:
         with telemetry.context() as tel:
             tel.feature.artifact_incremental = True

-
+        singleton = wandb_setup._setup(start_service=False)
+
+        if run := singleton.most_recent_active_run:
+            # TODO: Deprecate and encourage explicit log_artifact().
+            run.log_artifact(self)
+        else:
             if settings is None:
                 settings = wandb.Settings(silent="true")
             with wandb.init(  # type: ignore
@@ -946,8 +1101,6 @@ class Artifact:
             with telemetry.context(run=run) as tel:
                 tel.feature.artifact_incremental = True
             run.log_artifact(self)
-        else:
-            wandb.run.log_artifact(self)

     def _set_save_handle(
         self,
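
The two hunks above change how `Artifact.save()` finds a run: it now asks the `wandb_setup` singleton for the most recent active run instead of reading `wandb.run`, and only falls back to a temporary silent run when none exists. A minimal usage sketch; the project and file names are placeholders:

```python
import wandb

# Create a small file so the sketch is self-contained.
with open("data.csv", "w") as f:
    f.write("a,b\n1,2\n")

artifact = wandb.Artifact("my-dataset", type="dataset")  # placeholder name
artifact.add_file("data.csv")

with wandb.init(project="my-project") as run:  # placeholder project
    # With an active run, save() logs the artifact to that run.
    artifact.save()

# Called outside any run, save() would instead start a temporary silent run.
```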
@@ -1005,11 +1158,21 @@ class Artifact:
         except LookupError:
             raise ValueError(f"Unable to fetch artifact with id: {artifact_id!r}")
         else:
-
+            # _populate_after_save is only called on source artifacts, not linked artifacts
+            # We have to manually set is_link because we aren't fetching the collection the artifact.
+            # That requires greater refactoring for commitArtifact to return the artifact collection type.
+            self._assign_attrs(attrs, is_link=False)

     @normalize_exceptions
     def _update(self) -> None:
         """Persists artifact changes to the wandb backend."""
+        if self._client is None:
+            raise RuntimeError("Client not initialized for artifact mutations")
+
+        entity = self.entity
+        project = self.project
+        collection = self.name.split(":")[0]
+
         aliases = None
         introspect_query = gql(
             """
@@ -1023,180 +1186,109 @@ class Artifact:
             }
             """
         )
-
-
-        if
-
-
-
-
-
-
-
-
-
-            )
-
-
-
-
-
-
-
-
-
-
-
-                "
-                "aliases"
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            )
-
-
-
-
-            }
-
-
-
-
-            self._client.execute(
-                delete_mutation,
-                variable_values={
-                    "artifactID": self.id,
-                    "aliases": [
-                        {
-                            "entityName": self._entity,
-                            "projectName": self._project,
-                            "artifactCollectionName": self._name.split(":")[0],
-                            "alias": alias,
-                        }
-                        for alias in aliases_to_delete
-                    ],
-                },
-            )
-            self._saved_aliases = copy(self._aliases)
+
+        data = self._client.execute(introspect_query)
+        if data.get("AddAliasesInputInfoType"):  # wandb backend version >= 0.13.0
+            alias_props = {
+                "entity_name": entity,
+                "project_name": project,
+                "artifact_collection_name": collection,
+            }
+            if aliases_to_add := (set(self.aliases) - set(self._saved_aliases)):
+                add_mutation = gql(ADD_ALIASES_GQL)
+                add_alias_inputs = [
+                    ArtifactCollectionAliasInput(**alias_props, alias=alias)
+                    for alias in aliases_to_add
+                ]
+                try:
+                    self._client.execute(
+                        add_mutation,
+                        variable_values={
+                            "artifactID": self.id,
+                            "aliases": [a.model_dump() for a in add_alias_inputs],
+                        },
+                    )
+                except CommError as e:
+                    raise CommError(
+                        "You do not have permission to add"
+                        f" {'at least one of the following aliases' if len(aliases_to_add) > 1 else 'the following alias'}"
+                        f" to this artifact: {aliases_to_add}"
+                    ) from e
+
+            if aliases_to_delete := (set(self._saved_aliases) - set(self.aliases)):
+                delete_mutation = gql(DELETE_ALIASES_GQL)
+                delete_alias_inputs = [
+                    ArtifactCollectionAliasInput(**alias_props, alias=alias)
+                    for alias in aliases_to_delete
+                ]
+                try:
+                    self._client.execute(
+                        delete_mutation,
+                        variable_values={
+                            "artifactID": self.id,
+                            "aliases": [a.model_dump() for a in delete_alias_inputs],
+                        },
+                    )
+                except CommError as e:
+                    raise CommError(
+                        f"You do not have permission to delete"
+                        f" {'at least one of the following aliases' if len(aliases_to_delete) > 1 else 'the following alias'}"
+                        f" from this artifact: {aliases_to_delete}"
+                    ) from e
+
+            self._saved_aliases = copy(self.aliases)
+
         else:  # wandb backend version < 0.13.0
             aliases = [
-
-
-
-
-                for alias in self._aliases
+                ArtifactAliasInput(
+                    artifact_collection_name=collection, alias=alias
+                ).model_dump()
+                for alias in self.aliases
             ]

-
-
-                $artifactID: ID!
-                $description: String
-                $metadata: JSONString
-                _TTL_DURATION_SECONDS_TYPE_
-                _TAGS_TO_ADD_TYPE_
-                _TAGS_TO_DELETE_TYPE_
-                $aliases: [ArtifactAliasInput!]
-            ) {
-                updateArtifact(
-                    input: {
-                        artifactID: $artifactID,
-                        description: $description,
-                        metadata: $metadata,
-                        _TTL_DURATION_SECONDS_VALUE_
-                        _TAGS_TO_ADD_VALUE_
-                        _TAGS_TO_DELETE_VALUE_
-                        aliases: $aliases
-                    }
-                ) {
-                    artifact {
-                        ...ArtifactFragment
-                    }
-                }
-            }
-            """ + _gql_artifact_fragment()
+        omit_fields = omit_artifact_fields(api=InternalApi())
+        omit_variables = set()

-
-        if "ttlIsInherited" in fields:
-            mutation_template = (
-                mutation_template.replace(
-                    "_TTL_DURATION_SECONDS_TYPE_",
-                    "$ttlDurationSeconds: Int64",
-                )
-                .replace(
-                    "_TTL_DURATION_SECONDS_VALUE_",
-                    "ttlDurationSeconds: $ttlDurationSeconds",
-                )
-                .replace(
-                    "_TTL_DURATION_SECONDS_FIELDS_",
-                    "ttlDurationSeconds ttlIsInherited",
-                )
-            )
-        else:
+        if {"ttlIsInherited", "ttlDurationSeconds"} & omit_fields:
            if self._ttl_changed:
                termwarn(
                    "Server not compatible with setting Artifact TTLs, please upgrade the server to use Artifact TTL"
                )
-            mutation_template = (
-                mutation_template.replace("_TTL_DURATION_SECONDS_TYPE_", "")
-                .replace("_TTL_DURATION_SECONDS_VALUE_", "")
-                .replace("_TTL_DURATION_SECONDS_FIELDS_", "")
-            )

-
-
-
-
-
-
-
-                .replace("_TAGS_TO_DELETE_TYPE_", "$tagsToDelete: [TagInput!]")
-                .replace("_TAGS_TO_ADD_VALUE_", "tagsToAdd: $tagsToAdd")
-                .replace("_TAGS_TO_DELETE_VALUE_", "tagsToDelete: $tagsToDelete")
-            )
-        else:
-            if tags_to_add or tags_to_delete:
+            omit_variables |= {"ttlDurationSeconds"}
+
+        tags_to_add = validate_tags(set(self.tags) - set(self._saved_tags))
+        tags_to_del = validate_tags(set(self._saved_tags) - set(self.tags))
+
+        if {"tags"} & omit_fields:
+            if tags_to_add or tags_to_del:
                termwarn(
                    "Server not compatible with Artifact tags. "
                    "To use Artifact tags, please upgrade the server to v0.85 or higher."
                )
-            mutation_template = (
-                mutation_template.replace("_TAGS_TO_ADD_TYPE_", "")
-                .replace("_TAGS_TO_DELETE_TYPE_", "")
-                .replace("_TAGS_TO_ADD_VALUE_", "")
-                .replace("_TAGS_TO_DELETE_VALUE_", "")
-            )

-
-        assert self._client is not None
+            omit_variables |= {"tagsToAdd", "tagsToDelete"}

-
-
-            mutation,
-            variable_values={
-                "artifactID": self.id,
-                "description": self.description,
-                "metadata": util.json_dumps_safer(self.metadata),
-                "ttlDurationSeconds": ttl_duration_input,
-                "aliases": aliases,
-                "tagsToAdd": [{"tagName": tag_name} for tag_name in tags_to_add],
-                "tagsToDelete": [{"tagName": tag_name} for tag_name in tags_to_delete],
-            },
+        mutation = gql_compat(
+            UPDATE_ARTIFACT_GQL, omit_variables=omit_variables, omit_fields=omit_fields
         )
-
-
+
+        gql_vars = {
+            "artifactID": self.id,
+            "description": self.description,
+            "metadata": util.json_dumps_safer(self.metadata),
+            "ttlDurationSeconds": self._ttl_duration_seconds_to_gql(),
+            "aliases": aliases,
+            "tagsToAdd": [TagInput(tag_name=t).model_dump() for t in tags_to_add],
+            "tagsToDelete": [TagInput(tag_name=t).model_dump() for t in tags_to_del],
+        }
+
+        data = self._client.execute(mutation, variable_values=gql_vars)
+
+        result = UpdateArtifact.model_validate(data).update_artifact
+        if not (result and (artifact := result.artifact)):
+            raise ValueError("Unable to parse updateArtifact response")
+        self._assign_attrs(artifact.model_dump())

         self._ttl_changed = False  # Reset after updating artifact

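
The rewritten `_update()` above swaps the old string-templated GraphQL for generated operations (`UPDATE_ARTIFACT_GQL`, `ADD_ALIASES_GQL`, `DELETE_ALIASES_GQL`) plus `gql_compat`/`omit_artifact_fields`, which strip fields and variables that older servers don't support. From the public API this is still driven by `Artifact.save()`; a hedged sketch with a placeholder artifact path:

```python
import wandb

api = wandb.Api()
art = api.artifact("my-team/my-project/my-dataset:v3")  # placeholder path

# Local edits are diffed against the last saved state...
art.description = "Cleaned and deduplicated"
art.metadata = {**art.metadata, "rows": 10_000}
art.tags = [*art.tags, "clean"]
art.aliases.append("staging")

# ...and save() issues the updateArtifact / addAliases / deleteAliases
# mutations shown above, skipping anything the server does not support.
art.save()
```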
@@ -1749,6 +1841,7 @@ class Artifact:
         allow_missing_references: bool = False,
         skip_cache: bool | None = None,
         path_prefix: StrPath | None = None,
+        multipart: bool | None = None,
     ) -> FilePathStr:
         """Download the contents of the artifact to the specified root directory.

@@ -1765,21 +1858,20 @@ class Artifact:
                 specified download directory.
             path_prefix: If specified, only files with a path that starts with the given
                 prefix will be downloaded. Uses unix format (forward slashes).
+            multipart: If set to `None` (default), the artifact will be downloaded
+                in parallel using multipart download if individual file size is greater than
+                2GB. If set to `True` or `False`, the artifact will be downloaded in
+                parallel or serially regardless of the file size.

         Returns:
             The path to the downloaded contents.

         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
-            RuntimeError: If the artifact is attempted to be downloaded in offline mode.
         """
         root = FilePathStr(str(root or self._default_root()))
         self._add_download_root(root)

-        # TODO: we need a better way to check for offline mode across the app, as this is an anti-pattern
-        if env.is_offline() or util._is_offline():
-            raise RuntimeError("Cannot download artifacts in offline mode.")
-
         # TODO: download artifacts using core when implemented
         # if is_require_core():
         #     return self._download_using_core(
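
The hunk above adds a `multipart` flag to `Artifact.download()` (parallel multipart transfer per file, automatic above 2GB by default) and drops the offline-mode `RuntimeError`. A brief sketch; the artifact path and root directory are placeholders:

```python
import wandb

api = wandb.Api()
art = api.artifact("my-team/my-project/model-weights:latest")  # placeholder path

# Default: multipart download is used automatically for files larger than 2GB.
path = art.download()

# Force multipart parallel downloads, or pass False to always download serially.
path = art.download(root="./weights", multipart=True)
print(path)
```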
@@ -1793,6 +1885,7 @@ class Artifact:
             allow_missing_references=allow_missing_references,
             skip_cache=skip_cache,
             path_prefix=path_prefix,
+            multipart=multipart,
         )

     def _download_using_core(
@@ -1806,6 +1899,7 @@ class Artifact:

         from wandb.sdk.backend.backend import Backend

+        # TODO: Create a special stream instead of relying on an existing run.
         if wandb.run is None:
             wl = wandb.setup()

@@ -1861,6 +1955,7 @@ class Artifact:
         allow_missing_references: bool = False,
         skip_cache: bool | None = None,
         path_prefix: StrPath | None = None,
+        multipart: bool | None = None,
     ) -> FilePathStr:
         nfiles = len(self.manifest.entries)
         size = sum(e.size or 0 for e in self.manifest.entries.values())
@@ -1877,6 +1972,7 @@ class Artifact:

         def _download_entry(
             entry: ArtifactManifestEntry,
+            executor: concurrent.futures.Executor,
             api_key: str | None,
             cookies: dict | None,
             headers: dict | None,
@@ -1886,7 +1982,12 @@ class Artifact:
             _thread_local_api_settings.headers = headers

             try:
-                entry.download(
+                entry.download(
+                    root,
+                    skip_cache=skip_cache,
+                    executor=executor,
+                    multipart=multipart,
+                )
             except FileNotFoundError as e:
                 if allow_missing_references:
                     wandb.termwarn(str(e))
@@ -1897,14 +1998,14 @@ class Artifact:
                     return
             download_logger.notify_downloaded()

-        download_entry = partial(
-            _download_entry,
-            api_key=_thread_local_api_settings.api_key,
-            cookies=_thread_local_api_settings.cookies,
-            headers=_thread_local_api_settings.headers,
-        )
-
         with concurrent.futures.ThreadPoolExecutor(64) as executor:
+            download_entry = partial(
+                _download_entry,
+                executor=executor,
+                api_key=_thread_local_api_settings.api_key,
+                cookies=_thread_local_api_settings.cookies,
+                headers=_thread_local_api_settings.headers,
+            )
             active_futures = set()
             has_next_page = True
             cursor = None
@@ -1940,8 +2041,9 @@ class Artifact:
             hours = int(delta // 3600)
             minutes = int((delta - hours * 3600) // 60)
             seconds = delta - hours * 3600 - minutes * 60
+            speed = size / 1024 / 1024 / delta
             termlog(
-                f"Done. {hours}:{minutes}:{seconds:.1f}",
+                f"Done. {hours}:{minutes}:{seconds:.1f} ({speed:.1f}MB/s)",
                 prefix=False,
             )
         return FilePathStr(root)
@@ -2169,6 +2271,8 @@ class Artifact:
         If called on a linked artifact (i.e. a member of a portfolio collection): only the link is deleted, and the
         source artifact is unaffected.

+        Use `artifact.unlink()` instead of `artifact.delete()` to remove a link between a source artifact and a linked artifact.
+
         Args:
             delete_aliases: If set to `True`, deletes all aliases associated with the artifact.
                 Otherwise, this raises an exception if the artifact has existing
@@ -2178,10 +2282,13 @@ class Artifact:
         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
         """
-        if self.
-
-
+        if self.is_link:
+            wandb.termwarn(
+                "Deleting a link artifact will only unlink the artifact from the source artifact and not delete the source artifact and the data of the source artifact."
+            )
             self._unlink()
+        else:
+            self._delete(delete_aliases)

     @normalize_exceptions
     def _delete(self, delete_aliases: bool = False) -> None:
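
Per the hunks above, calling `delete()` on a link artifact now warns and only removes the link; `unlink()` is the explicit way to do that. A short sketch with a placeholder path:

```python
import wandb

api = wandb.Api()
art = api.artifact("my-team/my-portfolio/my-model:v2")  # placeholder path

if art.is_link:
    # Removes only the link; the source artifact and its data are untouched.
    art.unlink()
else:
    # Deletes the source artifact itself, including its aliases.
    art.delete(delete_aliases=True)
```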
@@ -2209,7 +2316,9 @@ class Artifact:
         )

     @normalize_exceptions
-    def link(
+    def link(
+        self, target_path: str, aliases: list[str] | None = None
+    ) -> Artifact | None:
         """Link this artifact to a portfolio (a promoted collection of artifacts).

         Args:
@@ -2226,17 +2335,31 @@ class Artifact:

         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
+
+        Returns:
+            The linked artifact if linking was successful, otherwise None.
         """
-        if
-
+        if self.is_link:
+            wandb.termwarn(
+                "Linking to a link artifact will result in directly linking to the source artifact of that link artifact."
+            )
+
+        singleton = wandb_setup._setup(start_service=False)
+
+        if run := singleton.most_recent_active_run:
+            # TODO: Deprecate and encourage explicit link_artifact().
+            return run.link_artifact(self, target_path, aliases)
+
+        else:
+            with wandb.init(
                 entity=self._source_entity,
                 project=self._source_project,
                 job_type="auto",
                 settings=wandb.Settings(silent="true"),
             ) as run:
-                run.link_artifact(self, target_path, aliases)
-
-
+                return run.link_artifact(self, target_path, aliases)
+
+        return None

     @ensure_logged
     def unlink(self) -> None:
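
With the change above, `link()` returns the resulting linked artifact (or `None`) and reuses the most recent active run when one exists. A sketch; the source and target paths are placeholders:

```python
import wandb

api = wandb.Api()
art = api.artifact("my-team/my-project/my-model:v4")  # placeholder path

# Target is "{entity}/{project}/{portfolio collection}" (placeholder below).
linked = art.link("my-team/model-registry/production-models", aliases=["prod"])

if linked is not None:
    print(f"linked as {linked.qualified_name} ({linked.version})")
```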
@@ -2247,7 +2370,7 @@ class Artifact:
             ValueError: If the artifact is not linked, i.e. it is not a member of a portfolio collection.
         """
         # Fail early if this isn't a linked artifact to begin with
-        if self.
+        if not self.is_link:
             raise ValueError(
                 f"Artifact {self.qualified_name!r} is not a linked artifact and cannot be unlinked. "
                 f"To delete it, use {self.delete.__qualname__!r} instead."
@@ -2271,17 +2394,22 @@ class Artifact:
         """
         )
         assert self._client is not None
-
-
-
-
-
-
-
+        try:
+            self._client.execute(
+                mutation,
+                variable_values={
+                    "artifactID": self.id,
+                    "artifactPortfolioID": self.collection.id,
+                },
+            )
+        except CommError as e:
+            raise CommError(
+                f"You do not have permission to unlink the artifact {self.qualified_name}"
+            ) from e

     @ensure_logged
     def used_by(self) -> list[Run]:
-        """Get a list of the runs that have used this artifact.
+        """Get a list of the runs that have used this artifact and its linked artifacts.

         Returns:
             A list of `Run` objects.
@@ -2443,6 +2571,82 @@ class Artifact:
             return INHERIT
         return self._ttl_duration_seconds or DISABLED

+    def _fetch_linked_artifacts(self) -> list[Artifact]:
+        """Fetches all linked artifacts from the server."""
+        if self.id is None:
+            raise ValueError(
+                "Unable to find any artifact memberships for artifact without an ID"
+            )
+        if self._client is None:
+            raise ValueError("Client is not initialized")
+        response = self._client.execute(
+            gql_compat(FETCH_LINKED_ARTIFACTS_GQL),
+            variable_values={"artifactID": self.id},
+        )
+        result = FetchLinkedArtifacts.model_validate(response)
+
+        if not (
+            (artifact := result.artifact)
+            and (memberships := artifact.artifact_memberships)
+            and (membership_edges := memberships.edges)
+        ):
+            raise ValueError("Unable to find any artifact memberships for artifact")
+
+        linked_artifacts: deque[Artifact] = deque()
+        linked_nodes = (
+            node
+            for edge in membership_edges
+            if (
+                (node := edge.node)
+                and (col := node.artifact_collection)
+                and (col.typename__ == LINKED_ARTIFACT_COLLECTION_TYPE)
+            )
+        )
+        for node in linked_nodes:
+            # Trick for O(1) membership check that maintains order
+            alias_names = dict.fromkeys(a.alias for a in node.aliases)
+            version = f"v{node.version_index}"
+            aliases = (
+                [*alias_names, version]
+                if version not in alias_names
+                else [*alias_names]
+            )
+
+            if not (
+                node
+                and (col := node.artifact_collection)
+                and (proj := col.project)
+                and (proj.entity_name and proj.name)
+            ):
+                raise ValueError("Unable to fetch fields for linked artifact")
+
+            link_fields = _LinkArtifactFields(
+                entity_name=proj.entity_name,
+                project_name=proj.name,
+                name=f"{col.name}:{version}",
+                version=version,
+                aliases=aliases,
+            )
+            link = self._create_linked_artifact_using_source_artifact(link_fields)
+            linked_artifacts.append(link)
+        return list(linked_artifacts)
+
+    def _create_linked_artifact_using_source_artifact(
+        self,
+        link_fields: _LinkArtifactFields,
+    ) -> Artifact:
+        """Copies the source artifact to a linked artifact."""
+        linked_artifact = copy(self)
+        linked_artifact._version = link_fields.version
+        linked_artifact._aliases = link_fields.aliases
+        linked_artifact._saved_aliases = copy(link_fields.aliases)
+        linked_artifact._name = link_fields.name
+        linked_artifact._entity = link_fields.entity_name
+        linked_artifact._project = link_fields.project_name
+        linked_artifact._is_link = link_fields.is_link
+        linked_artifact._linked_artifacts = link_fields.linked_artifacts
+        return linked_artifact
+

 def _ttl_duration_seconds_from_gql(gql_ttl_duration_seconds: int | None) -> int | None:
     # If gql_ttl_duration_seconds is not positive, its indicating that TTL is DISABLED(-2)