wandb 0.22.2__py3-none-win_amd64.whl → 0.22.3__py3-none-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- wandb/__init__.py +1 -1
- wandb/__init__.pyi +2 -2
- wandb/_pydantic/__init__.py +8 -1
- wandb/_pydantic/base.py +54 -18
- wandb/_pydantic/field_types.py +8 -3
- wandb/_pydantic/pagination.py +46 -0
- wandb/_pydantic/utils.py +2 -2
- wandb/apis/public/api.py +24 -19
- wandb/apis/public/artifacts.py +259 -270
- wandb/apis/public/registries/_utils.py +40 -54
- wandb/apis/public/registries/registries_search.py +70 -85
- wandb/apis/public/registries/registry.py +173 -156
- wandb/apis/public/runs.py +27 -6
- wandb/apis/public/utils.py +43 -20
- wandb/automations/_generated/create_automation.py +2 -2
- wandb/automations/_generated/create_generic_webhook_integration.py +4 -4
- wandb/automations/_generated/delete_automation.py +2 -2
- wandb/automations/_generated/fragments.py +31 -52
- wandb/automations/_generated/generic_webhook_integrations_by_entity.py +3 -3
- wandb/automations/_generated/get_automations.py +3 -3
- wandb/automations/_generated/get_automations_by_entity.py +3 -3
- wandb/automations/_generated/input_types.py +9 -9
- wandb/automations/_generated/integrations_by_entity.py +3 -3
- wandb/automations/_generated/operations.py +6 -6
- wandb/automations/_generated/slack_integrations_by_entity.py +3 -3
- wandb/automations/_generated/update_automation.py +2 -2
- wandb/automations/_utils.py +3 -3
- wandb/automations/actions.py +3 -3
- wandb/automations/automations.py +6 -5
- wandb/bin/gpu_stats.exe +0 -0
- wandb/bin/wandb-core +0 -0
- wandb/cli/beta.py +8 -2
- wandb/cli/beta_leet.py +2 -1
- wandb/cli/beta_sync.py +1 -1
- wandb/errors/term.py +8 -8
- wandb/jupyter.py +0 -51
- wandb/old/settings.py +6 -6
- wandb/proto/v3/wandb_internal_pb2.py +351 -352
- wandb/proto/v3/wandb_server_pb2.py +38 -37
- wandb/proto/v3/wandb_settings_pb2.py +2 -2
- wandb/proto/v3/wandb_sync_pb2.py +19 -6
- wandb/proto/v4/wandb_internal_pb2.py +351 -352
- wandb/proto/v4/wandb_server_pb2.py +38 -37
- wandb/proto/v4/wandb_settings_pb2.py +2 -2
- wandb/proto/v4/wandb_sync_pb2.py +10 -6
- wandb/proto/v5/wandb_internal_pb2.py +351 -352
- wandb/proto/v5/wandb_server_pb2.py +38 -37
- wandb/proto/v5/wandb_settings_pb2.py +2 -2
- wandb/proto/v5/wandb_sync_pb2.py +10 -6
- wandb/proto/v6/wandb_internal_pb2.py +351 -352
- wandb/proto/v6/wandb_server_pb2.py +38 -37
- wandb/proto/v6/wandb_settings_pb2.py +2 -2
- wandb/proto/v6/wandb_sync_pb2.py +10 -6
- wandb/sdk/artifacts/_generated/__init__.py +96 -40
- wandb/sdk/artifacts/_generated/add_aliases.py +3 -3
- wandb/sdk/artifacts/_generated/add_artifact_collection_tags.py +26 -0
- wandb/sdk/artifacts/_generated/artifact_by_id.py +2 -2
- wandb/sdk/artifacts/_generated/artifact_by_name.py +3 -3
- wandb/sdk/artifacts/_generated/artifact_collection_membership_file_urls.py +27 -8
- wandb/sdk/artifacts/_generated/artifact_collection_membership_files.py +27 -8
- wandb/sdk/artifacts/_generated/artifact_created_by.py +7 -20
- wandb/sdk/artifacts/_generated/artifact_file_urls.py +19 -6
- wandb/sdk/artifacts/_generated/artifact_membership_by_name.py +26 -0
- wandb/sdk/artifacts/_generated/artifact_type.py +5 -5
- wandb/sdk/artifacts/_generated/artifact_used_by.py +8 -17
- wandb/sdk/artifacts/_generated/artifact_version_files.py +19 -8
- wandb/sdk/artifacts/_generated/delete_aliases.py +3 -3
- wandb/sdk/artifacts/_generated/delete_artifact.py +4 -4
- wandb/sdk/artifacts/_generated/delete_artifact_collection_tags.py +23 -0
- wandb/sdk/artifacts/_generated/delete_artifact_portfolio.py +4 -4
- wandb/sdk/artifacts/_generated/delete_artifact_sequence.py +4 -4
- wandb/sdk/artifacts/_generated/delete_registry.py +21 -0
- wandb/sdk/artifacts/_generated/fetch_artifact_manifest.py +8 -20
- wandb/sdk/artifacts/_generated/fetch_linked_artifacts.py +13 -35
- wandb/sdk/artifacts/_generated/fetch_org_info_from_entity.py +28 -0
- wandb/sdk/artifacts/_generated/fetch_registries.py +18 -8
- wandb/sdk/{projects → artifacts}/_generated/fetch_registry.py +4 -4
- wandb/sdk/artifacts/_generated/fragments.py +183 -333
- wandb/sdk/artifacts/_generated/input_types.py +133 -7
- wandb/sdk/artifacts/_generated/link_artifact.py +5 -5
- wandb/sdk/artifacts/_generated/operations.py +1053 -548
- wandb/sdk/artifacts/_generated/project_artifact_collection.py +9 -77
- wandb/sdk/artifacts/_generated/project_artifact_collections.py +21 -9
- wandb/sdk/artifacts/_generated/project_artifact_type.py +3 -3
- wandb/sdk/artifacts/_generated/project_artifact_types.py +19 -6
- wandb/sdk/artifacts/_generated/project_artifacts.py +7 -8
- wandb/sdk/artifacts/_generated/registry_collections.py +21 -9
- wandb/sdk/artifacts/_generated/registry_versions.py +20 -9
- wandb/sdk/artifacts/_generated/rename_registry.py +25 -0
- wandb/sdk/artifacts/_generated/run_input_artifacts.py +5 -9
- wandb/sdk/artifacts/_generated/run_output_artifacts.py +5 -9
- wandb/sdk/artifacts/_generated/type_info.py +2 -2
- wandb/sdk/artifacts/_generated/unlink_artifact.py +3 -5
- wandb/sdk/artifacts/_generated/update_artifact.py +3 -3
- wandb/sdk/artifacts/_generated/update_artifact_collection_type.py +28 -0
- wandb/sdk/artifacts/_generated/update_artifact_portfolio.py +7 -16
- wandb/sdk/artifacts/_generated/update_artifact_sequence.py +7 -16
- wandb/sdk/artifacts/_generated/upsert_registry.py +25 -0
- wandb/sdk/artifacts/_gqlutils.py +170 -6
- wandb/sdk/artifacts/_models/__init__.py +9 -0
- wandb/sdk/artifacts/_models/artifact_collection.py +109 -0
- wandb/sdk/artifacts/_models/manifest.py +26 -0
- wandb/sdk/artifacts/_models/pagination.py +26 -0
- wandb/sdk/artifacts/_models/registry.py +100 -0
- wandb/sdk/artifacts/_validators.py +45 -27
- wandb/sdk/artifacts/artifact.py +220 -215
- wandb/sdk/artifacts/artifact_file_cache.py +1 -1
- wandb/sdk/artifacts/artifact_manifest.py +37 -32
- wandb/sdk/artifacts/artifact_manifest_entry.py +80 -125
- wandb/sdk/artifacts/artifact_manifests/artifact_manifest_v1.py +43 -61
- wandb/sdk/artifacts/storage_handlers/gcs_handler.py +8 -6
- wandb/sdk/data_types/image.py +2 -2
- wandb/sdk/interface/interface.py +72 -64
- wandb/sdk/interface/interface_queue.py +27 -18
- wandb/sdk/interface/interface_shared.py +61 -23
- wandb/sdk/interface/interface_sock.py +9 -5
- wandb/sdk/internal/_generated/server_features_query.py +4 -4
- wandb/sdk/launch/inputs/schema.py +13 -10
- wandb/sdk/lib/apikey.py +8 -12
- wandb/sdk/lib/asyncio_compat.py +1 -1
- wandb/sdk/lib/asyncio_manager.py +5 -5
- wandb/sdk/lib/console_capture.py +38 -30
- wandb/sdk/lib/progress.py +159 -64
- wandb/sdk/lib/retry.py +3 -2
- wandb/sdk/lib/service/service_connection.py +2 -2
- wandb/sdk/lib/wb_logging.py +2 -1
- wandb/sdk/mailbox/mailbox.py +1 -1
- wandb/sdk/wandb_init.py +10 -13
- wandb/sdk/wandb_run.py +9 -46
- wandb/sdk/wandb_settings.py +102 -19
- {wandb-0.22.2.dist-info → wandb-0.22.3.dist-info}/METADATA +2 -1
- {wandb-0.22.2.dist-info → wandb-0.22.3.dist-info}/RECORD +135 -134
- wandb/sdk/artifacts/_generated/artifact_via_membership_by_name.py +0 -26
- wandb/sdk/artifacts/_generated/create_artifact_collection_tag_assignments.py +0 -36
- wandb/sdk/artifacts/_generated/delete_artifact_collection_tag_assignments.py +0 -25
- wandb/sdk/artifacts/_generated/move_artifact_collection.py +0 -35
- wandb/sdk/projects/_generated/__init__.py +0 -26
- wandb/sdk/projects/_generated/delete_project.py +0 -22
- wandb/sdk/projects/_generated/enums.py +0 -4
- wandb/sdk/projects/_generated/fragments.py +0 -41
- wandb/sdk/projects/_generated/input_types.py +0 -13
- wandb/sdk/projects/_generated/operations.py +0 -88
- wandb/sdk/projects/_generated/rename_project.py +0 -27
- wandb/sdk/projects/_generated/upsert_registry_project.py +0 -27
- {wandb-0.22.2.dist-info → wandb-0.22.3.dist-info}/WHEEL +0 -0
- {wandb-0.22.2.dist-info → wandb-0.22.3.dist-info}/entry_points.txt +0 -0
- {wandb-0.22.2.dist-info → wandb-0.22.3.dist-info}/licenses/LICENSE +0 -0
wandb/sdk/artifacts/artifact.py
CHANGED
@@ -16,28 +16,20 @@ import time
 from collections import deque
 from concurrent.futures import Executor, ThreadPoolExecutor, as_completed
 from copy import copy
-from dataclasses import asdict,
+from dataclasses import asdict, replace
 from datetime import timedelta
 from itertools import filterfalse
 from pathlib import Path, PurePosixPath
-from typing import (
-    IO,
-    TYPE_CHECKING,
-    Any,
-    Final,
-    Iterator,
-    Literal,
-    Sequence,
-    Type,
-    final,
-)
+from typing import IO, TYPE_CHECKING, Any, Final, Iterator, Literal, Sequence, Type
 from urllib.parse import quote, urljoin, urlparse
 
 import requests
+from pydantic import NonNegativeInt
 
 import wandb
 from wandb import data_types, env
 from wandb._iterutils import one, unique_list
+from wandb._pydantic import from_json
 from wandb._strutils import nameof
 from wandb.apis.normalize import normalize_exceptions
 from wandb.apis.public import ArtifactCollection, ArtifactFiles, Run
@@ -49,7 +41,6 @@ from wandb.errors.term import termerror, termlog, termwarn
 from wandb.proto import wandb_internal_pb2 as pb
 from wandb.proto.wandb_deprecated import Deprecated
 from wandb.sdk import wandb_setup
-from wandb.sdk.artifacts.storage_policies._multipart import should_multipart_download
 from wandb.sdk.data_types._dtypes import Type as WBType
 from wandb.sdk.data_types._dtypes import TypeRegistry
 from wandb.sdk.internal.internal_api import Api as InternalApi
@@ -78,9 +69,9 @@ from ._generated import (
     ARTIFACT_COLLECTION_MEMBERSHIP_FILE_URLS_GQL,
     ARTIFACT_CREATED_BY_GQL,
     ARTIFACT_FILE_URLS_GQL,
+    ARTIFACT_MEMBERSHIP_BY_NAME_GQL,
     ARTIFACT_TYPE_GQL,
     ARTIFACT_USED_BY_GQL,
-    ARTIFACT_VIA_MEMBERSHIP_BY_NAME_GQL,
     DELETE_ALIASES_GQL,
     DELETE_ARTIFACT_GQL,
     FETCH_ARTIFACT_MANIFEST_GQL,
@@ -88,29 +79,39 @@ from ._generated import (
     LINK_ARTIFACT_GQL,
     UNLINK_ARTIFACT_GQL,
     UPDATE_ARTIFACT_GQL,
+    AddAliasesInput,
     ArtifactAliasInput,
     ArtifactByID,
     ArtifactByName,
-    ArtifactCollectionAliasInput,
     ArtifactCollectionMembershipFileUrls,
     ArtifactCreatedBy,
     ArtifactFileUrls,
     ArtifactFragment,
+    ArtifactMembershipByName,
+    ArtifactMembershipFragment,
     ArtifactType,
     ArtifactUsedBy,
-
+    DeleteAliasesInput,
+    DeleteArtifactInput,
     FetchArtifactManifest,
     FetchLinkedArtifacts,
-    FileUrlsFragment,
     LinkArtifact,
     LinkArtifactInput,
-
-    TagInput,
+    UnlinkArtifactInput,
     UpdateArtifact,
+    UpdateArtifactInput,
+)
+from ._gqlutils import (
+    omit_artifact_fields,
+    org_info_from_entity,
+    resolve_org_entity_name,
+    server_supports,
+    supports_enable_tracking_var,
+    type_info,
 )
-from .
+from ._models.pagination import FileWithUrlConnection
 from ._validators import (
-
+    LINKED_COLLECTION_TYPENAME,
     ArtifactPath,
     FullArtifactPath,
     _LinkArtifactFields,
@@ -140,6 +141,7 @@ from .exceptions import (
 )
 from .staging import get_staging_dir
 from .storage_handlers.gcs_handler import _GCSIsADirectoryError
+from .storage_policies._multipart import should_multipart_download
 
 reset_path = vendor_setup()
 
@@ -156,14 +158,6 @@ logger = logging.getLogger(__name__)
 _MB: Final[int] = 1024 * 1024
 
 
-@final
-@dataclass
-class _DeferredArtifactManifest:
-    """A lightweight wrapper around the manifest URL, used to indicate deferred loading of the actual manifest."""
-
-    url: str
-
-
 class Artifact:
     """Flexible and lightweight building block for dataset and model versioning.
 
@@ -212,14 +206,14 @@ class Artifact:
         use_as: str | None = None,
         storage_region: str | None = None,
     ) -> None:
+        from wandb.sdk.artifacts._internal_artifact import InternalArtifact
+
        if not re.match(r"^[a-zA-Z0-9_\-.]+$", name):
             raise ValueError(
                 f"Artifact name may only contain alphanumeric characters, dashes, "
-                f"underscores, and dots. Invalid name: {name}"
+                f"underscores, and dots. Invalid name: {name!r}"
             )
 
-        from wandb.sdk.artifacts._internal_artifact import InternalArtifact
-
         if incremental and not isinstance(self, InternalArtifact):
             termwarn("Using experimental arg `incremental`")
 
@@ -268,9 +262,17 @@ class Artifact:
         )
         self._use_as: str | None = None
         self._state: ArtifactState = ArtifactState.PENDING
-
-
+
+        # NOTE: These fields will only reflect the last fetched response from the server, if any.
+        # If the ArtifactManifest has already been fetched and/or populated locally, it should
+        # take priority for determining these values.
+        self._size: NonNegativeInt | None = None
+        self._digest: str | None = None
+
+        self._manifest: ArtifactManifest | None = ArtifactManifestV1(
+            storage_policy=make_storage_policy(storage_region)
         )
+
         self._commit_hash: str | None = None
         self._file_count: int | None = None
         self._created_at: str | None = None
@@ -301,7 +303,7 @@
         src_collection = artifact.artifact_sequence
         src_project = src_collection.project
 
-        entity_name = src_project.
+        entity_name = src_project.entity.name if src_project else ""
         project_name = src_project.name if src_project else ""
 
         name = f"{src_collection.name}:v{artifact.version_index}"
@@ -313,25 +315,19 @@
     def _membership_from_name(
         cls, *, path: FullArtifactPath, client: RetryingClient
     ) -> Artifact:
-        if not
-            pb.ServerFeature.PROJECT_ARTIFACT_COLLECTION_MEMBERSHIP
-        ):
+        if not server_supports(client, pb.PROJECT_ARTIFACT_COLLECTION_MEMBERSHIP):
             raise UnsupportedError(
-                "
+                "Querying for the artifact collection membership is not supported "
                 "by this version of wandb server. Consider updating to the latest version."
             )
 
         query = gql_compat(
-
+            ARTIFACT_MEMBERSHIP_BY_NAME_GQL,
             omit_fields=omit_artifact_fields(client),
         )
-        gql_vars = {
-            "entityName": path.prefix,
-            "projectName": path.project,
-            "name": path.name,
-        }
+        gql_vars = {"entity": path.prefix, "project": path.project, "name": path.name}
         data = client.execute(query, variable_values=gql_vars)
-        result =
+        result = ArtifactMembershipByName.model_validate(data)
 
         if not (project := result.project):
             raise ValueError(
@@ -352,15 +348,13 @@
         client: RetryingClient,
         enable_tracking: bool = False,
     ) -> Artifact:
-        if
-            pb.ServerFeature.PROJECT_ARTIFACT_COLLECTION_MEMBERSHIP
-        ):
+        if server_supports(client, pb.PROJECT_ARTIFACT_COLLECTION_MEMBERSHIP):
             return cls._membership_from_name(path=path, client=client)
 
         omit_vars = None if supports_enable_tracking_var(client) else {"enableTracking"}
         gql_vars = {
-            "
-            "
+            "entity": path.prefix,
+            "project": path.project,
             "name": path.name,
             "enableTracking": enable_tracking,
         }
@@ -385,7 +379,7 @@
     @classmethod
     def _from_membership(
         cls,
-        membership:
+        membership: ArtifactMembershipFragment,
         target: FullArtifactPath,
         client: RetryingClient,
     ) -> Artifact:
@@ -404,14 +398,15 @@
                 f"Your request was redirected to the corresponding artifact {name!r} in the new registry. "
                 f"Please update your paths to point to the migrated registry directly, '{proj.name}/{name}'."
             )
-            new_target = replace(target, prefix=proj.
+            new_target = replace(target, prefix=proj.entity.name, project=proj.name)
         else:
             new_target = copy(target)
 
         if not (artifact := membership.artifact):
             raise ValueError(f"Artifact {target.to_str()!r} not found in response")
 
-
+        aliases = [a.alias for a in membership.aliases]
+        return cls._from_attrs(new_target, artifact, client, aliases=aliases)
 
     @classmethod
     def _from_attrs(
@@ -451,7 +446,7 @@
         src_collection = art.artifact_sequence
         src_project = src_collection.project
 
-        self._source_entity = src_project.
+        self._source_entity = src_project.entity.name if src_project else ""
         self._source_project = src_project.name if src_project else ""
         self._source_name = f"{src_collection.name}:v{art.version_index}"
         self._source_version = f"v{art.version_index}"
@@ -488,7 +483,7 @@
             if (
                 (coll := art_alias.artifact_collection)
                 and (proj := coll.project)
-                and proj.
+                and proj.entity.name == entity
                 and proj.name == project
                 and coll.name == collection
             )
@@ -528,12 +523,10 @@
         )
 
         self._state = ArtifactState(art.state)
+        self._size = art.size
+        self._digest = art.digest
 
-        self._manifest =
-            _DeferredArtifactManifest(manifest.file.direct_url)
-            if (manifest := art.current_manifest)
-            else None
-        )
+        self._manifest = None
 
         self._commit_hash = art.commit_hash
         self._file_count = art.file_count
@@ -802,10 +795,8 @@
             return ""
 
         try:
-
-
-            )
-        except ValueError:
+            org_name = org_info_from_entity(self._client, self.entity).organization.name  # type: ignore[union-attr]
+        except (AttributeError, ValueError):
             return ""
 
         selection_path = quote(
@@ -813,7 +804,7 @@
         )
         return urljoin(
             base_url,
-            f"orgs/{
+            f"orgs/{org_name}/registry/{remove_registry_prefix(self.project)}?selectionPath={selection_path}&view=membership&version={self.version}",
         )
 
     def _construct_model_registry_url(self, base_url: str) -> str:
@@ -1028,33 +1019,31 @@
         The manifest lists all of its contents, and can't be changed once the artifact
         has been logged.
         """
-        if isinstance(self._manifest, _DeferredArtifactManifest):
-            # A deferred manifest URL flags a deferred download request,
-            # so fetch the manifest to override the placeholder object
-            self._manifest = self._load_manifest(self._manifest.url)
-            return self._manifest
-
         if self._manifest is None:
-
-
+            self._manifest = self._fetch_manifest()
+        return self._manifest
 
-
-
-
-
-                "name": self.name,
-            }
-            data = self._client.execute(query, variable_values=gql_vars)
-            result = FetchArtifactManifest.model_validate(data)
-            if not (
-                (project := result.project)
-                and (artifact := project.artifact)
-                and (manifest := artifact.current_manifest)
-            ):
-                raise ValueError("Failed to fetch artifact manifest")
-            self._manifest = self._load_manifest(manifest.file.direct_url)
+    def _fetch_manifest(self) -> ArtifactManifest:
+        """Fetch, parse, and load the full ArtifactManifest."""
+        if self._client is None:
+            raise RuntimeError("Client not initialized for artifact queries")
 
-
+        # From the GraphQL API, get the (expiring) directUrl for downloading the manifest.
+        gql_op = gql(FETCH_ARTIFACT_MANIFEST_GQL)
+        gql_vars = {"id": self.id}
+        data = self._client.execute(gql_op, variable_values=gql_vars)
+        result = FetchArtifactManifest.model_validate(data)
+
+        # Now fetch the actual manifest contents from the directUrl.
+        if (artifact := result.artifact) and (manifest := artifact.current_manifest):
+            # FIXME: For successive/repeated calls to `manifest`, figure out how to reuse a single
+            # `requests.Session` within the constraints of the current artifacts API. Right now,
+            # `requests.get()` creates a new session for _each_ fetch. This is wasteful and introduces a
+            # noticeable perf overhead when e.g. downloading many artifacts sequentially or concurrently.
+            response = requests.get(manifest.file.direct_url)
+            return ArtifactManifest.from_manifest_json(from_json(response.content))
+
+        raise ValueError("Failed to fetch artifact manifest")
 
     @property
     def digest(self) -> str:
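The new `_fetch_manifest` splits manifest loading into two steps: a GraphQL query resolves a short-lived `directUrl`, then the manifest JSON is downloaded from that URL. The FIXME in the diff notes that `requests.get()` opens a fresh session per download. The sketch below is illustrative only (the helper name and module-level session are assumptions, not part of the wandb API) and shows how a shared `requests.Session` could amortize connection setup across repeated manifest downloads:

```python
import requests

# Hypothetical helper, not wandb code: download and parse a manifest from an
# already-resolved directUrl, reusing one Session across calls.
_session = requests.Session()

def download_manifest_json(direct_url: str) -> dict:
    """Fetch the manifest contents from a signed, expiring directUrl."""
    response = _session.get(direct_url, timeout=30)
    response.raise_for_status()
    return response.json()
```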
@@ -1063,7 +1052,14 @@
         The digest is the checksum of the artifact's contents. If an artifact has the
         same digest as the current `latest` version, then `log_artifact` is a no-op.
         """
-
+        # Use the last fetched value of `Artifact.digest` ONLY if present AND the manifest has not been
+        # fetched and/or populated locally. Otherwise, use the manifest directly to recalculate
+        # the digest, as its contents may have been locally modified.
+        return (
+            self._digest
+            if (self._manifest is None) and (self._digest is not None)
+            else self.manifest.digest()
+        )
 
     @property
     def size(self) -> int:
@@ -1071,7 +1067,16 @@
 
         Includes any references tracked by this artifact.
         """
-
+        # Use the last fetched value of `Artifact.size` ONLY if present AND the manifest has not been
+        # fetched and/or populated locally. Otherwise, use the manifest directly to recalculate
+        # the size, as its contents may have been locally modified.
+        #
+        # NOTE: The `Artifact.size` GQL field includes references, `Artifact.storageBytes` does not.
+        return (
+            self._size
+            if (self._manifest is None) and (self._size is not None)
+            else self.manifest.size()
+        )
 
     @property
     @ensure_logged
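The reworked `size` and `digest` properties trust the server-reported value only while no manifest has been loaded locally; once a manifest exists, they recompute from it because local edits may have changed the contents. A minimal sketch of that cache-or-recompute pattern, with illustrative names rather than wandb's actual fields:

```python
class CachedOrComputed:
    """Toy example of the pattern used by the new size/digest properties."""

    def __init__(self) -> None:
        self._size: int | None = None            # last value reported by the server, if any
        self._manifest: list[int] | None = None  # locally loaded contents, if any

    @property
    def size(self) -> int:
        # Return the cached server value only while nothing has been loaded locally;
        # once a manifest exists, recompute from it.
        if self._manifest is None and self._size is not None:
            return self._size
        return sum(self._manifest or [])
```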
@@ -1240,75 +1245,31 @@
     @normalize_exceptions
     def _update(self) -> None:
         """Persists artifact changes to the wandb backend."""
-        if self._client is None:
+        if (client := self._client) is None:
             raise RuntimeError("Client not initialized for artifact mutations")
 
-        entity = self.entity
-        project = self.project
         collection = self.name.split(":")[0]
 
-
-
-        if type_info(self._client, "AddAliasesInput") is not None:
+        update_alias_inputs = None
+        if type_info(client, "AddAliasesInput") is not None:
             # wandb backend version >= 0.13.0
-
-
-
-
-
-
-
-
-                    ArtifactCollectionAliasInput(**alias_props, alias=alias)
-                    for alias in aliases_to_add
-                ]
-                try:
-                    self._client.execute(
-                        add_mutation,
-                        variable_values={
-                            "artifactID": self.id,
-                            "aliases": [a.model_dump() for a in add_alias_inputs],
-                        },
-                    )
-                except CommError as e:
-                    raise CommError(
-                        "You do not have permission to add"
-                        f" {'at least one of the following aliases' if len(aliases_to_add) > 1 else 'the following alias'}"
-                        f" to this artifact: {aliases_to_add}"
-                    ) from e
-
-            if aliases_to_delete := (set(self._saved_aliases) - set(self.aliases)):
-                delete_mutation = gql(DELETE_ALIASES_GQL)
-                delete_alias_inputs = [
-                    ArtifactCollectionAliasInput(**alias_props, alias=alias)
-                    for alias in aliases_to_delete
-                ]
-                try:
-                    self._client.execute(
-                        delete_mutation,
-                        variable_values={
-                            "artifactID": self.id,
-                            "aliases": [a.model_dump() for a in delete_alias_inputs],
-                        },
-                    )
-                except CommError as e:
-                    raise CommError(
-                        f"You do not have permission to delete"
-                        f" {'at least one of the following aliases' if len(aliases_to_delete) > 1 else 'the following alias'}"
-                        f" from this artifact: {aliases_to_delete}"
-                    ) from e
-
+            old_aliases, new_aliases = set(self._saved_aliases), set(self.aliases)
+            target = FullArtifactPath(
+                prefix=self.entity, project=self.project, name=collection
+            )
+            if added_aliases := (new_aliases - old_aliases):
+                self._add_aliases(added_aliases, target=target)
+            if deleted_aliases := (old_aliases - new_aliases):
+                self._delete_aliases(deleted_aliases, target=target)
             self._saved_aliases = copy(self.aliases)
-
-
-
-
-                    artifact_collection_name=collection, alias=alias
-                ).model_dump()
+        else:
+            # wandb backend version < 0.13.0
+            update_alias_inputs = [
+                {"artifactCollectionName": collection, "alias": alias}
                 for alias in self.aliases
             ]
 
-        omit_fields = omit_artifact_fields(
+        omit_fields = omit_artifact_fields(client)
         omit_variables = set()
 
         if {"ttlIsInherited", "ttlDurationSeconds"} & omit_fields:
@@ -1319,11 +1280,11 @@
 
             omit_variables |= {"ttlDurationSeconds"}
 
-
-
+        added_tags = validate_tags(set(self.tags) - set(self._saved_tags))
+        deleted_tags = validate_tags(set(self._saved_tags) - set(self.tags))
 
         if {"tags"} & omit_fields:
-            if
+            if added_tags or deleted_tags:
                 termwarn(
                     "Server not compatible with Artifact tags. "
                     "To use Artifact tags, please upgrade the server to v0.85 or higher."
@@ -1331,23 +1292,18 @@
 
             omit_variables |= {"tagsToAdd", "tagsToDelete"}
 
-
-
-
-
+        gql_op = gql_compat(UPDATE_ARTIFACT_GQL, omit_fields=omit_fields)
+        gql_input = UpdateArtifactInput(
+            artifact_id=self.id,
+            description=self.description,
+            metadata=json_dumps_safer(self.metadata),
+            ttl_duration_seconds=self._ttl_duration_seconds_to_gql(),
+            aliases=update_alias_inputs,
+            tags_to_add=[{"tagName": t} for t in added_tags],
+            tags_to_delete=[{"tagName": t} for t in deleted_tags],
         )
-
-
-            "artifactID": self.id,
-            "description": self.description,
-            "metadata": json_dumps_safer(self.metadata),
-            "ttlDurationSeconds": self._ttl_duration_seconds_to_gql(),
-            "aliases": aliases,
-            "tagsToAdd": [TagInput(tag_name=t).model_dump() for t in tags_to_add],
-            "tagsToDelete": [TagInput(tag_name=t).model_dump() for t in tags_to_del],
-        }
-
-        data = self._client.execute(mutation, variable_values=gql_vars)
+        gql_vars = {"input": gql_input.model_dump(exclude=omit_variables)}
+        data = client.execute(gql_op, variable_values=gql_vars)
 
         result = UpdateArtifact.model_validate(data).update_artifact
         if not (result and (artifact := result.artifact)):
@@ -1356,6 +1312,50 @@
 
         self._ttl_changed = False  # Reset after updating artifact
 
+    def _add_aliases(self, alias_names: set[str], target: FullArtifactPath) -> None:
+        if (client := self._client) is None:
+            raise RuntimeError("Client not initialized for artifact mutations")
+
+        target_props = {
+            "entityName": target.prefix,
+            "projectName": target.project,
+            "artifactCollectionName": target.name,
+        }
+        alias_inputs = [{**target_props, "alias": name} for name in alias_names]
+        gql_op = gql(ADD_ALIASES_GQL)
+        gql_input = AddAliasesInput(artifact_id=self.id, aliases=alias_inputs)
+        gql_vars = {"input": gql_input.model_dump()}
+        try:
+            client.execute(gql_op, variable_values=gql_vars)
+        except CommError as e:
+            raise CommError(
+                "You do not have permission to add"
+                f" {'at least one of the following aliases' if len(alias_names) > 1 else 'the following alias'}"
+                f" to this artifact: {alias_names!r}"
+            ) from e
+
+    def _delete_aliases(self, alias_names: set[str], target: FullArtifactPath) -> None:
+        if (client := self._client) is None:
+            raise RuntimeError("Client not initialized for artifact mutations")
+
+        target_props = {
+            "entityName": target.prefix,
+            "projectName": target.project,
+            "artifactCollectionName": target.name,
+        }
+        alias_inputs = [{**target_props, "alias": name} for name in alias_names]
+        gql_op = gql(DELETE_ALIASES_GQL)
+        gql_input = DeleteAliasesInput(artifact_id=self.id, aliases=alias_inputs)
+        gql_vars = {"input": gql_input.model_dump()}
+        try:
+            client.execute(gql_op, variable_values=gql_vars)
+        except CommError as e:
+            raise CommError(
+                f"You do not have permission to delete"
+                f" {'at least one of the following aliases' if len(alias_names) > 1 else 'the following alias'}"
+                f" from this artifact: {alias_names!r}"
+            ) from e
+
     # Adding, removing, getting entries.
 
     def __getitem__(self, name: str) -> WBValue | None:
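With this change, `_update` compares the saved and current alias lists as sets and delegates the additions and removals to the new `_add_aliases` / `_delete_aliases` helpers. A hedged, standalone illustration of that set-difference step (generic names, not the actual wandb internals):

```python
def diff_aliases(saved: list[str], current: list[str]) -> tuple[set[str], set[str]]:
    """Return (aliases to add, aliases to delete) between a saved and a current list."""
    old, new = set(saved), set(current)
    to_add = new - old       # present locally, not yet saved on the server
    to_delete = old - new    # removed locally, still present on the server
    return to_add, to_delete

# e.g. diff_aliases(["latest"], ["latest", "best"]) -> ({"best"}, set())
```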
@@ -1947,8 +1947,7 @@
         Raises:
             ArtifactNotLoggedError: If the artifact is not logged.
         """
-        root =
-        self._add_download_root(root)
+        root = self._add_download_root(root)
 
         # TODO: download artifacts using core when implemented
         # if is_require_core():
@@ -2062,6 +2061,16 @@
         except _GCSIsADirectoryError as e:
             logger.debug(str(e))
             return
+        except IsADirectoryError:
+            wandb.termwarn(
+                f"Unable to download file {entry.path!r} as there is a directory with the same path, skipping."
+            )
+            return
+        except NotADirectoryError:
+            wandb.termwarn(
+                f"Unable to download file {entry.path!r} as there is a file with the same path as a directory this file is expected to be in, skipping."
+            )
+            return
         download_logger.notify_downloaded()
 
     def _init_thread(
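The added `except` clauses make per-file downloads warn and skip when the destination path collides with an existing directory (`IsADirectoryError`) or when a parent path is occupied by a regular file (`NotADirectoryError`), instead of failing the whole download. A rough sketch of that warn-and-skip shape, with a generic logger and a hypothetical `download_one` callable standing in for the real manifest-entry download:

```python
import logging

logger = logging.getLogger(__name__)

def download_or_skip(download_one, entry_path: str) -> bool:
    """Attempt one download; skip (with a warning) on path-collision errors."""
    try:
        download_one(entry_path)
    except IsADirectoryError:
        logger.warning("Skipping %r: a directory already exists at this path.", entry_path)
        return False
    except NotADirectoryError:
        logger.warning("Skipping %r: a parent path is a regular file, not a directory.", entry_path)
        return False
    return True
```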
@@ -2148,13 +2157,11 @@
     )
     def _fetch_file_urls(
         self, cursor: str | None, per_page: int = 5000
-    ) ->
+    ) -> FileWithUrlConnection:
         if self._client is None:
             raise RuntimeError("Client not initialized")
 
-        if
-            pb.ServerFeature.ARTIFACT_COLLECTION_MEMBERSHIP_FILES
-        ):
+        if server_supports(self._client, pb.ARTIFACT_COLLECTION_MEMBERSHIP_FILES):
             query = gql(ARTIFACT_COLLECTION_MEMBERSHIP_FILE_URLS_GQL)
             gql_vars = {
                 "entityName": self.entity,
@@ -2174,7 +2181,7 @@
                 and (files := membership.files)
             ):
                 raise ValueError(f"Unable to fetch files for artifact: {self.name!r}")
-            return files
+            return FileWithUrlConnection.model_validate(files)
         else:
             query = gql(ARTIFACT_FILE_URLS_GQL)
             gql_vars = {"id": self.id, "cursor": cursor, "perPage": per_page}
@@ -2183,7 +2190,7 @@
 
         if not ((artifact := result.artifact) and (files := artifact.files)):
             raise ValueError(f"Unable to fetch files for artifact: {self.name!r}")
-        return files
+        return FileWithUrlConnection.model_validate(files)
 
     @ensure_logged
     def checkout(self, root: str | None = None) -> str:
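`_fetch_file_urls` now normalizes both GraphQL code paths into a single typed `FileWithUrlConnection` (from the new `wandb/sdk/artifacts/_models/pagination.py`), so callers can page through results uniformly. The sketch below assumes a Relay-style connection shape (`edges` plus `page_info.has_next_page` / `end_cursor`); the exact fields of the real model are not shown in this diff, so illustrative stand-ins are defined here:

```python
from dataclasses import dataclass
from typing import Callable, Iterator, Optional

@dataclass
class PageInfo:
    end_cursor: Optional[str]
    has_next_page: bool

@dataclass
class Edge:
    node: object

@dataclass
class Connection:  # stand-in for a FileWithUrlConnection-like model
    edges: list[Edge]
    page_info: PageInfo

def iterate_files(fetch_page: Callable[[Optional[str]], Connection]) -> Iterator[object]:
    """Walk cursor-based pages, yielding each node, until has_next_page is False."""
    cursor: Optional[str] = None
    while True:
        page = fetch_page(cursor)
        yield from (edge.node for edge in page.edges)
        if not page.page_info.has_next_page:
            break
        cursor = page.page_info.end_cursor
```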
@@ -2306,8 +2313,10 @@
         # use that, otherwise we'll fall back to the system-preferred path.
         return FilePathStr(check_exists(root) or system_preferred_path(root))
 
-    def _add_download_root(self, dir_path:
-        self.
+    def _add_download_root(self, dir_path: StrPath | None) -> FilePathStr:
+        root = str(dir_path or self._default_root())
+        self._download_roots.add(os.path.abspath(root))
+        return root
 
     def _local_path_to_name(self, file_path: str) -> str | None:
         """Convert a local file path to a path entry in the artifact."""
@@ -2351,10 +2360,12 @@
         if self._client is None:
             raise RuntimeError("Client not initialized for artifact mutations")
 
-
-
-
-
+        gql_op = gql(DELETE_ARTIFACT_GQL)
+        gql_input = DeleteArtifactInput(
+            artifact_id=self.id,
+            delete_aliases=delete_aliases,
+        )
+        self._client.execute(gql_op, variable_values={"input": gql_input.model_dump()})
 
     @normalize_exceptions
     def link(self, target_path: str, aliases: list[str] | None = None) -> Artifact:
@@ -2385,7 +2396,7 @@
                 "Linking to a link artifact will result in directly linking to the source artifact of that link artifact."
             )
 
-        if self._client is None:
+        if (client := self._client) is None:
             raise ValueError("Client not initialized for artifact mutations")
 
         # Save the artifact first if necessary
@@ -2408,8 +2419,8 @@
         if target.is_registry_path():
             # In a Registry linking, the entity is used to fetch the organization of the artifact
             # therefore the source artifact's entity is passed to the backend
-            org = target.prefix or settings.get("organization") or
-            target.prefix =
+            org = target.prefix or settings.get("organization") or None
+            target.prefix = resolve_org_entity_name(client, self.source_entity, org)
         else:
             target = target.with_defaults(prefix=self.source_entity)
 
@@ -2428,29 +2439,25 @@
             project_name=target.project,
             aliases=alias_inputs,
         )
-        gql_vars = {"input": gql_input.model_dump(
+        gql_vars = {"input": gql_input.model_dump()}
 
         # Newer server versions can return `artifactMembership` directly in the response,
         # avoiding the need to re-fetch the linked artifact at the end.
-        if
-
-        ):
-            omit_fragments = set()
+        if server_supports(client, pb.ARTIFACT_MEMBERSHIP_IN_LINK_ARTIFACT_RESPONSE):
+            omit_vars = omit_fields = None
         else:
-
-
-                "MembershipWithArtifact",
-                "ArtifactFragment",
-                "ArtifactFragmentWithoutAliases",
-            }
+            omit_vars = {"includeAliases"}
+            omit_fields = {"artifactMembership"}
 
-        gql_op = gql_compat(
-
+        gql_op = gql_compat(
+            LINK_ARTIFACT_GQL, omit_variables=omit_vars, omit_fields=omit_fields
+        )
+        data = client.execute(gql_op, variable_values=gql_vars)
         result = LinkArtifact.model_validate(data).link_artifact
 
         # Newer server versions can return artifactMembership directly in the response
         if result and (membership := result.artifact_membership):
-            return self._from_membership(membership, target=target, client=
+            return self._from_membership(membership, target=target, client=client)
 
         # Fallback to old behavior, which requires re-fetching the linked artifact to return it
         if not (result and (version_idx := result.version_index) is not None):
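The `link()` change follows the same capability-gating pattern used elsewhere in this release: probe the backend with `server_supports(...)`, then pass `omit_variables` / `omit_fields` to `gql_compat(...)` so the query still validates against older servers. A minimal, hedged illustration of the branch (the real helpers live in `wandb/sdk/artifacts/_gqlutils.py`; their exact signatures are not shown in this diff):

```python
def choose_omissions(membership_in_response: bool) -> tuple[set[str] | None, set[str] | None]:
    """Decide which GraphQL variables/fields to strip for older servers."""
    if membership_in_response:
        # Newer server: send the full query, nothing to strip.
        return None, None
    # Older server: drop the optional variable and response field (values taken from the diff).
    return {"includeAliases"}, {"artifactMembership"}

omit_vars, omit_fields = choose_omissions(False)
```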
@@ -2483,13 +2490,16 @@
             raise RuntimeError("Client not initialized for artifact mutations")
 
         mutation = gql(UNLINK_ARTIFACT_GQL)
-
-
+        gql_input = UnlinkArtifactInput(
+            artifact_id=self.id,
+            artifact_portfolio_id=self.collection.id,
+        )
+        gql_vars = {"input": gql_input.model_dump()}
         try:
             self._client.execute(mutation, variable_values=gql_vars)
         except CommError as e:
             raise CommError(
-                f"You do not have permission to unlink the artifact {self.qualified_name}"
+                f"You do not have permission to unlink the artifact {self.qualified_name!r}"
             ) from e
 
     @ensure_logged
@@ -2517,7 +2527,7 @@
         ):
             run_nodes = (e.node for e in edges)
             return [
-                Run(self._client, proj.
+                Run(self._client, proj.entity.name, proj.name, run.name)
                 for run in run_nodes
                 if (proj := run.project)
             ]
@@ -2547,7 +2557,7 @@
             and (name := creator.name)
             and (project := creator.project)
         ):
-            return Run(self._client, project.
+            return Run(self._client, project.entity.name, project.name, name)
         return None
 
     @ensure_logged
@@ -2581,11 +2591,6 @@
             return artifact_type.name
         return None
 
-    def _load_manifest(self, url: str) -> ArtifactManifest:
-        with requests.get(url) as response:
-            response.raise_for_status()
-            return ArtifactManifest.from_manifest_json(response.json())
-
     def _ttl_duration_seconds_to_gql(self) -> int | None:
         # Set artifact ttl value to ttl_duration_seconds if the user set a value
         # otherwise use ttl_status to indicate the backend INHERIT(-1) or DISABLED(-2) when the TTL is None
@@ -2627,7 +2632,7 @@
                 if (
                     (node := edge.node)
                     and (col := node.artifact_collection)
-                    and (col.typename__ ==
+                    and (col.typename__ == LINKED_COLLECTION_TYPENAME)
                 )
             )
             for node in linked_nodes:
@@ -2643,12 +2648,12 @@
                 node
                 and (col := node.artifact_collection)
                 and (proj := col.project)
-                and (proj.
+                and (proj.entity.name and proj.name)
             ):
                 raise ValueError("Unable to fetch fields for linked artifact")
 
             link_fields = _LinkArtifactFields(
-                entity_name=proj.
+                entity_name=proj.entity.name,
                 project_name=proj.name,
                 name=f"{col.name}:{version}",
                 version=version,