wandb 0.18.4__py3-none-any.whl → 0.18.6__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package as they appear in their public registry. It is provided for informational purposes only.
- wandb/__init__.py +2 -2
- wandb/__init__.pyi +21 -19
- wandb/agents/pyagent.py +1 -1
- wandb/apis/importers/wandb.py +1 -1
- wandb/apis/normalize.py +2 -18
- wandb/apis/public/api.py +122 -62
- wandb/apis/public/artifacts.py +8 -3
- wandb/apis/public/files.py +17 -2
- wandb/apis/public/jobs.py +2 -2
- wandb/apis/public/query_generator.py +1 -1
- wandb/apis/public/runs.py +8 -8
- wandb/apis/public/teams.py +3 -3
- wandb/apis/public/users.py +1 -1
- wandb/apis/public/utils.py +68 -0
- wandb/bin/gpu_stats +0 -0
- wandb/cli/cli.py +12 -3
- wandb/data_types.py +1 -1
- wandb/docker/__init__.py +2 -1
- wandb/docker/auth.py +2 -3
- wandb/errors/links.py +73 -0
- wandb/errors/term.py +7 -6
- wandb/filesync/step_prepare.py +1 -1
- wandb/filesync/upload_job.py +1 -1
- wandb/integration/catboost/catboost.py +2 -2
- wandb/integration/diffusers/pipeline_resolver.py +1 -1
- wandb/integration/diffusers/resolvers/multimodal.py +6 -6
- wandb/integration/diffusers/resolvers/utils.py +1 -1
- wandb/integration/fastai/__init__.py +3 -2
- wandb/integration/keras/callbacks/metrics_logger.py +1 -1
- wandb/integration/keras/callbacks/model_checkpoint.py +1 -1
- wandb/integration/keras/keras.py +1 -1
- wandb/integration/kfp/kfp_patch.py +1 -1
- wandb/integration/lightgbm/__init__.py +2 -2
- wandb/integration/magic.py +2 -2
- wandb/integration/metaflow/metaflow.py +1 -1
- wandb/integration/sacred/__init__.py +1 -1
- wandb/integration/sagemaker/auth.py +1 -1
- wandb/integration/sklearn/plot/classifier.py +7 -7
- wandb/integration/sklearn/plot/clusterer.py +3 -3
- wandb/integration/sklearn/plot/regressor.py +3 -3
- wandb/integration/sklearn/plot/shared.py +2 -2
- wandb/integration/tensorboard/log.py +2 -2
- wandb/integration/ultralytics/callback.py +2 -2
- wandb/integration/xgboost/xgboost.py +1 -1
- wandb/jupyter.py +0 -1
- wandb/plot/__init__.py +17 -8
- wandb/plot/bar.py +53 -27
- wandb/plot/confusion_matrix.py +151 -70
- wandb/plot/custom_chart.py +124 -0
- wandb/plot/histogram.py +46 -20
- wandb/plot/line.py +57 -26
- wandb/plot/line_series.py +148 -60
- wandb/plot/pr_curve.py +89 -44
- wandb/plot/roc_curve.py +82 -37
- wandb/plot/scatter.py +53 -20
- wandb/plot/viz.py +20 -102
- wandb/sdk/artifacts/artifact.py +280 -328
- wandb/sdk/artifacts/artifact_manifest.py +10 -9
- wandb/sdk/artifacts/artifact_manifest_entry.py +1 -1
- wandb/sdk/artifacts/storage_handlers/azure_handler.py +9 -4
- wandb/sdk/artifacts/storage_handlers/gcs_handler.py +1 -3
- wandb/sdk/artifacts/storage_handlers/s3_handler.py +1 -1
- wandb/sdk/artifacts/storage_handlers/wb_artifact_handler.py +2 -2
- wandb/sdk/artifacts/storage_handlers/wb_local_artifact_handler.py +1 -1
- wandb/sdk/backend/backend.py +0 -1
- wandb/sdk/data_types/audio.py +1 -1
- wandb/sdk/data_types/base_types/media.py +66 -5
- wandb/sdk/data_types/bokeh.py +1 -1
- wandb/sdk/data_types/helper_types/bounding_boxes_2d.py +1 -1
- wandb/sdk/data_types/helper_types/image_mask.py +2 -2
- wandb/sdk/data_types/histogram.py +1 -1
- wandb/sdk/data_types/html.py +1 -1
- wandb/sdk/data_types/image.py +1 -1
- wandb/sdk/data_types/molecule.py +3 -3
- wandb/sdk/data_types/object_3d.py +4 -4
- wandb/sdk/data_types/plotly.py +1 -1
- wandb/sdk/data_types/saved_model.py +0 -1
- wandb/sdk/data_types/table.py +7 -7
- wandb/sdk/data_types/trace_tree.py +1 -1
- wandb/sdk/data_types/video.py +4 -3
- wandb/sdk/interface/router.py +0 -2
- wandb/sdk/internal/datastore.py +1 -1
- wandb/sdk/internal/file_pusher.py +1 -1
- wandb/sdk/internal/file_stream.py +4 -4
- wandb/sdk/internal/handler.py +3 -2
- wandb/sdk/internal/internal.py +1 -1
- wandb/sdk/internal/internal_api.py +183 -64
- wandb/sdk/internal/job_builder.py +4 -3
- wandb/sdk/internal/system/assets/__init__.py +0 -2
- wandb/sdk/internal/tb_watcher.py +11 -10
- wandb/sdk/launch/_launch.py +4 -3
- wandb/sdk/launch/_launch_add.py +2 -2
- wandb/sdk/launch/builder/kaniko_builder.py +0 -1
- wandb/sdk/launch/create_job.py +1 -0
- wandb/sdk/launch/environment/local_environment.py +0 -1
- wandb/sdk/launch/errors.py +0 -6
- wandb/sdk/launch/registry/local_registry.py +0 -2
- wandb/sdk/launch/runner/abstract.py +0 -5
- wandb/sdk/launch/sweeps/__init__.py +0 -2
- wandb/sdk/launch/sweeps/scheduler.py +0 -2
- wandb/sdk/launch/sweeps/scheduler_sweep.py +0 -1
- wandb/sdk/lib/apikey.py +3 -3
- wandb/sdk/lib/file_stream_utils.py +1 -1
- wandb/sdk/lib/filesystem.py +1 -1
- wandb/sdk/lib/ipython.py +16 -9
- wandb/sdk/lib/mailbox.py +0 -4
- wandb/sdk/lib/printer.py +44 -8
- wandb/sdk/lib/retry.py +1 -1
- wandb/sdk/service/service.py +3 -3
- wandb/sdk/service/streams.py +2 -4
- wandb/sdk/wandb_init.py +20 -20
- wandb/sdk/wandb_login.py +1 -1
- wandb/sdk/wandb_require.py +1 -4
- wandb/sdk/wandb_run.py +57 -69
- wandb/sdk/wandb_settings.py +3 -4
- wandb/sdk/wandb_sync.py +2 -1
- wandb/util.py +46 -18
- wandb/wandb_agent.py +3 -3
- wandb/wandb_controller.py +2 -2
- {wandb-0.18.4.dist-info → wandb-0.18.6.dist-info}/METADATA +1 -1
- {wandb-0.18.4.dist-info → wandb-0.18.6.dist-info}/RECORD +124 -125
- wandb/sdk/internal/system/assets/gpu_apple.py +0 -177
- wandb/sdk/lib/_wburls_generate.py +0 -25
- wandb/sdk/lib/_wburls_generated.py +0 -22
- wandb/sdk/lib/wburls.py +0 -46
- {wandb-0.18.4.dist-info → wandb-0.18.6.dist-info}/WHEEL +0 -0
- {wandb-0.18.4.dist-info → wandb-0.18.6.dist-info}/entry_points.txt +0 -0
- {wandb-0.18.4.dist-info → wandb-0.18.6.dist-info}/licenses/LICENSE +0 -0
wandb/sdk/artifacts/artifact.py
CHANGED
@@ -41,7 +41,6 @@ from wandb.errors.term import termerror, termlog, termwarn
 from wandb.sdk.artifacts._validators import (
     ensure_logged,
     ensure_not_finalized,
-    is_artifact_registry_project,
     validate_aliases,
     validate_tags,
 )
@@ -89,7 +88,7 @@ class Artifact:
     begin with `add`. Once the artifact has all the desired files, you can call
     `wandb.log_artifact()` to log it.

-
+    Args:
         name: A human-readable name for the artifact. Use the name to identify
             a specific artifact in the W&B App UI or programmatically. You can
            interactively reference an artifact with the `use_artifact` Public API.
@@ -204,15 +203,10 @@ class Artifact:
            query ArtifactByID($id: ID!) {
                artifact(id: $id) {
                    ...ArtifactFragment
-                    currentManifest {
-                        file {
-                            directUrl
-                        }
-                    }
                }
            }
            """
-            +
+            + _gql_artifact_fragment()
        )
        response = client.execute(
            query,
@@ -221,13 +215,14 @@ class Artifact:
        attrs = response.get("artifact")
        if attrs is None:
            return None
-
-
-
-
-
-
-
+
+        src_collection = attrs["artifactSequence"]
+        src_project = src_collection["project"]
+
+        entity_name = src_project["entityName"] if src_project else ""
+        project_name = src_project["name"] if src_project else ""
+
+        name = "{}:v{}".format(src_collection["name"], attrs["versionIndex"])
        return cls._from_attrs(entity_name, project_name, name, attrs, client)

    @classmethod
@@ -238,55 +233,43 @@ class Artifact:
        name: str,
        client: RetryingClient,
        organization: str = "",
+        enable_tracking: bool = False,
    ) -> Artifact:
+        server_supports_enabling_artifact_usage_tracking = (
+            InternalApi().server_project_type_introspection()
+        )
+        query_vars = ["$entityName: String!", "$projectName: String!", "$name: String!"]
+        query_args = ["name: $name"]
+        if server_supports_enabling_artifact_usage_tracking:
+            query_vars.append("$enableTracking: Boolean")
+            query_args.append("enableTracking: $enableTracking")
+
+        vars_str = ", ".join(query_vars)
+        args_str = ", ".join(query_args)
+
        query = gql(
-            """
-            query ArtifactByName(
-                $entityName:
-
-                $name: String!
-            ) {
-                project(name: $projectName, entityName: $entityName) {
-                    artifact(name: $name) {
+            f"""
+            query ArtifactByName({vars_str}) {{
+                project(name: $projectName, entityName: $entityName) {{
+                    artifact({args_str}) {{
                        ...ArtifactFragment
-                    }
-                }
-            }
+                    }}
+                }}
+            }}
+            {_gql_artifact_fragment()}
            """
-            + cls._get_gql_artifact_fragment()
        )
-
-
-
-
-
-
-
-            if not organization or organization == entity:
-                wandb.termerror(str(entity_error))
-                raise
-
-            # Try to resolve the organization using an org entity.
-            try:
-                entity = InternalApi()._resolve_org_entity_name(
-                    organization, organization
-                )
-            except ValueError as org_error:
-                wandb.termerror(
-                    f"Error resolving organization of entity: {entity!r}. Failed with error: {entity_error!r}."
-                )
-                wandb.termerror(
-                    f"Defaulted to use {organization!r} as an org entity to resolve organization. Failed with error: {org_error!r}."
-                )
-                raise
+        query_variable_values: dict[str, Any] = {
+            "entityName": entity,
+            "projectName": project,
+            "name": name,
+        }
+        if server_supports_enabling_artifact_usage_tracking:
+            query_variable_values["enableTracking"] = enable_tracking

        response = client.execute(
            query,
-            variable_values=
-                "entityName": entity,
-                "projectName": project,
-                "name": name,
-            },
+            variable_values=query_variable_values,
        )
        project_attrs = response.get("project")
        if not project_attrs:
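
The rebuilt ArtifactByName query assembles its variable and argument lists dynamically, so `enableTracking` is only sent to servers whose introspection reports support for it. A minimal sketch of that string construction, with the introspection call stubbed out as a plain boolean (illustrative, not part of the package):

    # Stand-in for InternalApi().server_project_type_introspection()
    server_supports_enabling_artifact_usage_tracking = True

    query_vars = ["$entityName: String!", "$projectName: String!", "$name: String!"]
    query_args = ["name: $name"]
    if server_supports_enabling_artifact_usage_tracking:
        query_vars.append("$enableTracking: Boolean")
        query_args.append("enableTracking: $enableTracking")

    # Prints: query ArtifactByName($entityName: String!, $projectName: String!, $name: String!, $enableTracking: Boolean)
    print(f"query ArtifactByName({', '.join(query_vars)})")
    # Prints: artifact(name: $name, enableTracking: $enableTracking)
    print(f"artifact({', '.join(query_args)})")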
@@ -308,67 +291,108 @@ class Artifact:
        # Placeholder is required to skip validation.
        artifact = cls("placeholder", type="placeholder")
        artifact._client = client
-        artifact._id = attrs["id"]
        artifact._entity = entity
        artifact._project = project
        artifact._name = name
+        artifact._assign_attrs(attrs)
+
+        artifact.finalize()
+
+        # Cache.
+        assert artifact.id is not None
+        artifact_instance_cache[artifact.id] = artifact
+        return artifact
+
+    def _assign_attrs(self, attrs: dict[str, Any]) -> None:
+        """Update this Artifact's attributes using the server response."""
+        self._id = attrs["id"]
+
+        src_version = f"v{attrs['versionIndex']}"
+        src_collection = attrs["artifactSequence"]
+        src_project = src_collection["project"]
+
+        self._source_entity = src_project["entityName"] if src_project else ""
+        self._source_project = src_project["name"] if src_project else ""
+        self._source_name = f"{src_collection['name']}:{src_version}"
+        self._source_version = src_version
+
+        if self._entity is None:
+            self._entity = self._source_entity
+        if self._project is None:
+            self._project = self._source_project
+
+        if self._name is None:
+            self._name = self._source_name
+
+        self._type = attrs["artifactType"]["name"]
+        self._description = attrs["description"]
+
+        entity = self._entity
+        project = self._project
+        collection, *_ = self._name.split(":")
        aliases = [
-
-            for
-            if
-            and
-            and
-            and
-            and
+            obj["alias"]
+            for obj in attrs["aliases"]
+            if obj["artifactCollection"]
+            and obj["artifactCollection"]["project"]
+            and obj["artifactCollection"]["project"]["entityName"] == entity
+            and obj["artifactCollection"]["project"]["name"] == project
+            and obj["artifactCollection"]["name"] == collection
        ]
-
+
        version_aliases = [
            alias for alias in aliases if util.alias_is_version_index(alias)
        ]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        other_aliases = [
+            alias for alias in aliases if not util.alias_is_version_index(alias)
+        ]
+        if version_aliases:
+            try:
+                [version] = version_aliases
+            except ValueError:
+                raise ValueError(
+                    f"Expected at most one version alias, got {len(version_aliases)}: {version_aliases!r}"
+                )
+        else:
+            version = src_version
+
+        self._version = version
+
+        if ":" not in self._name:
+            self._name = f"{self._name}:{version}"
+
+        self._aliases = other_aliases
+        self._saved_aliases = copy(other_aliases)
+
+        tags = [obj["name"] for obj in attrs.get("tags", [])]
+        self._tags = tags
+        self._saved_tags = copy(tags)
+
+        metadata_str = attrs["metadata"]
+        self.metadata = self._normalize_metadata(
+            json.loads(metadata_str) if metadata_str else {}
        )
-
+
+        self._ttl_duration_seconds = _ttl_duration_seconds_from_gql(
            attrs.get("ttlDurationSeconds")
        )
-
-            True if attrs.get("ttlIsInherited") is None else attrs["ttlIsInherited"]
+        self._ttl_is_inherited = (
+            True if (attrs.get("ttlIsInherited") is None) else attrs["ttlIsInherited"]
        )
-
-
-
-
-
-
-
-        if "currentManifest" in attrs:
-            artifact._load_manifest(attrs["currentManifest"]["file"]["directUrl"])
+
+        self._state = ArtifactState(attrs["state"])
+
+        try:
+            manifest_url = attrs["currentManifest"]["file"]["directUrl"]
+        except (LookupError, TypeError):
+            self._manifest = None
        else:
-
-        artifact._commit_hash = attrs["commitHash"]
-        artifact._file_count = attrs["fileCount"]
-        artifact._created_at = attrs["createdAt"]
-        artifact._updated_at = attrs["updatedAt"]
-        artifact._final = True
-        # Cache.
+            self._manifest = self._load_manifest(manifest_url)

-
-
-
+        self._commit_hash = attrs["commitHash"]
+        self._file_count = attrs["fileCount"]
+        self._created_at = attrs["createdAt"]
+        self._updated_at = attrs["updatedAt"]

    @ensure_logged
    def new_draft(self) -> Artifact:
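
The new `_assign_attrs` helper centralizes the response parsing that previously lived in `_from_attrs` and `_populate_after_save`. For orientation, a hypothetical payload shaped like the `ArtifactFragment` fields used in this file (all values are illustrative, not from a real server response):

    attrs = {
        "id": "QXJ0aWZhY3Q6MTIz",
        "artifactSequence": {
            "project": {"entityName": "my-team", "name": "my-project"},
            "name": "my-dataset",
        },
        "versionIndex": 3,
        "artifactType": {"name": "dataset"},
        "description": None,
        "metadata": '{"rows": 1000}',
        "ttlDurationSeconds": None,
        "ttlIsInherited": True,
        "aliases": [],
        "tags": [],
        "state": "COMMITTED",
        "currentManifest": {"file": {"directUrl": "https://example.com/manifest.json"}},
        "commitHash": "abc123",
        "fileCount": 4,
        "createdAt": "2024-11-01T00:00:00",
        "updatedAt": "2024-11-02T00:00:00",
    }
    # _assign_attrs(attrs) would derive the name "my-dataset:v3", version "v3", aliases,
    # tags, metadata, TTL settings, state, the manifest URL, and the timestamps from it.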
@@ -431,7 +455,7 @@ class Artifact:
    def name(self) -> str:
        """The artifact name and version in its secondary (portfolio) collection.

-        A string with the format {collection}:{alias}
+        A string with the format `{collection}:{alias}`. Before the artifact is saved,
        contains only the name since the version is not yet known.
        """
        return self._name
@@ -482,7 +506,7 @@ class Artifact:
    def source_name(self) -> str:
        """The artifact name and version in its primary (sequence) collection.

-        A string with the format {collection}:{alias}
+        A string with the format `{collection}:{alias}`. Before the artifact is saved,
        contains only the name since the version is not yet known.
        """
        return self._source_name
@@ -497,7 +521,7 @@ class Artifact:
    def source_version(self) -> str:
        """The artifact's version in its primary (sequence) collection.

-        A string with the format
+        A string with the format `v{number}`.
        """
        assert self._source_version is not None
        return self._source_version
@@ -529,7 +553,7 @@ class Artifact:
        standardized team model or dataset card. In the W&B UI the
        description is rendered as markdown.

-
+        Args:
            description: Free text that offers a description of the artifact.
        """
        self._description = description
@@ -551,7 +575,7 @@ class Artifact:

        Note: There is currently a limit of 100 total keys.

-
+        Args:
            metadata: Structured data associated with the artifact.
        """
        self._metadata = self._normalize_metadata(metadata)
@@ -586,7 +610,7 @@ class Artifact:
        the team default if the team administrator defines a default
        TTL and there is no custom policy set on an artifact.

-
+        Args:
            ttl: The duration as a positive Python `datetime.timedelta` Type
                that represents how long the artifact will remain active from its creation.

@@ -699,8 +723,9 @@ class Artifact:
            },
        )
        attrs = response["project"]["artifact"]
-        self._load_manifest(
-
+        self._manifest = self._load_manifest(
+            attrs["currentManifest"]["file"]["directUrl"]
+        )
        return self._manifest

    @property
@@ -784,7 +809,7 @@ class Artifact:
        If currently in a run, that run will log this artifact. If not currently in a
        run, a run of type "auto" is created to track this artifact.

-
+        Args:
            project: A project to use for the artifact in the case that a run is not
                already in context.
            settings: A settings object to use when initializing an automatic run. Most
@@ -824,7 +849,7 @@ class Artifact:
    def wait(self, timeout: int | None = None) -> Artifact:
        """If needed, wait for this artifact to finish logging.

-
+        Args:
            timeout: The time, in seconds, to wait.

        Returns:
@@ -845,52 +870,13 @@ class Artifact:
        return self

    def _populate_after_save(self, artifact_id: str) -> None:
-
-
-
-
-
-
-
-        tags = "tags {name}" if supports_tags else ""
-
-        query_template = f"""
-            query ArtifactByIDShort($id: ID!) {{
-                artifact(id: $id) {{
-                    artifactSequence {{
-                        project {{
-                            entityName
-                            name
-                        }}
-                        name
-                    }}
-                    versionIndex
-                    {ttl_duration_seconds}
-                    {ttl_is_inherited}
-                    aliases {{
-                        artifactCollection {{
-                            project {{
-                                entityName
-                                name
-                            }}
-                            name
-                        }}
-                        alias
-                    }}
-                    {tags!s}
-                    state
-                    currentManifest {{
-                        file {{
-                            directUrl
-                        }}
-                    }}
-                    commitHash
-                    fileCount
-                    createdAt
-                    updatedAt
-                }}
-            }}
-        """
+        query_template = """
+            query ArtifactByIDShort($id: ID!) {
+                artifact(id: $id) {
+                    ...ArtifactFragment
+                }
+            }
+        """ + _gql_artifact_fragment()

        query = gql(query_template)

@@ -899,48 +885,13 @@ class Artifact:
            query,
            variable_values={"id": artifact_id},
        )
-
-
-
-
-
-
-
-        if attr_project:
-            self._entity = attr_project["entityName"]
-            self._project = attr_project["name"]
-        self._name = "{}:v{}".format(
-            attrs["artifactSequence"]["name"], attrs["versionIndex"]
-        )
-        self._version = "v{}".format(attrs["versionIndex"])
-        self._source_entity = self._entity
-        self._source_project = self._project
-        self._source_name = self._name
-        self._source_version = self._version
-        self._ttl_duration_seconds = self._ttl_duration_seconds_from_gql(
-            attrs.get("ttlDurationSeconds")
-        )
-        self._ttl_is_inherited = (
-            True if attrs.get("ttlIsInherited") is None else attrs["ttlIsInherited"]
-        )
-        self._ttl_changed = False  # Reset after saving artifact
-        self._aliases = [
-            alias["alias"]
-            for alias in attrs["aliases"]
-            if alias["artifactCollection"]
-            and alias["artifactCollection"]["project"]
-            and alias["artifactCollection"]["project"]["entityName"] == self._entity
-            and alias["artifactCollection"]["project"]["name"] == self._project
-            and alias["artifactCollection"]["name"] == self._name.split(":")[0]
-            and not util.alias_is_version_index(alias["alias"])
-        ]
-        self._tags = [tag_obj["name"] for tag_obj in attrs.get("tags", [])]
-        self._state = ArtifactState(attrs["state"])
-        self._load_manifest(attrs["currentManifest"]["file"]["directUrl"])
-        self._commit_hash = attrs["commitHash"]
-        self._file_count = attrs["fileCount"]
-        self._created_at = attrs["createdAt"]
-        self._updated_at = attrs["updatedAt"]
+
+        try:
+            attrs = response["artifact"]
+        except LookupError:
+            raise ValueError(f"Unable to fetch artifact with id: {artifact_id!r}")
+        else:
+            self._assign_attrs(attrs)

    @normalize_exceptions
    def _update(self) -> None:
@@ -1057,12 +1008,12 @@ class Artifact:
                }
            ) {
                artifact {
-
-                    _TTL_DURATION_SECONDS_FIELDS_
+                    ...ArtifactFragment
                }
            }
        }
-        """
+        """ + _gql_artifact_fragment()
+
        fields = InternalApi().server_artifact_introspection()
        if "ttlIsInherited" in fields:
            mutation_template = (
@@ -1131,14 +1082,8 @@ class Artifact:
            },
        )
        attrs = response["updateArtifact"]["artifact"]
+        self._assign_attrs(attrs)

-        # Update ttl_duration_seconds based on updateArtifact
-        self._ttl_duration_seconds = self._ttl_duration_seconds_from_gql(
-            attrs.get("ttlDurationSeconds")
-        )
-        self._ttl_is_inherited = (
-            True if attrs.get("ttlIsInherited") is None else attrs["ttlIsInherited"]
-        )
        self._ttl_changed = False  # Reset after updating artifact

        # Adding, removing, getting entries.
@@ -1146,7 +1091,7 @@ class Artifact:
    def __getitem__(self, name: str) -> WBValue | None:
        """Get the WBValue object located at the artifact relative `name`.

-
+        Args:
            name: The artifact relative name to get.

        Returns:
@@ -1160,7 +1105,7 @@ class Artifact:
    def __setitem__(self, name: str, item: WBValue) -> ArtifactManifestEntry:
        """Add `item` to the artifact at path `name`.

-
+        Args:
            name: The path within the artifact to add the object.
            item: The object to add.

@@ -1176,11 +1121,11 @@ class Artifact:
    @contextlib.contextmanager
    @ensure_not_finalized
    def new_file(
-        self, name: str, mode: str = "
+        self, name: str, mode: str = "x", encoding: str | None = None
    ) -> Iterator[IO]:
        """Open a new temporary file and add it to the artifact.

-
+        Args:
            name: The name of the new file to add to the artifact.
            mode: The file access mode to use to open the new file.
            encoding: The encoding used to open the new file.
@@ -1193,24 +1138,28 @@ class Artifact:
            ArtifactFinalizedError: You cannot make changes to the current artifact
                version because it is finalized. Log a new artifact version instead.
        """
+        overwrite: bool = "x" not in mode
+
        if self._tmp_dir is None:
            self._tmp_dir = tempfile.TemporaryDirectory()
        path = os.path.join(self._tmp_dir.name, name.lstrip("/"))
-        if os.path.exists(path):
-            raise ValueError(f"File with name {name!r} already exists at {path!r}")

        filesystem.mkdir_exists_ok(os.path.dirname(path))
        try:
            with util.fsync_open(path, mode, encoding) as f:
                yield f
+        except FileExistsError:
+            raise ValueError(f"File with name {name!r} already exists at {path!r}")
        except UnicodeEncodeError as e:
            termerror(
-                f"Failed to open the provided file (
+                f"Failed to open the provided file ({type(e).__name__}: {e}). Please "
                f"provide the proper encoding."
            )
            raise e

-        self.add_file(
+        self.add_file(
+            path, name=name, policy="immutable", skip_cache=True, overwrite=overwrite
+        )

    @ensure_not_finalized
    def add_file(
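
Based on this hunk, `new_file` now defaults to mode "x" and treats any non-exclusive mode as a request to overwrite, forwarding `overwrite=True` to `add_file`. A short usage sketch under that reading (artifact and file names are illustrative):

    import wandb

    artifact = wandb.Artifact("example-config", type="config")

    # Default mode "x": re-creating the same artifact path would raise ValueError.
    with artifact.new_file("config.json") as f:
        f.write('{"lr": 0.001}')

    # mode "w" implies overwrite=True, so the entry at config.json is replaced.
    with artifact.new_file("config.json", mode="w") as f:
        f.write('{"lr": 0.0005}')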
@@ -1220,22 +1169,24 @@ class Artifact:
        is_tmp: bool | None = False,
        skip_cache: bool | None = False,
        policy: Literal["mutable", "immutable"] | None = "mutable",
+        overwrite: bool = False,
    ) -> ArtifactManifestEntry:
        """Add a local file to the artifact.

-
+        Args:
            local_path: The path to the file being added.
            name: The path within the artifact to use for the file being added. Defaults
                to the basename of the file.
            is_tmp: If true, then the file is renamed deterministically to avoid
                collisions.
-            skip_cache: If
+            skip_cache: If `True`, W&B will not copy files to the cache after uploading.
            policy: By default, set to "mutable". If set to "mutable", create a temporary copy of the
                file to prevent corruption during upload. If set to "immutable", disable
                protection and rely on the user not to delete or change the file.
+            overwrite: If `True`, overwrite the file if it already exists.

        Returns:
-            The added manifest entry
+            The added manifest entry.

        Raises:
            ArtifactFinalizedError: You cannot make changes to the current artifact
@@ -1243,7 +1194,7 @@ class Artifact:
            ValueError: Policy must be "mutable" or "immutable"
        """
        if not os.path.isfile(local_path):
-            raise ValueError("Path is not a file: {}"
+            raise ValueError(f"Path is not a file: {local_path!r}")

        name = LogicalPath(name or os.path.basename(local_path))
        digest = md5_file_b64(local_path)
@@ -1255,7 +1206,12 @@ class Artifact:
            name = os.path.join(file_path, ".".join(file_name_parts))

        return self._add_local_file(
-            name,
+            name,
+            local_path,
+            digest=digest,
+            skip_cache=skip_cache,
+            policy=policy,
+            overwrite=overwrite,
        )

    @ensure_not_finalized
@@ -1268,7 +1224,7 @@ class Artifact:
    ) -> None:
        """Add a local directory to the artifact.

-
+        Args:
            local_path: The path of the local directory.
            name: The subdirectory name within an artifact. The name you specify appears
                in the W&B App UI nested by artifact's `type`.
@@ -1352,7 +1308,7 @@ class Artifact:
        For any other scheme, the digest is just a hash of the URI and the size is left
        blank.

-
+        Args:
            uri: The URI path of the reference to add. The URI path can be an object
                returned from `Artifact.get_entry` to store a reference to another
                artifact's entry.
@@ -1404,14 +1360,17 @@ class Artifact:
        return manifest_entries

    @ensure_not_finalized
-    def add(
+    def add(
+        self, obj: WBValue, name: StrPath, overwrite: bool = False
+    ) -> ArtifactManifestEntry:
        """Add wandb.WBValue `obj` to the artifact.

-
+        Args:
            obj: The object to add. Currently support one of Bokeh, JoinedTable,
                PartitionedTable, Table, Classes, ImageMask, BoundingBoxes2D, Audio,
                Image, Video, Html, Object3D
            name: The path within the artifact to add the object.
+            overwrite: If True, overwrite existing objects with the same file path (if applicable).

        Returns:
            The added manifest entry
@@ -1430,7 +1389,7 @@ class Artifact:
        # Validate that the object is one of the correct wandb.Media types
        # TODO: move this to checking subclass of wandb.Media once all are
        # generally supported
-        allowed_types =
+        allowed_types = (
            data_types.Bokeh,
            data_types.JoinedTable,
            data_types.PartitionedTable,
@@ -1445,13 +1404,10 @@ class Artifact:
            data_types.Object3D,
            data_types.Molecule,
            data_types._SavedModel,
-
-
-        if not any(isinstance(obj, t) for t in allowed_types):
+        )
+        if not isinstance(obj, allowed_types):
            raise ValueError(
-                "Found object of type {}, expected one of {}
-                    obj.__class__, allowed_types
-                )
+                f"Found object of type {obj.__class__}, expected one of: {allowed_types}"
            )

        obj_id = id(obj)
@@ -1466,26 +1422,20 @@ class Artifact:
        val = obj.to_json(self)
        name = obj.with_suffix(name)
        entry = self.manifest.get_entry_by_path(name)
-        if entry is not None:
+        if (not overwrite) and (entry is not None):
            return entry

-        def do_write(f: IO) -> None:
-            import json
-
-            # TODO: Do we need to open with utf-8 codec?
-            f.write(json.dumps(val, sort_keys=True))
-
        if is_tmp_name:
            file_path = os.path.join(self._TMP_DIR.name, str(id(self)), name)
            folder_path, _ = os.path.split(file_path)
-
-
-
-                do_write(tmp_f)
+            os.makedirs(folder_path, exist_ok=True)
+            with open(file_path, "w", encoding="utf-8") as tmp_f:
+                json.dump(val, tmp_f, sort_keys=True)
        else:
-
+            filemode = "w" if overwrite else "x"
+            with self.new_file(name, mode=filemode, encoding="utf-8") as f:
+                json.dump(val, f, sort_keys=True)
            file_path = f.name
-            do_write(f)

        # Note, we add the file from our temp directory.
        # It will be added again later on finalize, but succeed since
@@ -1497,7 +1447,7 @@ class Artifact:
            obj._set_artifact_target(self, entry.path)

        if is_tmp_name:
-
+            with contextlib.suppress(FileNotFoundError):
                os.remove(file_path)

        return entry
@@ -1509,11 +1459,12 @@ class Artifact:
        digest: B64MD5 | None = None,
        skip_cache: bool | None = False,
        policy: Literal["mutable", "immutable"] | None = "mutable",
+        overwrite: bool = False,
    ) -> ArtifactManifestEntry:
        policy = policy or "mutable"
        if policy not in ["mutable", "immutable"]:
            raise ValueError(
-                f"Invalid policy
+                f"Invalid policy {policy!r}. Policy may only be `mutable` or `immutable`."
            )
        upload_path = path
        if policy == "mutable":
|
|
1531
1482
|
local_path=upload_path,
|
1532
1483
|
skip_cache=skip_cache,
|
1533
1484
|
)
|
1534
|
-
self.manifest.add_entry(entry)
|
1485
|
+
self.manifest.add_entry(entry, overwrite=overwrite)
|
1535
1486
|
self._added_local_paths[os.fspath(path)] = entry
|
1536
1487
|
return entry
|
1537
1488
|
|
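
Together with the `add_file`, `add`, and `_add_local_file` hunks above, this threads the new `overwrite` flag down to `ArtifactManifest.add_entry` (`wandb/sdk/artifacts/artifact_manifest.py` also changes in this release, per the file list above). A minimal sketch of the resulting API, using illustrative names:

    from pathlib import Path

    import wandb

    Path("metrics.csv").write_text("step,loss\n0,1.0\n")

    artifact = wandb.Artifact("run-assets", type="dataset")
    artifact.add_file("metrics.csv", name="data/metrics.csv")
    # Re-adding the same artifact path succeeds when overwrite=True is passed.
    artifact.add_file("metrics.csv", name="data/metrics.csv", overwrite=True)

    table = wandb.Table(columns=["step", "loss"], data=[[0, 1.0]])
    artifact.add(table, "tables/loss", overwrite=True)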
@@ -1539,7 +1490,7 @@ class Artifact:
    def remove(self, item: StrPath | ArtifactManifestEntry) -> None:
        """Remove an item from the artifact.

-
+        Args:
            item: The item to remove. Can be a specific manifest entry or the name of an
                artifact-relative path. If the item matches a directory all items in
                that directory will be removed.
@@ -1577,7 +1528,7 @@ class Artifact:
    def get_entry(self, name: StrPath) -> ArtifactManifestEntry:
        """Get the entry with the given name.

-
+        Args:
            name: The artifact relative name to get

        Returns:
@@ -1598,7 +1549,7 @@ class Artifact:
    def get(self, name: str) -> WBValue | None:
        """Get the WBValue object located at the artifact relative `name`.

-
+        Args:
            name: The artifact relative name to retrieve.

        Returns:
@@ -1644,7 +1595,7 @@ class Artifact:
    def get_added_local_path_name(self, local_path: str) -> str | None:
        """Get the artifact relative name of a file added by a local filesystem path.

-
+        Args:
            local_path: The local path to resolve into an artifact relative name.

        Returns:
|
|
1665
1616
|
able to resolve a name, without tasking the user with appending .THING.json.
|
1666
1617
|
This method returns an entry if it exists by a suffixed name.
|
1667
1618
|
|
1668
|
-
|
1619
|
+
Args:
|
1669
1620
|
name: name used when adding
|
1670
1621
|
"""
|
1671
1622
|
for wb_class in WBValue.type_mapping().values():
|
@@ -1691,7 +1642,7 @@ class Artifact:
        before you call `download` if you want the contents of `root` to exactly match
        the artifact.

-
+        Args:
            root: The directory W&B stores the artifact's files.
            allow_missing_references: If set to `True`, any invalid reference paths
                will be ignored while downloading referenced files.
@@ -1937,7 +1888,7 @@ class Artifact:
        WARNING: This will delete all files in `root` that are not included in the
        artifact.

-
+        Args:
            root: The directory to replace with this artifact's files.

        Returns:
@@ -1967,7 +1918,7 @@ class Artifact:
        All files in the directory are checksummed and the checksums are then
        cross-referenced against the artifact's manifest. References are not verified.

-
+        Args:
            root: The directory to verify. If None artifact will be downloaded to
                './artifacts/self.name/'

@@ -2004,7 +1955,7 @@ class Artifact:
    def file(self, root: str | None = None) -> StrPath:
        """Download a single file artifact to the directory you specify with `root`.

-
+        Args:
            root: The root directory to store the file. Defaults to
                './artifacts/self.name/'.

@@ -2032,7 +1983,7 @@ class Artifact:
    ) -> ArtifactFiles:
        """Iterate over all files stored in this artifact.

-
+        Args:
            names: The filename paths relative to the root of the artifact you wish to
                list.
            per_page: The number of files to return per request.
@@ -2075,7 +2026,7 @@ class Artifact:
        If called on a linked artifact (i.e. a member of a portfolio collection): only the link is deleted, and the
        source artifact is unaffected.

-
+        Args:
            delete_aliases: If set to `True`, deletes all aliases associated with the artifact.
                Otherwise, this raises an exception if the artifact has existing
                aliases.
@@ -2118,7 +2069,7 @@ class Artifact:
    def link(self, target_path: str, aliases: list[str] | None = None) -> None:
        """Link this artifact to a portfolio (a promoted collection of artifacts).

-
+        Args:
            target_path: The path to the portfolio inside a project.
                The target path must adhere to one of the following
                schemas `{portfolio}`, `{project}/{portfolio}` or
@@ -2331,63 +2282,10 @@ class Artifact:
            Dict[str, Any], json.loads(json.dumps(util.json_friendly_val(metadata)))
        )

-    def _load_manifest(self, url: str) ->
-        with requests.get(url) as
-
-
-                json.loads(util.ensure_text(request.content))
-            )
-
-    @staticmethod
-    def _get_gql_artifact_fragment() -> str:
-        fields = InternalApi().server_artifact_introspection()
-        fragment = """
-            fragment ArtifactFragment on Artifact {
-                id
-                artifactSequence {
-                    project {
-                        entityName
-                        name
-                    }
-                    name
-                }
-                versionIndex
-                artifactType {
-                    name
-                }
-                description
-                metadata
-                ttlDurationSeconds
-                ttlIsInherited
-                aliases {
-                    artifactCollection {
-                        project {
-                            entityName
-                            name
-                        }
-                        name
-                    }
-                    alias
-                }
-                _MAYBE_TAGS_
-                state
-                commitHash
-                fileCount
-                createdAt
-                updatedAt
-            }
-        """
-        if "ttlIsInherited" not in fields:
-            fragment = fragment.replace("ttlDurationSeconds", "").replace(
-                "ttlIsInherited", ""
-            )
-
-        if "tags" in fields:
-            fragment = fragment.replace("_MAYBE_TAGS_", "tags {name}")
-        else:
-            fragment = fragment.replace("_MAYBE_TAGS_", "")
-
-        return fragment
+    def _load_manifest(self, url: str) -> ArtifactManifest:
+        with requests.get(url) as response:
+            response.raise_for_status()
+            return ArtifactManifest.from_manifest_json(response.json())

    def _ttl_duration_seconds_to_gql(self) -> int | None:
        # Set artifact ttl value to ttl_duration_seconds if the user set a value
@@ -2402,14 +2300,68 @@ class Artifact:
            return INHERIT
        return self._ttl_duration_seconds or DISABLED

-
-
-
-
-
-
-
-
+
+def _ttl_duration_seconds_from_gql(gql_ttl_duration_seconds: int | None) -> int | None:
+    # If gql_ttl_duration_seconds is not positive, its indicating that TTL is DISABLED(-2)
+    # gql_ttl_duration_seconds only returns None if the server is not compatible with setting Artifact TTLs
+    if gql_ttl_duration_seconds and gql_ttl_duration_seconds > 0:
+        return gql_ttl_duration_seconds
+    return None
+
+
+def _gql_artifact_fragment() -> str:
+    """Return a GraphQL query fragment with all parseable Artifact attributes."""
+    allowed_fields = set(InternalApi().server_artifact_introspection())
+
+    supports_ttl = "ttlIsInherited" in allowed_fields
+    supports_tags = "tags" in allowed_fields
+
+    ttl_duration_seconds = "ttlDurationSeconds" if supports_ttl else ""
+    ttl_is_inherited = "ttlIsInherited" if supports_ttl else ""
+
+    tags = "tags {name}" if supports_tags else ""
+
+    return f"""
+        fragment ArtifactFragment on Artifact {{
+            id
+            artifactSequence {{
+                project {{
+                    entityName
+                    name
+                }}
+                name
+            }}
+            versionIndex
+            artifactType {{
+                name
+            }}
+            description
+            metadata
+            {ttl_duration_seconds}
+            {ttl_is_inherited}
+            aliases {{
+                artifactCollection {{
+                    project {{
+                        entityName
+                        name
+                    }}
+                    name
+                }}
+                alias
+            }}
+            {tags}
+            state
+            currentManifest {{
+                file {{
+                    directUrl
+                }}
+            }}
+            commitHash
+            fileCount
+            createdAt
+            updatedAt
+        }}
+    """


 class _ArtifactVersionType(WBType):
|