fraclab-sdk 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- README.md +1601 -0
- fraclab_sdk/__init__.py +34 -0
- fraclab_sdk/algorithm/__init__.py +13 -0
- fraclab_sdk/algorithm/export.py +1 -0
- fraclab_sdk/algorithm/library.py +378 -0
- fraclab_sdk/cli.py +381 -0
- fraclab_sdk/config.py +54 -0
- fraclab_sdk/devkit/__init__.py +25 -0
- fraclab_sdk/devkit/compile.py +342 -0
- fraclab_sdk/devkit/export.py +354 -0
- fraclab_sdk/devkit/validate.py +1043 -0
- fraclab_sdk/errors.py +124 -0
- fraclab_sdk/materialize/__init__.py +8 -0
- fraclab_sdk/materialize/fsops.py +125 -0
- fraclab_sdk/materialize/hash.py +28 -0
- fraclab_sdk/materialize/materializer.py +241 -0
- fraclab_sdk/models/__init__.py +52 -0
- fraclab_sdk/models/bundle_manifest.py +51 -0
- fraclab_sdk/models/dataspec.py +65 -0
- fraclab_sdk/models/drs.py +47 -0
- fraclab_sdk/models/output_contract.py +111 -0
- fraclab_sdk/models/run_output_manifest.py +119 -0
- fraclab_sdk/results/__init__.py +25 -0
- fraclab_sdk/results/preview.py +150 -0
- fraclab_sdk/results/reader.py +329 -0
- fraclab_sdk/run/__init__.py +10 -0
- fraclab_sdk/run/logs.py +42 -0
- fraclab_sdk/run/manager.py +403 -0
- fraclab_sdk/run/subprocess_runner.py +153 -0
- fraclab_sdk/runtime/__init__.py +11 -0
- fraclab_sdk/runtime/artifacts.py +303 -0
- fraclab_sdk/runtime/data_client.py +123 -0
- fraclab_sdk/runtime/runner_main.py +286 -0
- fraclab_sdk/runtime/snapshot_provider.py +1 -0
- fraclab_sdk/selection/__init__.py +11 -0
- fraclab_sdk/selection/model.py +247 -0
- fraclab_sdk/selection/validate.py +54 -0
- fraclab_sdk/snapshot/__init__.py +12 -0
- fraclab_sdk/snapshot/index.py +94 -0
- fraclab_sdk/snapshot/library.py +205 -0
- fraclab_sdk/snapshot/loader.py +217 -0
- fraclab_sdk/specs/manifest.py +89 -0
- fraclab_sdk/utils/io.py +32 -0
- fraclab_sdk-0.1.0.dist-info/METADATA +1622 -0
- fraclab_sdk-0.1.0.dist-info/RECORD +47 -0
- fraclab_sdk-0.1.0.dist-info/WHEEL +4 -0
- fraclab_sdk-0.1.0.dist-info/entry_points.txt +4 -0
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
"""DRS (Data Requirement Specification) model."""
|
|
2
|
+
|
|
3
|
+
from typing import Literal
|
|
4
|
+
|
|
5
|
+
from pydantic import BaseModel, ConfigDict, Field, field_validator
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class DRSDataset(BaseModel):
    """A dataset requirement in a DRS."""

    # Unknown keys are accepted and dropped for forward-compatibility;
    # populate_by_name lets callers use either the field name or its alias.
    model_config = ConfigDict(extra="ignore", populate_by_name=True)

    datasetKey: str = Field(alias="key")  # unique key of the required dataset (wire name: "key")
    resourceType: str | None = Field(default=None, alias="resource")  # optional resource type hint (wire name: "resource")
    cardinality: Literal["one", "many", "zeroOrMany"] = "many"  # how many items may satisfy this requirement
    description: str | None = None  # human-readable description
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class DRS(BaseModel):
    """Data Requirement Specification.

    Defines what data an algorithm requires as input.
    """

    model_config = ConfigDict(extra="ignore", populate_by_name=True)

    schemaVersion: str | None = None
    datasets: list[DRSDataset] = Field(default_factory=list)

    @field_validator("datasets", mode="before")
    @classmethod
    def _coerce_datasets(cls, raw):
        """Accept mapping form {'key': {...}} by converting to list."""
        if not isinstance(raw, dict):
            return raw
        # Fold each mapping entry's key into the item payload under "key".
        return [{"key": name, **(body or {})} for name, body in raw.items()]

    def get_dataset(self, dataset_key: str) -> DRSDataset | None:
        """Get a dataset requirement by key."""
        matches = (entry for entry in self.datasets if entry.datasetKey == dataset_key)
        return next(matches, None)

    def get_dataset_keys(self) -> list[str]:
        """Get all required dataset keys."""
        return [entry.datasetKey for entry in self.datasets]
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"""OutputContract model aligned with OutputSpec documentation."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from typing import Any, Literal
|
|
6
|
+
|
|
7
|
+
from pydantic import BaseModel, ConfigDict, Field
|
|
8
|
+
|
|
9
|
+
# ---------------------------------------------------------------------------
|
|
10
|
+
# Schemas
|
|
11
|
+
# ---------------------------------------------------------------------------
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
class ScalarOutputSchema(BaseModel):
    """Schema for scalar outputs."""

    model_config = ConfigDict(extra="ignore")

    type: Literal["scalar"]  # discriminator: identifies this schema variant
    dtype: str | None = None  # optional value data type name
    precision: int | None = None  # optional decimal precision for display
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class FrameOutputSchema(BaseModel):
    """Schema for frame (tabular) outputs."""

    model_config = ConfigDict(extra="ignore")

    type: Literal["frame"]  # discriminator: identifies this schema variant
    index: Literal["time", "depth", "none"] | None = None  # semantic index axis of the frame
    # extra fields are accepted (and silently dropped) for forward-compatibility
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class ObjectOutputSchema(BaseModel):
    """Schema for structured object outputs."""

    model_config = ConfigDict(extra="ignore")

    type: Literal["object"]  # discriminator: identifies this schema variant
    # keep open for schema details
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
class BlobOutputSchema(BaseModel):
    """Schema for blob outputs."""

    model_config = ConfigDict(extra="ignore")

    type: Literal["blob"]  # discriminator: identifies this schema variant
    mime: str | None = None  # expected MIME type of the blob
    ext: str | None = None  # expected file extension
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
# Union of the per-kind schema models; discriminated by each model's
# literal "type" field. A raw-dict fallback is handled at the field level.
OutputSchema = ScalarOutputSchema | FrameOutputSchema | ObjectOutputSchema | BlobOutputSchema

# ---------------------------------------------------------------------------
# Contracts
# ---------------------------------------------------------------------------

# Entity level that owns an output item.
OwnerType = Literal["stage", "well", "platform"]
# How many items a dataset may contain.
Cardinality = Literal["one", "many"]
# Physical representation of a dataset.
DatasetKind = Literal["frame", "object", "blob", "scalar"]
# Importance of a dataset within the contract.
DatasetRole = Literal["primary", "supporting", "debug"]
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
class OutputDatasetContract(BaseModel):
    """Dataset-level contract (declares a named output channel)."""

    model_config = ConfigDict(extra="ignore")

    key: str  # unique dataset key within the contract
    kind: DatasetKind  # physical representation: "frame" | "object" | "blob" | "scalar"
    owner: OwnerType  # entity level that owns items of this dataset
    cardinality: Cardinality = "many"  # single item vs. many items
    required: bool = True  # whether this dataset must be produced
    # NOTE(review): a field named "schema" shadows pydantic's BaseModel.schema
    # helper (pydantic v2 warns about this) — confirm it is intentional.
    schema: OutputSchema | dict[str, Any]  # typed schema, or raw dict for forward-compat
    role: DatasetRole | None = None  # optional importance marker
    groupPath: list[str] | None = None  # optional grouping path
    dimensions: list[str] = Field(default_factory=list)  # declared dimension names
    description: str | None = None  # human-readable description
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
class OutputContract(BaseModel):
    """Full output contract for an algorithm."""

    model_config = ConfigDict(extra="ignore")

    datasets: list[OutputDatasetContract] = Field(default_factory=list)
    invariants: list[dict[str, Any]] = Field(default_factory=list)
    relations: list[dict[str, Any]] = Field(default_factory=list)

    def get_dataset(self, key: str) -> OutputDatasetContract | None:
        """Get dataset by key."""
        candidates = (entry for entry in self.datasets if entry.key == key)
        return next(candidates, None)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
# Public names exported by this module.
__all__ = [
    "OutputSchema",
    "ScalarOutputSchema",
    "FrameOutputSchema",
    "ObjectOutputSchema",
    "BlobOutputSchema",
    "OutputDatasetContract",
    "OutputContract",
    "OwnerType",
    "DatasetKind",
    "Cardinality",
    "DatasetRole",
]
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
"""Run output manifest model aligned with OutputSpec."""
|
|
2
|
+
|
|
3
|
+
from typing import Any
|
|
4
|
+
|
|
5
|
+
from pydantic import AliasChoices, BaseModel, ConfigDict, Field
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
class RunInfo(BaseModel):
    """Information about a run."""

    model_config = ConfigDict(extra="ignore")

    runId: str  # unique identifier of the run
    algorithmId: str  # identifier of the algorithm that produced the run
    contractVersion: str | None = None  # version of the output contract used
    codeVersion: str | None = None  # version of the algorithm code
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class OwnerRef(BaseModel):
    """Owner reference for an item."""

    model_config = ConfigDict(extra="ignore")

    # Only the fields relevant to the owning level are expected to be set.
    platformId: str | None = None
    wellId: str | None = None
    stageId: str | None = None
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class ArtifactInfo(BaseModel):
    """Information about an output artifact."""

    model_config = ConfigDict(extra="ignore", populate_by_name=True)

    artifactKey: str  # unique key of the artifact
    # Accepts legacy "artifactType" on input; always serializes as "type".
    type: str = Field(
        validation_alias=AliasChoices("type", "artifactType"),
        serialization_alias="type",
    )  # e.g., "scalar", "blob", "json", "frame", "parquet"
    # Accepts legacy "fileUri" on input; always serializes as "uri".
    uri: str | None = Field(
        default=None,
        validation_alias=AliasChoices("uri", "fileUri"),
        serialization_alias="uri",
    )
    mimeType: str | None = None  # MIME type, mainly for blob artifacts
    description: str | None = None  # human-readable description
    value: Any | None = None  # For scalar artifacts
    inline: dict[str, Any] | None = None  # Optional embedded payload

    @property
    def artifactType(self) -> str:
        """Backward-compatible accessor for artifact type."""
        return self.type

    @property
    def fileUri(self) -> str | None:
        """Backward-compatible accessor for file URI."""
        return self.uri
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
class RunOutputItem(BaseModel):
    """Single item within a dataset."""

    # populate_by_name added for consistency with the other aliased models in
    # this package (DRSDataset, ArtifactInfo): without it, constructing by the
    # field name "itemKey" is rejected and only the alias "key" is accepted.
    # Backward-compatible: strictly widens accepted input.
    model_config = ConfigDict(extra="ignore", populate_by_name=True)

    itemKey: str | None = Field(default=None, alias="key")  # optional item identifier (wire name: "key")
    owner: OwnerRef | None = None  # which platform/well/stage this item belongs to
    dims: dict[str, Any] = Field(default_factory=dict)  # dimension values for this item
    meta: dict[str, Any] = Field(default_factory=dict)  # free-form metadata
    inline: dict[str, Any] | None = None  # optional embedded payload
    artifact: ArtifactInfo  # the artifact backing this item
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
class RunOutputDataset(BaseModel):
    """Output dataset in run output manifest."""

    model_config = ConfigDict(extra="ignore")

    datasetKey: str  # key matching a dataset declared in the output contract
    items: list[RunOutputItem] = Field(default_factory=list)  # items produced for this dataset
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
class RunOutputManifest(BaseModel):
    """Manifest for run output."""

    model_config = ConfigDict(extra="ignore")

    schemaVersion: str | None = None
    run: RunInfo
    status: str | None = None
    error: str | None = None
    startedAt: str | None = None
    completedAt: str | None = None
    datasets: list[RunOutputDataset] = Field(default_factory=list)

    def get_artifact(self, artifact_key: str) -> ArtifactInfo | None:
        """Get an artifact by key (searches all datasets)."""
        for ds in self.datasets:
            found = next(
                (it.artifact for it in ds.items if it.artifact.artifactKey == artifact_key),
                None,
            )
            if found is not None:
                return found
        return None

    def list_all_artifacts(self) -> list[ArtifactInfo]:
        """List all artifacts from all datasets."""
        return [item.artifact for ds in self.datasets for item in ds.items]
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
# Public names exported by this module.
__all__ = [
    "RunInfo",
    "OwnerRef",
    "ArtifactInfo",
    "RunOutputItem",
    "RunOutputDataset",
    "RunOutputManifest",
]
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"""Result reading and preview."""
|
|
2
|
+
|
|
3
|
+
from fraclab_sdk.results.preview import (
|
|
4
|
+
get_artifact_preview_type,
|
|
5
|
+
preview_image,
|
|
6
|
+
preview_json_raw,
|
|
7
|
+
preview_json_table,
|
|
8
|
+
preview_scalar,
|
|
9
|
+
)
|
|
10
|
+
from fraclab_sdk.results.reader import (
|
|
11
|
+
ArtifactWithPath,
|
|
12
|
+
ResultReader,
|
|
13
|
+
file_uri_to_path,
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
# Public API re-exported from the reader and preview submodules.
__all__ = [
    "ArtifactWithPath",
    "ResultReader",
    "file_uri_to_path",
    "get_artifact_preview_type",
    "preview_image",
    "preview_json_raw",
    "preview_json_table",
    "preview_scalar",
]
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
"""Preview helpers for result artifacts."""
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from fraclab_sdk.models import ArtifactInfo
|
|
8
|
+
from fraclab_sdk.results.reader import file_uri_to_path
|
|
9
|
+
|
|
10
|
+
|
|
11
|
+
def preview_scalar(artifact: ArtifactInfo) -> Any:
    """Preview a scalar artifact.

    Args:
        artifact: The artifact info.

    Returns:
        The scalar value or None.
    """
    # Only scalar artifacts carry an inline value worth previewing.
    return artifact.value if artifact.artifactType == "scalar" else None
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
def preview_image(artifact: ArtifactInfo) -> Path | None:
    """Get image path for preview.

    Args:
        artifact: The artifact info.

    Returns:
        Path to image file or None if not an image.
    """
    if artifact.artifactType != "blob":
        return None

    # A missing MIME type is treated permissively; only an explicit
    # non-image type disqualifies the artifact.
    mime = artifact.mimeType
    if mime and not mime.startswith("image/"):
        return None

    uri = artifact.fileUri
    return file_uri_to_path(uri) if uri else None
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
def preview_json_table(artifact: ArtifactInfo) -> dict | None:
    """Preview JSON artifact as table data.

    For array of objects, extracts columns and rows for table display.
    Format: {"columns": [...], "rows": [[...], ...]}

    Args:
        artifact: The artifact info.

    Returns:
        Table data dict or None if not suitable for table display.
    """
    if artifact.artifactType not in {"json", "object"} or not artifact.fileUri:
        return None

    payload = json.loads(file_uri_to_path(artifact.fileUri).read_text())

    # Array of objects: column names come from the first element's keys;
    # non-dict entries are skipped.
    if isinstance(payload, list) and payload and isinstance(payload[0], dict):
        columns = list(payload[0].keys())
        rows = [
            [entry.get(col) for col in columns]
            for entry in payload
            if isinstance(entry, dict)
        ]
        return {"columns": columns, "rows": rows}

    # Single object: render as key/value pairs.
    if isinstance(payload, dict):
        return {"columns": ["key", "value"], "rows": [[k, v] for k, v in payload.items()]}

    return None
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def preview_json_raw(artifact: ArtifactInfo, max_lines: int = 50) -> str | None:
    """Preview raw JSON content.

    Args:
        artifact: The artifact info.
        max_lines: Maximum lines to return.

    Returns:
        Pretty-printed JSON string (truncated to ``max_lines`` with a trailing
        "... (N more lines)" marker) or None if not a JSON file artifact.
    """
    if artifact.artifactType != "json":
        return None

    if not artifact.fileUri:
        return None

    path = file_uri_to_path(artifact.fileUri)
    data = json.loads(path.read_text())
    formatted = json.dumps(data, indent=2)

    lines = formatted.split("\n")
    # Remember the full line count before truncating; the original re-split
    # the entire formatted document (via chr(10)) just to recompute it.
    total = len(lines)
    if total > max_lines:
        lines = lines[:max_lines]
        lines.append(f"... ({total - max_lines} more lines)")

    return "\n".join(lines)
|
|
116
|
+
|
|
117
|
+
|
|
118
|
+
def get_artifact_preview_type(artifact: ArtifactInfo) -> str:
    """Determine the best preview type for an artifact.

    Args:
        artifact: The artifact info.

    Returns:
        Preview type: "scalar", "image", "json_table", "json_raw", "file", or "none".
    """
    kind = artifact.artifactType

    if kind == "scalar":
        return "scalar"

    if kind == "blob":
        mime = artifact.mimeType
        return "image" if mime and mime.startswith("image/") else "file"

    if kind in {"json", "object"}:
        # Probe the file contents; an array of objects renders best as a table.
        # Any read/parse failure falls back to raw JSON display.
        if artifact.fileUri:
            try:
                payload = json.loads(file_uri_to_path(artifact.fileUri).read_text())
            except Exception:
                pass
            else:
                if isinstance(payload, list) and payload and isinstance(payload[0], dict):
                    return "json_table"
        return "json_raw"

    if kind in {"frame", "parquet"}:
        return "file"

    return "none"
|