antioch-py 2.2.4__py3-none-any.whl → 3.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of antioch-py might be problematic. See the registry's advisory page for more details.
- antioch/__init__.py +101 -0
- antioch/{module/execution.py → execution.py} +1 -1
- antioch/{module/input.py → input.py} +2 -4
- antioch/{module/module.py → module.py} +17 -34
- antioch/{module/node.py → node.py} +17 -16
- {antioch_py-2.2.4.dist-info → antioch_py-3.0.0.dist-info}/METADATA +8 -11
- antioch_py-3.0.0.dist-info/RECORD +61 -0
- {antioch_py-2.2.4.dist-info → antioch_py-3.0.0.dist-info}/WHEEL +1 -1
- antioch_py-3.0.0.dist-info/licenses/LICENSE +21 -0
- common/ark/__init__.py +6 -16
- common/ark/ark.py +23 -62
- common/ark/hardware.py +1 -1
- common/ark/kinematics.py +1 -1
- common/ark/module.py +22 -0
- common/ark/node.py +46 -3
- common/ark/scheduler.py +2 -29
- common/ark/sim.py +1 -1
- {antioch/module → common/ark}/token.py +17 -0
- common/assets/rigging.usd +0 -0
- common/constants.py +63 -5
- common/core/__init__.py +37 -24
- common/core/auth.py +87 -112
- common/core/container.py +261 -0
- common/core/registry.py +131 -152
- common/core/rome.py +251 -0
- common/core/telemetry.py +176 -0
- common/core/types.py +219 -0
- common/message/__init__.py +19 -5
- common/message/annotation.py +174 -23
- common/message/array.py +25 -1
- common/message/camera.py +23 -1
- common/message/color.py +32 -6
- common/message/detection.py +40 -0
- common/message/foxglove.py +20 -0
- common/message/frame.py +71 -7
- common/message/image.py +58 -9
- common/message/imu.py +24 -4
- common/message/joint.py +69 -10
- common/message/log.py +52 -7
- common/message/pir.py +23 -8
- common/message/plot.py +57 -0
- common/message/point.py +55 -6
- common/message/point_cloud.py +55 -19
- common/message/pose.py +59 -19
- common/message/quaternion.py +105 -92
- common/message/radar.py +195 -29
- common/message/twist.py +34 -0
- common/message/types.py +40 -5
- common/message/vector.py +180 -245
- common/sim/__init__.py +49 -0
- common/{session/config.py → sim/objects.py} +97 -27
- common/sim/state.py +11 -0
- common/utils/comms.py +30 -12
- common/utils/logger.py +26 -7
- antioch/message.py +0 -87
- antioch/module/__init__.py +0 -53
- antioch/session/__init__.py +0 -152
- antioch/session/ark.py +0 -500
- antioch/session/asset.py +0 -65
- antioch/session/error.py +0 -80
- antioch/session/objects/__init__.py +0 -40
- antioch/session/objects/animation.py +0 -162
- antioch/session/objects/articulation.py +0 -180
- antioch/session/objects/basis_curve.py +0 -180
- antioch/session/objects/camera.py +0 -65
- antioch/session/objects/collision.py +0 -46
- antioch/session/objects/geometry.py +0 -58
- antioch/session/objects/ground_plane.py +0 -48
- antioch/session/objects/imu.py +0 -53
- antioch/session/objects/joint.py +0 -49
- antioch/session/objects/light.py +0 -123
- antioch/session/objects/pir_sensor.py +0 -102
- antioch/session/objects/radar.py +0 -62
- antioch/session/objects/rigid_body.py +0 -197
- antioch/session/objects/xform.py +0 -119
- antioch/session/record.py +0 -158
- antioch/session/scene.py +0 -1544
- antioch/session/session.py +0 -211
- antioch/session/task.py +0 -309
- antioch_py-2.2.4.dist-info/RECORD +0 -85
- antioch_py-2.2.4.dist-info/entry_points.txt +0 -2
- common/core/agent.py +0 -324
- common/core/task.py +0 -36
- common/message/velocity.py +0 -11
- common/rome/__init__.py +0 -9
- common/rome/client.py +0 -435
- common/rome/error.py +0 -16
- common/session/__init__.py +0 -31
- common/session/environment.py +0 -31
- common/session/sim.py +0 -129
- common/utils/usd.py +0 -12
- /antioch/{module/clock.py → clock.py} +0 -0
- {antioch_py-2.2.4.dist-info → antioch_py-3.0.0.dist-info}/top_level.txt +0 -0
- /common/message/{base.py → message.py} +0 -0
common/core/container.py
ADDED
|
@@ -0,0 +1,261 @@
|
|
|
1
|
+
import contextlib
|
|
2
|
+
import time
|
|
3
|
+
from enum import Enum
|
|
4
|
+
|
|
5
|
+
import docker
|
|
6
|
+
from docker.errors import APIError
|
|
7
|
+
from docker.models.containers import Container
|
|
8
|
+
|
|
9
|
+
from common.ark import Ark as ArkDefinition, Environment
|
|
10
|
+
from common.ark.module import ModuleImage, ModuleReady, ModuleStart
|
|
11
|
+
from common.constants import ANTIOCH_API_URL
|
|
12
|
+
from common.core.auth import AuthHandler
|
|
13
|
+
from common.core.rome import RomeClient
|
|
14
|
+
from common.utils.comms import CommsSession
|
|
15
|
+
from common.utils.time import now_us
|
|
16
|
+
|
|
17
|
+
# Container naming prefix for all Antioch module containers
|
|
18
|
+
CONTAINER_PREFIX = "antioch-module-"
|
|
19
|
+
|
|
20
|
+
# Synchronization paths for module coordination
|
|
21
|
+
ARK_MODULE_READY_PATH = "_ark/module_ready"
|
|
22
|
+
ARK_MODULE_START_PATH = "_ark/module_start"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
class ContainerSource(str, Enum):
    """Where a module's container image is sourced from."""

    LOCAL = "Local"
    REMOTE = "Remote"
|
|
32
|
+
|
|
33
|
+
|
|
34
|
+
class ContainerManagerError(Exception):
    """Error raised when a container management operation cannot be completed."""
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class ContainerManager:
    """
    Manages Docker containers for Ark modules.

    Responsible for starting module containers, coordinating their startup
    handshake, and tearing them down. Containers run with host networking
    so Zenoh traffic can flow between them.
    """

    def __init__(self) -> None:
        """
        Create a new container manager.

        Opens a Docker client and a Zenoh communication session.
        """

        self._comms = CommsSession()
        self._client = docker.from_env()
        self._containers: dict[str, Container] = {}

    def launch_ark(
        self,
        ark: ArkDefinition,
        source: ContainerSource = ContainerSource.LOCAL,
        environment: Environment = Environment.SIM,
        debug: bool = False,
        timeout: float = 30.0,
    ) -> int:
        """
        Launch all module containers for an Ark.

        Any previously running module containers are stopped first, so
        calling this repeatedly is idempotent.

        :param ark: Ark definition to launch.
        :param source: Container image source (local or remote).
        :param environment: Environment to run in (sim or real).
        :param debug: Enable debug mode.
        :param timeout: Timeout in seconds for modules to become ready.
        :return: Global start time in microseconds.
        :raises ContainerManagerError: If environment is incompatible or launch fails.
        """

        # Reject capability/environment mismatches up front
        if ark.capability == Environment.SIM and environment == Environment.REAL:
            raise ContainerManagerError(f"Ark '{ark.name}' has sim capability but requested for real")
        if ark.capability == Environment.REAL and environment == Environment.SIM:
            raise ContainerManagerError(f"Ark '{ark.name}' has real capability but requested for sim")

        # Tear down any leftover module containers (idempotent relaunch)
        self._stop_all()

        # Remote pulls require GAR credentials
        gar_auth = None
        if source == ContainerSource.REMOTE:
            gar_auth = self._get_gar_auth()

        # Resolve a (module name, container name, image) spec per module
        specs: list[tuple[str, str, str]] = []
        for module in ark.modules:
            image = self._get_image(module.image, environment)
            if image is None:
                raise ContainerManagerError(f"No image for module '{module.name}' in {environment}")
            if gar_auth is not None:
                image = f"{gar_auth['registry_host']}/{gar_auth['repository']}/{image}"
            container_name = f"{CONTAINER_PREFIX}{ark.name.replace('_', '-')}-{module.name.replace('_', '-')}"
            specs.append((module.name, container_name, image))

        # Fetch remote images before starting anything
        if gar_auth is not None:
            self._pull_images([spec[2] for spec in specs], gar_auth)

        # Subscribe for readiness reports before the first container starts
        ready_sub = self._comms.declare_async_subscriber(ARK_MODULE_READY_PATH)

        serialized_ark = ark.model_dump_json()
        for module_name, container_name, image in specs:
            self._launch(module_name, container_name, image, serialized_ark, environment, debug)

        # Poll until every module reports ready or the deadline passes
        waiting = {m.name for m in ark.modules}
        started_at = time.time()
        while waiting:
            if time.time() - started_at > timeout:
                raise ContainerManagerError(f"Timeout waiting for modules: {', '.join(sorted(waiting))}")
            ready = ready_sub.recv_timeout(ModuleReady, timeout=0.1)
            if ready is not None:
                print(f"Module ready: {ready.module_name}")
                waiting.discard(ready.module_name)

        # Broadcast a shared start time two seconds out so modules begin in sync
        global_start_us = ((now_us() // 1_000_000) + 2) * 1_000_000
        self._comms.declare_publisher(ARK_MODULE_START_PATH).publish(ModuleStart(global_start_time_us=global_start_us))
        return global_start_us

    def stop(self, timeout: float = 10.0) -> None:
        """
        Stop all module containers.

        :param timeout: Timeout in seconds for container stop operation.
        """

        self._stop_all(timeout)

    def close(self, timeout: float = 10.0) -> None:
        """
        Close the container manager and release its resources.

        Stops every module container, then closes the Zenoh session.

        :param timeout: Timeout in seconds for container stop operation.
        """

        self._stop_all(timeout)
        self._comms.close()

    def _launch(
        self,
        module_name: str,
        container_name: str,
        image: str,
        ark_json: str,
        environment: Environment,
        debug: bool,
    ) -> None:
        """
        Launch a single module container.

        :param module_name: Name of the module.
        :param container_name: Docker container name.
        :param image: Docker image to use.
        :param ark_json: Serialized Ark definition.
        :param environment: Environment (sim or real).
        :param debug: Enable debug mode.
        :raises ContainerManagerError: If container launch fails.
        """

        try:
            # Host networking + IPC so the module shares the manager's Zenoh fabric
            container = self._client.containers.run(
                image=image,
                name=container_name,
                environment={
                    "_MODULE_NAME": module_name,
                    "_ARK": ark_json,
                    "_ENVIRONMENT": str(environment.value),
                    "_DEBUG": str(debug).lower(),
                },
                network_mode="host",
                ipc_mode="host",
                detach=True,
                remove=False,
            )
        except APIError as err:
            raise ContainerManagerError(f"Failed to launch '{container_name}': {err}") from err
        self._containers[container_name] = container
        print(f"Launched container: {container_name}")

    def _stop_all(self, timeout: float = 10.0) -> None:
        """
        Stop every Antioch module container.

        Scans all containers for the antioch-module- prefix and stops each one.

        :param timeout: Timeout in seconds for stop operation.
        """

        with contextlib.suppress(APIError):
            for container in self._client.containers.list(all=True):
                if not container.name:
                    continue
                if container.name.startswith(CONTAINER_PREFIX):
                    print(f"Stopping container: {container.name}")
                    self._stop_container(container, timeout)

        self._containers.clear()

    def _stop_container(self, container: Container, timeout: float) -> None:
        """
        Stop and remove a single container, ignoring Docker API failures.

        :param container: Docker container to stop.
        :param timeout: Timeout in seconds for stop operation.
        """

        with contextlib.suppress(APIError):
            container.stop(timeout=int(timeout))
        with contextlib.suppress(APIError):
            container.remove(force=True)

    def _get_image(self, image: str | ModuleImage, environment: Environment) -> str | None:
        """
        Resolve the image name for the given environment.

        :param image: Image specification (string or ModuleImage).
        :param environment: Environment to get image for.
        :return: Image name or None if not available.
        """

        if isinstance(image, str):
            return image
        if environment == Environment.SIM:
            return image.sim
        return image.real

    def _get_gar_auth(self) -> dict:
        """
        Fetch Google Artifact Registry authentication credentials.

        :return: Dictionary containing registry host, repository, and access token.
        """

        auth = AuthHandler()
        rome = RomeClient(ANTIOCH_API_URL, auth.get_token())
        return rome.get_gar_token()

    def _pull_images(self, images: list[str], gar_auth: dict) -> None:
        """
        Pull container images from the registry.

        :param images: List of image names to pull.
        :param gar_auth: GAR authentication credentials.
        """

        auth_config = {"username": "oauth2accesstoken", "password": gar_auth["access_token"]}
        # De-duplicate so each distinct image is pulled once
        for image in set(images):
            print(f"Pulling image: {image}")
            self._client.images.pull(image, auth_config=auth_config)
|
common/core/registry.py
CHANGED
|
@@ -1,12 +1,15 @@
|
|
|
1
1
|
import json
|
|
2
|
+
import os
|
|
3
|
+
import tempfile
|
|
2
4
|
from collections import defaultdict
|
|
3
5
|
from datetime import datetime
|
|
4
6
|
from pathlib import Path
|
|
5
7
|
|
|
6
|
-
from common.ark import Ark as ArkDefinition
|
|
8
|
+
from common.ark import Ark as ArkDefinition
|
|
7
9
|
from common.constants import ANTIOCH_API_URL, get_ark_dir, get_asset_dir
|
|
8
|
-
from common.core.auth import
|
|
9
|
-
from common.rome import RomeClient
|
|
10
|
+
from common.core.auth import AuthHandler
|
|
11
|
+
from common.core.rome import RomeClient
|
|
12
|
+
from common.core.types import ArkReference, ArkVersionReference, AssetReference, AssetVersionReference
|
|
10
13
|
|
|
11
14
|
|
|
12
15
|
def list_local_arks() -> list[ArkReference]:
|
|
@@ -17,17 +20,43 @@ def list_local_arks() -> list[ArkReference]:
|
|
|
17
20
|
"""
|
|
18
21
|
|
|
19
22
|
arks_dir = get_ark_dir()
|
|
20
|
-
files_by_name = defaultdict(list)
|
|
21
|
-
for file_path in arks_dir.iterdir():
|
|
22
|
-
if file_path.is_file() and (file_path.name.endswith(":ark.json") or file_path.name.endswith(":asset.usdz")):
|
|
23
|
-
name = file_path.name.split(":")[0]
|
|
24
|
-
files_by_name[name].append(file_path)
|
|
25
23
|
|
|
24
|
+
# Group files by ark name and version
|
|
25
|
+
# File format: {name}:{version}:ark.json or {name}:{version}:asset.usdz
|
|
26
|
+
files_by_name_version: dict[str, dict[str, dict[str, Path]]] = defaultdict(lambda: defaultdict(dict))
|
|
27
|
+
for file_path in arks_dir.iterdir():
|
|
28
|
+
if not file_path.is_file():
|
|
29
|
+
continue
|
|
30
|
+
if file_path.name.endswith(":ark.json"):
|
|
31
|
+
name, version, _ = file_path.name.rsplit(":", 2)
|
|
32
|
+
files_by_name_version[name][version]["ark"] = file_path
|
|
33
|
+
elif file_path.name.endswith(":asset.usdz"):
|
|
34
|
+
name, version, _ = file_path.name.rsplit(":", 2)
|
|
35
|
+
files_by_name_version[name][version]["asset"] = file_path
|
|
36
|
+
|
|
37
|
+
# Build references for each ark
|
|
26
38
|
results = []
|
|
27
|
-
for name,
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
39
|
+
for name, versions in files_by_name_version.items():
|
|
40
|
+
version_refs = []
|
|
41
|
+
for version, files in versions.items():
|
|
42
|
+
ark_file = files.get("ark")
|
|
43
|
+
if ark_file is None:
|
|
44
|
+
continue
|
|
45
|
+
asset_file = files.get("asset")
|
|
46
|
+
ark_stat = ark_file.stat()
|
|
47
|
+
version_refs.append(
|
|
48
|
+
ArkVersionReference(
|
|
49
|
+
version=version,
|
|
50
|
+
full_path=str(ark_file),
|
|
51
|
+
asset_path=str(asset_file) if asset_file else None,
|
|
52
|
+
size_bytes=ark_stat.st_size,
|
|
53
|
+
created_at=datetime.fromtimestamp(ark_stat.st_ctime).isoformat(),
|
|
54
|
+
updated_at=datetime.fromtimestamp(ark_stat.st_mtime).isoformat(),
|
|
55
|
+
asset_size_bytes=asset_file.stat().st_size if asset_file else None,
|
|
56
|
+
)
|
|
57
|
+
)
|
|
58
|
+
if version_refs:
|
|
59
|
+
results.append(build_ark_reference_from_versions(name, version_refs))
|
|
31
60
|
|
|
32
61
|
return results
|
|
33
62
|
|
|
@@ -60,8 +89,8 @@ def get_ark_version_reference(name: str, version: str) -> ArkVersionReference:
|
|
|
60
89
|
for version_ref in ark_ref.versions:
|
|
61
90
|
if version_ref.version == version:
|
|
62
91
|
return version_ref
|
|
63
|
-
raise FileNotFoundError(f"Version {version} of Ark {name} not found
|
|
64
|
-
raise FileNotFoundError(f"No versions of Ark {name} found
|
|
92
|
+
raise FileNotFoundError(f"Version {version} of Ark {name} not found locally. Please pull the Ark first.")
|
|
93
|
+
raise FileNotFoundError(f"No versions of Ark {name} found locally. Please pull the Ark first.")
|
|
65
94
|
|
|
66
95
|
|
|
67
96
|
def get_asset_path(name: str, version: str, extension: str = "usdz", assert_exists: bool = True) -> Path:
|
|
@@ -94,19 +123,34 @@ def list_local_assets() -> list[AssetReference]:
|
|
|
94
123
|
if not assets_dir.exists():
|
|
95
124
|
return []
|
|
96
125
|
|
|
97
|
-
|
|
126
|
+
# Group files by asset name and version
|
|
127
|
+
# File format: {name}:{version}:file.{extension}
|
|
128
|
+
files_by_name_version: dict[str, dict[str, Path]] = defaultdict(dict)
|
|
98
129
|
for file_path in assets_dir.iterdir():
|
|
99
|
-
if file_path.is_file():
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
130
|
+
if not file_path.is_file():
|
|
131
|
+
continue
|
|
132
|
+
parts = file_path.stem.split(":")
|
|
133
|
+
if len(parts) == 3 and parts[-1] == "file":
|
|
134
|
+
name, version = parts[0], parts[1]
|
|
135
|
+
files_by_name_version[name][version] = file_path
|
|
104
136
|
|
|
137
|
+
# Build references for each asset
|
|
105
138
|
results = []
|
|
106
|
-
for name,
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
139
|
+
for name, versions in files_by_name_version.items():
|
|
140
|
+
version_refs = []
|
|
141
|
+
for version, asset_file in versions.items():
|
|
142
|
+
asset_stat = asset_file.stat()
|
|
143
|
+
version_refs.append(
|
|
144
|
+
AssetVersionReference(
|
|
145
|
+
version=version,
|
|
146
|
+
full_path=str(asset_file),
|
|
147
|
+
size_bytes=asset_stat.st_size,
|
|
148
|
+
created_at=datetime.fromtimestamp(asset_stat.st_ctime).isoformat(),
|
|
149
|
+
updated_at=datetime.fromtimestamp(asset_stat.st_mtime).isoformat(),
|
|
150
|
+
)
|
|
151
|
+
)
|
|
152
|
+
if version_refs:
|
|
153
|
+
results.append(build_asset_reference_from_versions(name, version_refs))
|
|
110
154
|
|
|
111
155
|
return results
|
|
112
156
|
|
|
@@ -121,13 +165,8 @@ def list_remote_arks() -> list[ArkReference]:
|
|
|
121
165
|
:raises AuthError: If not authenticated.
|
|
122
166
|
"""
|
|
123
167
|
|
|
124
|
-
# Get auth token
|
|
125
168
|
auth = AuthHandler()
|
|
126
169
|
token = auth.get_token()
|
|
127
|
-
if token is None:
|
|
128
|
-
raise AuthError("User not authenticated. Please login first")
|
|
129
|
-
|
|
130
|
-
# Create Rome client and list arks
|
|
131
170
|
rome_client = RomeClient(api_url=ANTIOCH_API_URL, token=token)
|
|
132
171
|
return rome_client.list_arks()
|
|
133
172
|
|
|
@@ -136,6 +175,8 @@ def pull_remote_ark(name: str, version: str, overwrite: bool = False) -> ArkDefi
|
|
|
136
175
|
"""
|
|
137
176
|
Pull an Ark from remote registry to local storage.
|
|
138
177
|
|
|
178
|
+
Downloads the Ark config (ark.json) and asset (asset.usdz) if present.
|
|
179
|
+
|
|
139
180
|
Requires authentication.
|
|
140
181
|
|
|
141
182
|
:param name: Name of the Ark.
|
|
@@ -148,30 +189,26 @@ def pull_remote_ark(name: str, version: str, overwrite: bool = False) -> ArkDefi
|
|
|
148
189
|
# Check if Ark already exists locally
|
|
149
190
|
arks_dir = get_ark_dir()
|
|
150
191
|
ark_json_path = arks_dir / f"{name}:{version}:ark.json"
|
|
192
|
+
ark_asset_path = arks_dir / f"{name}:{version}:asset.usdz"
|
|
151
193
|
if ark_json_path.exists() and not overwrite:
|
|
152
194
|
return load_local_ark(name, version)
|
|
153
195
|
|
|
154
|
-
# Get auth token
|
|
155
196
|
auth = AuthHandler()
|
|
156
197
|
token = auth.get_token()
|
|
157
|
-
if not token:
|
|
158
|
-
raise AuthError("User not authenticated. Please login first")
|
|
159
|
-
|
|
160
|
-
# Create Rome client and fetch Ark definition and save to local storage
|
|
161
198
|
rome_client = RomeClient(api_url=ANTIOCH_API_URL, token=token)
|
|
162
|
-
ark = rome_client.get_ark(name=name, version=version)
|
|
163
|
-
|
|
164
|
-
# Save Ark JSON
|
|
165
|
-
with open(ark_json_path, "wb") as f:
|
|
166
|
-
f.write(json.dumps(ark).encode("utf-8"))
|
|
167
199
|
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
200
|
+
print(f"Pulling {name} v{version}")
|
|
201
|
+
downloaded_asset = rome_client.pull_ark(
|
|
202
|
+
name=name,
|
|
203
|
+
version=version,
|
|
204
|
+
config_output_path=str(ark_json_path),
|
|
205
|
+
asset_output_path=str(ark_asset_path),
|
|
206
|
+
)
|
|
207
|
+
print(" ✓ Config downloaded")
|
|
208
|
+
if downloaded_asset:
|
|
209
|
+
print(" ✓ Asset downloaded")
|
|
210
|
+
print(f"✓ Ark {name} v{version} pulled successfully")
|
|
211
|
+
return load_local_ark(name, version)
|
|
175
212
|
|
|
176
213
|
|
|
177
214
|
def list_remote_assets() -> list[AssetReference]:
|
|
@@ -184,17 +221,12 @@ def list_remote_assets() -> list[AssetReference]:
|
|
|
184
221
|
:raises AuthError: If not authenticated.
|
|
185
222
|
"""
|
|
186
223
|
|
|
187
|
-
# Get auth token
|
|
188
224
|
token = AuthHandler().get_token()
|
|
189
|
-
if token is None:
|
|
190
|
-
raise AuthError("User not authenticated. Please login first")
|
|
191
|
-
|
|
192
|
-
# Create Rome client and list assets
|
|
193
225
|
rome_client = RomeClient(api_url=ANTIOCH_API_URL, token=token)
|
|
194
226
|
return rome_client.list_assets()
|
|
195
227
|
|
|
196
228
|
|
|
197
|
-
def pull_remote_asset(name: str, version: str, overwrite: bool = False
|
|
229
|
+
def pull_remote_asset(name: str, version: str, overwrite: bool = False) -> Path:
|
|
198
230
|
"""
|
|
199
231
|
Pull an asset from remote registry to local storage.
|
|
200
232
|
|
|
@@ -203,129 +235,76 @@ def pull_remote_asset(name: str, version: str, overwrite: bool = False, show_pro
|
|
|
203
235
|
:param name: Name of the asset.
|
|
204
236
|
:param version: Version of the asset.
|
|
205
237
|
:param overwrite: Overwrite local asset if it already exists.
|
|
206
|
-
:param show_progress: Show download progress bar.
|
|
207
238
|
:return: Path to the downloaded asset file.
|
|
208
239
|
:raises AuthError: If not authenticated.
|
|
209
240
|
"""
|
|
210
241
|
|
|
211
|
-
# Get auth token
|
|
212
|
-
token = AuthHandler().get_token()
|
|
213
|
-
if token is None:
|
|
214
|
-
raise AuthError("User not authenticated. Please login first")
|
|
215
|
-
|
|
216
|
-
# Create Rome client and get asset metadata to determine extension
|
|
217
|
-
rome_client = RomeClient(api_url=ANTIOCH_API_URL, token=token)
|
|
218
|
-
metadata = rome_client.get_asset_metadata(name=name, version=version)
|
|
219
|
-
extension = metadata.get("extension", "usdz")
|
|
220
|
-
|
|
221
242
|
# Check if asset already exists locally
|
|
222
|
-
|
|
243
|
+
# NOTE: Only checks USDZ assets for now
|
|
244
|
+
asset_file_path = get_asset_path(name=name, version=version, extension="usdz", assert_exists=False)
|
|
223
245
|
if asset_file_path.exists() and not overwrite:
|
|
224
|
-
print(f"Asset {name}:{version} already exists locally, skipping download")
|
|
225
246
|
return asset_file_path
|
|
226
247
|
|
|
227
|
-
|
|
228
|
-
rome_client
|
|
229
|
-
|
|
248
|
+
token = AuthHandler().get_token()
|
|
249
|
+
rome_client = RomeClient(api_url=ANTIOCH_API_URL, token=token)
|
|
250
|
+
temp_path: str | None = None
|
|
251
|
+
|
|
252
|
+
try:
|
|
253
|
+
# Download to a temp file in the destination directory so publishing can be atomic
|
|
254
|
+
# This avoids EXDEV when the asset directory is a separate mount (common in Kubernetes)
|
|
255
|
+
asset_file_path.parent.mkdir(parents=True, exist_ok=True)
|
|
256
|
+
safe_prefix = f".{name.replace(':', '_')}.{version.replace(':', '_')}."
|
|
257
|
+
fd, temp_path = tempfile.mkstemp(prefix=safe_prefix, dir=str(asset_file_path.parent))
|
|
258
|
+
os.close(fd)
|
|
259
|
+
|
|
260
|
+
# Pull asset - metadata comes back from response body
|
|
261
|
+
metadata = rome_client.pull_asset(name=name, version=version, output_path=temp_path)
|
|
262
|
+
extension = metadata.get("extension", "usdz")
|
|
263
|
+
|
|
264
|
+
# Get final path with correct extension
|
|
265
|
+
asset_file_path = get_asset_path(name=name, version=version, extension=extension, assert_exists=False)
|
|
266
|
+
asset_file_path.parent.mkdir(parents=True, exist_ok=True)
|
|
267
|
+
|
|
268
|
+
# Publish atomically on the same filesystem
|
|
269
|
+
Path(temp_path).replace(asset_file_path)
|
|
270
|
+
return asset_file_path
|
|
271
|
+
finally:
|
|
272
|
+
# Clean up if download fails or publish raises
|
|
273
|
+
if temp_path is not None:
|
|
274
|
+
Path(temp_path).unlink(missing_ok=True)
|
|
230
275
|
|
|
231
276
|
|
|
232
|
-
def
|
|
277
|
+
def build_ark_reference_from_versions(name: str, version_refs: list[ArkVersionReference]) -> ArkReference | None:
    """
    Build an ArkReference from version references.

    :param name: The Ark name.
    :param version_refs: List of ArkVersionReference instances.
    :return: ArkReference or None if no versions exist.
    """

    if not version_refs:
        return None

    # Roll the per-version timestamps up to the ark level; empty string when none are set
    creation_times = [ref.created_at for ref in version_refs if ref.created_at]
    update_times = [ref.updated_at for ref in version_refs if ref.updated_at]
    return ArkReference(
        name=name,
        versions=version_refs,
        created_at=min(creation_times, default=""),
        updated_at=max(update_times, default=""),
    )
|
|
285
293
|
|
|
286
294
|
|
|
287
|
-
def
|
|
295
|
+
def build_asset_reference_from_versions(name: str, version_refs: list[AssetVersionReference]) -> AssetReference | None:
    """
    Build an AssetReference from version references.

    :param name: The Asset name.
    :param version_refs: List of AssetVersionReference instances.
    :return: AssetReference or None if no versions exist.
    """

    if not version_refs:
        return None

    # Roll the per-version timestamps up to the asset level; empty string when none are set
    creation_times = [ref.created_at for ref in version_refs if ref.created_at]
    update_times = [ref.updated_at for ref in version_refs if ref.updated_at]
    return AssetReference(
        name=name,
        versions=version_refs,
        created_at=min(creation_times, default=""),
        updated_at=max(update_times, default=""),
    )
|