ml-dash 0.6.2rc1__py3-none-any.whl → 0.6.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ml_dash/__init__.py +36 -64
- ml_dash/auth/token_storage.py +267 -226
- ml_dash/auto_start.py +28 -15
- ml_dash/cli.py +16 -2
- ml_dash/cli_commands/api.py +174 -0
- ml_dash/cli_commands/download.py +773 -666
- ml_dash/cli_commands/list.py +164 -14
- ml_dash/cli_commands/login.py +190 -183
- ml_dash/cli_commands/profile.py +92 -0
- ml_dash/cli_commands/upload.py +1312 -1141
- ml_dash/client.py +335 -82
- ml_dash/config.py +119 -119
- ml_dash/experiment.py +1293 -1033
- ml_dash/files.py +339 -224
- ml_dash/log.py +7 -7
- ml_dash/metric.py +359 -100
- ml_dash/params.py +6 -6
- ml_dash/remote_auto_start.py +20 -17
- ml_dash/run.py +211 -65
- ml_dash/snowflake.py +173 -0
- ml_dash/storage.py +1051 -1081
- {ml_dash-0.6.2rc1.dist-info → ml_dash-0.6.4.dist-info}/METADATA +12 -14
- ml_dash-0.6.4.dist-info/RECORD +33 -0
- {ml_dash-0.6.2rc1.dist-info → ml_dash-0.6.4.dist-info}/WHEEL +1 -1
- ml_dash-0.6.2rc1.dist-info/RECORD +0 -30
- {ml_dash-0.6.2rc1.dist-info → ml_dash-0.6.4.dist-info}/entry_points.txt +0 -0
ml_dash/client.py
CHANGED
@@ -9,12 +9,13 @@ import httpx
 class RemoteClient:
     """Client for communicating with ML-Dash server."""

-    def __init__(self, base_url: str, api_key: Optional[str] = None):
+    def __init__(self, base_url: str, namespace: str, api_key: Optional[str] = None):
         """
         Initialize remote client.

         Args:
             base_url: Base URL of ML-Dash server (e.g., "http://localhost:3000")
+            namespace: Namespace slug (e.g., "my-namespace")
             api_key: JWT token for authentication (optional - auto-loads from storage if not provided)

         Note:
@@ -27,6 +28,9 @@ class RemoteClient:
         # Add /api prefix to base URL for REST API calls
         self.base_url = base_url.rstrip("/") + "/api"

+        # Store namespace
+        self.namespace = namespace
+
         # If no api_key provided, try to load from storage
         if not api_key:
             from .auth.token_storage import get_token_storage
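The constructor change above makes the namespace slug a required argument alongside the base URL. A minimal usage sketch of the new signature (the URL and slug below are placeholders):

```python
from ml_dash.client import RemoteClient

client = RemoteClient(
    base_url="http://localhost:3000",  # "/api" is appended internally for REST calls
    namespace="my-namespace",          # new required namespace slug, stored on the client
)
# With api_key omitted, the client tries to load a stored token (see auth.token_storage).
```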
@@ -37,6 +41,7 @@ class RemoteClient:
         self.api_key = api_key
         self._rest_client = None
         self._gql_client = None
+        self._id_cache: Dict[str, str] = {}  # Cache for slug -> ID mappings

     def _ensure_authenticated(self):
         """Check if authenticated, raise error if not."""
@@ -77,6 +82,80 @@ class RemoteClient:
         )
         return self._gql_client

+    def _get_project_id(self, project_slug: str) -> str:
+        """
+        Resolve project ID from slug using GraphQL.
+
+        Args:
+            project_slug: Project slug
+
+        Returns:
+            Project ID (Snowflake ID)
+
+        Raises:
+            ValueError: If project not found
+        """
+        cache_key = f"project:{self.namespace}:{project_slug}"
+        if cache_key in self._id_cache:
+            return self._id_cache[cache_key]
+
+        query = """
+            query GetProject($namespace: String!) {
+                namespace(slug: $namespace) {
+                    projects {
+                        id
+                        slug
+                    }
+                }
+            }
+        """
+        result = self.graphql_query(query, {
+            "namespace": self.namespace
+        })
+
+        projects = result.get("namespace", {}).get("projects", [])
+        for project in projects:
+            if project["slug"] == project_slug:
+                project_id = project["id"]
+                self._id_cache[cache_key] = project_id
+                return project_id
+
+        raise ValueError(f"Project '{project_slug}' not found in namespace '{self.namespace}'")
+
+    def _get_experiment_node_id(self, experiment_id: str) -> str:
+        """
+        Resolve node ID from experiment ID using GraphQL.
+
+        Args:
+            experiment_id: Experiment ID
+
+        Returns:
+            Node ID
+
+        Raises:
+            ValueError: If experiment node not found
+        """
+        cache_key = f"exp_node:{experiment_id}"
+        if cache_key in self._id_cache:
+            return self._id_cache[cache_key]
+
+        query = """
+            query GetExperimentNode($experimentId: ID!) {
+                experimentNode(experimentId: $experimentId) {
+                    id
+                }
+            }
+        """
+        result = self.graphql_query(query, {"experimentId": experiment_id})
+
+        node = result.get("experimentNode")
+        if not node:
+            raise ValueError(f"No node found for experiment ID '{experiment_id}'")
+
+        node_id = node["id"]
+        self._id_cache[cache_key] = node_id
+        return node_id
+
     def create_or_update_experiment(
         self,
         project: str,
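Both new helpers memoize their GraphQL lookups in `self._id_cache`, keyed by `project:<namespace>:<slug>` and `exp_node:<experiment_id>`, so repeated calls only hit the server once. The standalone sketch below illustrates the same memoization pattern with a stand-in resolver; it is not part of the package:

```python
from typing import Callable, Dict

def make_cached_resolver(resolve: Callable[[str], str]) -> Callable[[str], str]:
    """Memoize slug -> ID lookups, mirroring the _id_cache pattern above."""
    cache: Dict[str, str] = {}

    def resolver(slug: str) -> str:
        if slug in cache:              # later lookups skip the network round-trip
            return cache[slug]
        cache[slug] = resolve(slug)    # first lookup pays for the (GraphQL) query
        return cache[slug]

    return resolver

lookup = make_cached_resolver(lambda slug: f"id-for-{slug}")
assert lookup("my-project") == lookup("my-project")  # resolved once, then served from cache
```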
@@ -84,31 +163,38 @@ class RemoteClient:
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         bindrs: Optional[List[str]] = None,
-
+        prefix: Optional[str] = None,
         write_protected: bool = False,
         metadata: Optional[Dict[str, Any]] = None,
     ) -> Dict[str, Any]:
         """
-        Create or update an experiment.
+        Create or update an experiment using unified node API.

         Args:
-            project: Project
+            project: Project slug
             name: Experiment name
             description: Optional description
             tags: Optional list of tags
             bindrs: Optional list of bindrs
-
+            prefix: Full prefix path (ignored in new API - use folders instead)
             write_protected: If True, experiment becomes immutable
             metadata: Optional metadata dict

         Returns:
-            Response dict with experiment,
+            Response dict with experiment, node, and project data

         Raises:
             httpx.HTTPStatusError: If request fails
+            ValueError: If project not found
         """
+        # Resolve project ID from slug
+        project_id = self._get_project_id(project)
+
+        # Build payload for unified node API
         payload = {
+            "type": "EXPERIMENT",
             "name": name,
+            "projectId": project_id,
         }

         if description is not None:
@@ -117,19 +203,26 @@ class RemoteClient:
             payload["tags"] = tags
         if bindrs is not None:
             payload["bindrs"] = bindrs
-        if folder is not None:
-            payload["folder"] = folder
         if write_protected:
             payload["writeProtected"] = write_protected
         if metadata is not None:
             payload["metadata"] = metadata

+        # Call unified node creation API
         response = self._client.post(
-            f"/
+            f"/namespaces/{self.namespace}/nodes",
             json=payload,
         )
         response.raise_for_status()
-
+        result = response.json()
+
+        # Cache the experiment node ID mapping
+        if "experiment" in result and "node" in result:
+            exp_id = result["experiment"]["id"]
+            node_id = result["node"]["id"]
+            self._id_cache[f"exp_node:{exp_id}"] = node_id
+
+        return result

     def update_experiment_status(
         self,
@@ -137,24 +230,27 @@ class RemoteClient:
         status: str,
     ) -> Dict[str, Any]:
         """
-        Update experiment status.
+        Update experiment status using unified node API.

         Args:
             experiment_id: Experiment ID
             status: Status value - "RUNNING" | "COMPLETED" | "FAILED" | "CANCELLED"

         Returns:
-            Response dict with updated
+            Response dict with updated node data

         Raises:
             httpx.HTTPStatusError: If request fails
+            ValueError: If experiment node not found
         """
-
-
-
+        # Resolve node ID from experiment ID
+        node_id = self._get_experiment_node_id(experiment_id)
+
+        # Update node with new status
+        payload = {"status": status}

         response = self._client.patch(
-            f"/
+            f"/nodes/{node_id}",
             json=payload,
         )
         response.raise_for_status()
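Status updates now happen in two steps: the client resolves the experiment's node ID, then PATCHes `/nodes/{node_id}`. A hedged usage sketch (the ID is a placeholder):

```python
# Mark a run as finished; the node ID resolution happens inside the client.
client.update_experiment_status(
    "1234567890123456789",  # placeholder Snowflake experiment ID
    "COMPLETED",            # one of RUNNING | COMPLETED | FAILED | CANCELLED
)
```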
@@ -263,15 +359,17 @@ class RemoteClient:
         metadata: Optional[Dict[str, Any]],
         checksum: str,
         content_type: str,
-        size_bytes: int
+        size_bytes: int,
+        project_id: Optional[str] = None,
+        parent_id: str = "ROOT"
     ) -> Dict[str, Any]:
         """
-        Upload a file to an experiment.
+        Upload a file to an experiment using unified node API.

         Args:
             experiment_id: Experiment ID (Snowflake ID)
             file_path: Local file path
-            prefix: Logical path prefix
+            prefix: Logical path prefix (DEPRECATED - use parent_id for folder structure)
             filename: Original filename
             description: Optional description
             tags: Optional tags
@@ -279,23 +377,43 @@ class RemoteClient:
             checksum: SHA256 checksum
             content_type: MIME type
             size_bytes: File size in bytes
+            project_id: Project ID (optional - will be resolved from experiment if not provided)
+            parent_id: Parent node ID (folder) or "ROOT" for root level

         Returns:
-
+            Response dict with node and physicalFile data

         Raises:
             httpx.HTTPStatusError: If request fails
         """
+        # If project_id not provided, need to resolve it from experiment
+        # For now, assuming we have it or it will be queried separately
+        if project_id is None:
+            # Query experiment to get project ID
+            query = """
+                query GetExperimentProject($experimentId: ID!) {
+                    experimentById(id: $experimentId) {
+                        projectId
+                    }
+                }
+            """
+            result = self.graphql_query(query, {"experimentId": experiment_id})
+            project_id = result.get("experimentById", {}).get("projectId")
+            if not project_id:
+                raise ValueError(f"Could not resolve project ID for experiment {experiment_id}")
+
         # Prepare multipart form data
-        # Read file content first (httpx needs content, not file handle)
         with open(file_path, "rb") as f:
             file_content = f.read()

         files = {"file": (filename, file_content, content_type)}
         data = {
-            "
+            "type": "FILE",
+            "projectId": project_id,
+            "experimentId": experiment_id,
+            "parentId": parent_id,
+            "name": filename,
             "checksum": checksum,
-            "sizeBytes": str(size_bytes),
         }
         if description:
             data["description"] = description
@@ -305,15 +423,53 @@ class RemoteClient:
             import json
             data["metadata"] = json.dumps(metadata)

-        #
+        # Call unified node creation API
         response = self._client.post(
-            f"/
+            f"/namespaces/{self.namespace}/nodes",
            files=files,
            data=data
         )

         response.raise_for_status()
-
+        result = response.json()
+
+        # Transform unified node response to expected file metadata format
+        # The server returns {node: {...}, physicalFile: {...}}
+        # We need to flatten it to match the expected format
+        node = result.get("node", {})
+        physical_file = result.get("physicalFile", {})
+
+        # Convert BigInt IDs and sizeBytes from string back to appropriate types
+        # Node ID should remain as string for consistency
+        node_id = node.get("id")
+        if isinstance(node_id, (int, float)):
+            # If it was deserialized as a number, convert to string to preserve full precision
+            node_id = str(int(node_id))
+
+        size_bytes = physical_file.get("sizeBytes")
+        if isinstance(size_bytes, str):
+            size_bytes = int(size_bytes)
+
+        # Use experimentId from node, not the parameter (which might be a path string)
+        experiment_id_from_node = node.get("experimentId")
+        if isinstance(experiment_id_from_node, (int, float)):
+            experiment_id_from_node = str(int(experiment_id_from_node))
+
+        return {
+            "id": node_id,
+            "experimentId": experiment_id_from_node or experiment_id,
+            "path": prefix,  # Use prefix as path for backward compatibility
+            "filename": filename,
+            "description": node.get("description"),
+            "tags": node.get("tags", []),
+            "contentType": physical_file.get("contentType"),
+            "sizeBytes": size_bytes,
+            "checksum": physical_file.get("checksum"),
+            "metadata": node.get("metadata"),
+            "uploadedAt": node.get("createdAt"),
+            "updatedAt": node.get("updatedAt"),
+            "deletedAt": node.get("deletedAt"),
+        }

     def list_files(
         self,
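The upload path expects the caller to pass `checksum`, `content_type`, and `size_bytes` along with the file. A small sketch of how those inputs might be derived with the standard library before calling the upload method (the file path is a placeholder, and the hex-encoded SHA-256 digest is an assumption about the expected checksum format):

```python
import hashlib
import mimetypes
import os

path = "metrics.csv"  # placeholder local file

with open(path, "rb") as f:
    checksum = hashlib.sha256(f.read()).hexdigest()  # SHA-256 digest; hex encoding assumed

content_type = mimetypes.guess_type(path)[0] or "application/octet-stream"
size_bytes = os.path.getsize(path)
filename = os.path.basename(path)
```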
@@ -322,48 +478,72 @@ class RemoteClient:
         tags: Optional[List[str]] = None
     ) -> List[Dict[str, Any]]:
         """
-        List files in an experiment.
+        List files in an experiment using GraphQL.

         Args:
             experiment_id: Experiment ID (Snowflake ID)
-            prefix: Optional prefix filter
+            prefix: Optional prefix filter (DEPRECATED - filtering not supported in new API)
             tags: Optional tags filter

         Returns:
-            List of file
+            List of file node dicts

         Raises:
             httpx.HTTPStatusError: If request fails
         """
-
-
-
+        query = """
+            query ListExperimentFiles($experimentId: ID!) {
+                experimentById(id: $experimentId) {
+                    files {
+                        id
+                        name
+                        description
+                        tags
+                        metadata
+                        createdAt
+                        pPath
+                        physicalFile {
+                            id
+                            filename
+                            contentType
+                            sizeBytes
+                            checksum
+                            s3Url
+                        }
+                    }
+                }
+            }
+        """
+        result = self.graphql_query(query, {"experimentId": experiment_id})
+        files = result.get("experimentById", {}).get("files", [])
+
+        # Apply client-side filtering if tags specified
         if tags:
-
+            filtered_files = []
+            for file in files:
+                file_tags = file.get("tags", [])
+                if any(tag in file_tags for tag in tags):
+                    filtered_files.append(file)
+            return filtered_files

-
-            f"/experiments/{experiment_id}/files",
-            params=params
-        )
-        response.raise_for_status()
-        result = response.json()
-        return result.get("files", [])
+        return files

     def get_file(self, experiment_id: str, file_id: str) -> Dict[str, Any]:
         """
-        Get file metadata.
+        Get file metadata using unified node API.

         Args:
-            experiment_id: Experiment ID (
-            file_id: File ID (Snowflake ID)
+            experiment_id: Experiment ID (DEPRECATED - not used in new API)
+            file_id: File node ID (Snowflake ID)

         Returns:
-
+            Node metadata dict

         Raises:
             httpx.HTTPStatusError: If request fails
         """
-
+        # file_id is actually the node ID in the new system
+        response = self._client.get(f"/nodes/{file_id}")
         response.raise_for_status()
         return response.json()

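`list_files` now fetches every file node for the experiment over GraphQL and only then applies the tag filter client-side, so `tags` keeps any file carrying at least one of the given tags. An illustrative call (placeholder ID; keyword names follow the docstring):

```python
files = client.list_files(
    experiment_id="1234567890123456789",  # placeholder Snowflake experiment ID
    tags=["checkpoint"],                  # keep files carrying at least one of these tags
)
for node in files:
    print(node["name"], node.get("physicalFile", {}).get("sizeBytes"))
```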
@@ -374,11 +554,11 @@ class RemoteClient:
         dest_path: Optional[str] = None
     ) -> str:
         """
-        Download a file
+        Download a file using unified node API.

         Args:
-            experiment_id: Experiment ID (
-            file_id: File ID (Snowflake ID)
+            experiment_id: Experiment ID (DEPRECATED - not used in new API)
+            file_id: File node ID (Snowflake ID)
             dest_path: Optional destination path (defaults to original filename)

         Returns:
@@ -390,40 +570,39 @@ class RemoteClient:
         """
         # Get file metadata first to get filename and checksum
         file_metadata = self.get_file(experiment_id, file_id)
-        filename = file_metadata
-        expected_checksum = file_metadata
+        filename = file_metadata.get("name") or file_metadata.get("physicalFile", {}).get("filename")
+        expected_checksum = file_metadata.get("physicalFile", {}).get("checksum")

         # Determine destination path
         if dest_path is None:
             dest_path = filename

-        # Download file
-        response = self._client.get(
-            f"/experiments/{experiment_id}/files/{file_id}/download"
-        )
+        # Download file using node API
+        response = self._client.get(f"/nodes/{file_id}/download")
         response.raise_for_status()

         # Write to file
         with open(dest_path, "wb") as f:
             f.write(response.content)

-        # Verify checksum
-
-
-
-
-
-
+        # Verify checksum if available
+        if expected_checksum:
+            from .files import verify_checksum
+            if not verify_checksum(dest_path, expected_checksum):
+                # Delete corrupted file
+                import os
+                os.remove(dest_path)
+                raise ValueError(f"Checksum verification failed for file {file_id}")

         return dest_path

     def delete_file(self, experiment_id: str, file_id: str) -> Dict[str, Any]:
         """
-        Delete a file (soft delete).
+        Delete a file using unified node API (soft delete).

         Args:
-            experiment_id: Experiment ID (
-            file_id: File ID (Snowflake ID)
+            experiment_id: Experiment ID (DEPRECATED - not used in new API)
+            file_id: File node ID (Snowflake ID)

         Returns:
             Dict with id and deletedAt
@@ -431,7 +610,7 @@ class RemoteClient:
         Raises:
             httpx.HTTPStatusError: If request fails
         """
-        response = self._client.delete(f"/
+        response = self._client.delete(f"/nodes/{file_id}")
         response.raise_for_status()
         return response.json()

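`get_file` and `delete_file` now address the file by its node ID; the `experiment_id` argument is kept only for signature compatibility. A usage sketch with placeholder IDs:

```python
meta = client.get_file(experiment_id="1234567890123456789", file_id="9876543210987654321")
print(meta.get("name"), meta.get("physicalFile", {}).get("checksum"))

deleted = client.delete_file(experiment_id="1234567890123456789", file_id="9876543210987654321")
print(deleted.get("deletedAt"))  # soft delete: the node is marked deleted, not removed
```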
@@ -444,17 +623,17 @@ class RemoteClient:
         metadata: Optional[Dict[str, Any]] = None
     ) -> Dict[str, Any]:
         """
-        Update file metadata.
+        Update file metadata using unified node API.

         Args:
-            experiment_id: Experiment ID (
-            file_id: File ID (Snowflake ID)
+            experiment_id: Experiment ID (DEPRECATED - not used in new API)
+            file_id: File node ID (Snowflake ID)
             description: Optional description
             tags: Optional tags
             metadata: Optional metadata

         Returns:
-            Updated
+            Updated node metadata dict

         Raises:
             httpx.HTTPStatusError: If request fails
@@ -468,7 +647,7 @@ class RemoteClient:
             payload["metadata"] = metadata

         response = self._client.patch(
-            f"/
+            f"/nodes/{file_id}",
             json=payload
         )
         response.raise_for_status()
@@ -728,6 +907,9 @@ class RemoteClient:
             metadata
             project {
                 slug
+                namespace {
+                    slug
+                }
             }
             logMetadata {
                 totalLogs
@@ -792,6 +974,9 @@ class RemoteClient:
             metadata
             project {
                 slug
+                namespace {
+                    slug
+                }
             }
             logMetadata {
                 totalLogs
@@ -828,15 +1013,82 @@ class RemoteClient:
         result = self.graphql_query(query, variables)
         return result.get("experiment")

+    def search_experiments_graphql(self, pattern: str) -> List[Dict[str, Any]]:
+        """
+        Search experiments using glob pattern via GraphQL.
+
+        Pattern format: namespace/project/experiment
+        Supports wildcards: *, ?, [0-9], [a-z], etc.
+
+        Args:
+            pattern: Glob pattern (e.g., "tom*/tutorials/*", "*/project-?/exp*")
+
+        Returns:
+            List of experiment dicts matching the pattern
+
+        Raises:
+            httpx.HTTPStatusError: If request fails
+
+        Examples:
+            >>> client.search_experiments_graphql("tom*/tutorials/*")
+            >>> client.search_experiments_graphql("*/my-project/baseline*")
+        """
+        query = """
+            query SearchExperiments($pattern: String!) {
+                searchExperiments(pattern: $pattern) {
+                    id
+                    name
+                    description
+                    tags
+                    status
+                    startedAt
+                    endedAt
+                    metadata
+                    project {
+                        id
+                        slug
+                        name
+                        namespace {
+                            id
+                            slug
+                        }
+                    }
+                    logMetadata {
+                        totalLogs
+                    }
+                    metrics {
+                        name
+                        metricMetadata {
+                            totalDataPoints
+                        }
+                    }
+                    files {
+                        id
+                        filename
+                        path
+                        contentType
+                        sizeBytes
+                        checksum
+                        description
+                        tags
+                        metadata
+                    }
+                }
+            }
+        """
+        variables = {"pattern": pattern}
+        result = self.graphql_query(query, variables)
+        return result.get("searchExperiments", [])
+
     def download_file_streaming(
         self, experiment_id: str, file_id: str, dest_path: str
     ) -> str:
         """
-        Download a file with streaming for large files.
+        Download a file with streaming for large files using unified node API.

         Args:
-            experiment_id: Experiment ID (
-            file_id: File ID (Snowflake ID)
+            experiment_id: Experiment ID (DEPRECATED - not used in new API)
+            file_id: File node ID (Snowflake ID)
             dest_path: Destination path to save file

         Returns:
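The new `search_experiments_graphql` takes a `namespace/project/experiment` glob and returns fully populated experiment dicts. A usage sketch built on the docstring's pattern examples:

```python
matches = client.search_experiments_graphql("*/my-project/baseline*")
for exp in matches:
    ns = exp["project"]["namespace"]["slug"]
    print(f'{ns}/{exp["project"]["slug"]}/{exp["name"]} -> {exp["status"]}')
```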
@@ -848,22 +1100,23 @@ class RemoteClient:
         """
         # Get metadata first for checksum
         file_metadata = self.get_file(experiment_id, file_id)
-        expected_checksum = file_metadata
+        expected_checksum = file_metadata.get("physicalFile", {}).get("checksum")

-        # Stream download
-        with self._client.stream("GET", f"/
+        # Stream download using node API
+        with self._client.stream("GET", f"/nodes/{file_id}/download") as response:
             response.raise_for_status()

             with open(dest_path, "wb") as f:
                 for chunk in response.iter_bytes(chunk_size=8192):
                     f.write(chunk)

-        # Verify checksum
-
-
-
-
-
+        # Verify checksum if available
+        if expected_checksum:
+            from .files import verify_checksum
+            if not verify_checksum(dest_path, expected_checksum):
+                import os
+                os.remove(dest_path)
+                raise ValueError(f"Checksum verification failed for file {file_id}")

         return dest_path

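Streaming downloads also route through `/nodes/{file_id}/download`, writing 8 KiB chunks and verifying the checksum before returning the saved path. Usage sketch with placeholder IDs:

```python
saved_path = client.download_file_streaming(
    experiment_id="1234567890123456789",  # kept for backward compatibility
    file_id="9876543210987654321",        # file node ID
    dest_path="model.ckpt",
)
print(saved_path)  # raises ValueError if the checksum check fails
```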