ml-dash 0.6.11__tar.gz → 0.6.13__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ml_dash-0.6.11 → ml_dash-0.6.13}/PKG-INFO +1 -1
- {ml_dash-0.6.11 → ml_dash-0.6.13}/pyproject.toml +1 -1
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/__init__.py +15 -13
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/buffer.py +9 -6
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/upload.py +20 -20
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/client.py +28 -28
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/run.py +65 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/LICENSE +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/README.md +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/auth/__init__.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/auth/constants.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/auth/device_flow.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/auth/device_secret.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/auth/exceptions.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/auth/token_storage.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/auto_start.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/__init__.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/api.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/create.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/download.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/list.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/login.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/logout.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/profile.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/cli_commands/remove.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/config.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/experiment.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/files.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/log.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/metric.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/params.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/py.typed +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/remote_auto_start.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/snowflake.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/storage.py +0 -0
- {ml_dash-0.6.11 → ml_dash-0.6.13}/src/ml_dash/track.py +0 -0
(In the hunks below, an ellipsis … marks removed-line content that the diff viewer truncated.)

src/ml_dash/__init__.py (+15 -13)

```diff
@@ -43,18 +43,19 @@ from .params import ParametersBuilder
 from .run import RUN
 from .storage import LocalStorage
 
-__version__ = "0.6.11"
+__version__ = "0.6.13"
 
-# …
-…
+# Required version - MUST match exactly (blocks all older versions)
+# Update this with EVERY release to force users to upgrade
+REQUIRED_VERSION = "0.6.13"
 
 
 def _check_version_compatibility():
     """
-    Enforce …
+    Enforce strict version requirement.
 
-    Raises ImportError if installed version …
-    This ensures users …
+    Raises ImportError if installed version doesn't match the required version.
+    This ensures all users are on the latest version with newest features and bug fixes.
     """
     try:
         from packaging import version
@@ -64,25 +65,26 @@ def _check_version_compatibility():
         return
 
     current = version.parse(__version__)
-    …
+    required = version.parse(REQUIRED_VERSION)
 
-    if current < …
+    if current < required:
         raise ImportError(
             f"\n"
             f"{'=' * 80}\n"
-            f"ERROR: ml-dash version {__version__} is …
+            f"ERROR: ml-dash version {__version__} is outdated!\n"
             f"{'=' * 80}\n"
             f"\n"
-            f"…
-            f"…
+            f"Your installed version ({__version__}) is no longer supported.\n"
+            f"Required version: {REQUIRED_VERSION}\n"
             f"\n"
             f"Please upgrade to the latest version:\n"
             f"\n"
             f"  pip install --upgrade ml-dash\n"
             f"\n"
-            f"Or …
+            f"Or with uv:\n"
             f"\n"
-            f"  pip install ml-dash …
+            f"  uv pip install --upgrade ml-dash\n"
+            f"  uv sync --upgrade-package ml-dash\n"
             f"\n"
             f"{'=' * 80}\n"
         )
```
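Note that despite the "MUST match exactly" comment, the code only rejects *older* installs (`current < required`); a newer installed version would still pass. A standalone sketch of the gate using `packaging` (values illustrative):

```python
from packaging import version

__version__ = "0.6.13"       # installed version (illustrative)
REQUIRED_VERSION = "0.6.13"  # bumped with every release

def check_version_compatibility() -> None:
    """Raise ImportError when the installed version is older than required."""
    current = version.parse(__version__)
    required = version.parse(REQUIRED_VERSION)
    if current < required:
        raise ImportError(
            f"ml-dash {__version__} is outdated; {REQUIRED_VERSION} is required. "
            f"Run: pip install --upgrade ml-dash"
        )

check_version_compatibility()  # no-op when the versions match
```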
src/ml_dash/buffer.py (+9 -6)

```diff
@@ -340,18 +340,21 @@ class BackgroundBufferManager:
         if items:
             print(f"[ML-Dash] Flushing {', '.join(items)}...", flush=True)
 
-        # Flush logs immediately
-        self._flush_logs()
+        # Flush logs immediately (loop until empty)
+        while not self._log_queue.empty():
+            self._flush_logs()
 
-        # Flush all metrics immediately
+        # Flush all metrics immediately (loop until empty for each metric)
         for metric_name in list(self._metric_queues.keys()):
-            self._flush_metric(metric_name)
+            while not self._metric_queues[metric_name].empty():
+                self._flush_metric(metric_name)
 
         # Flush all tracks immediately
         self.flush_tracks()
 
-        # Flush files immediately
-        self._flush_files()
+        # Flush files immediately (loop until empty)
+        while not self._file_queue.empty():
+            self._flush_files()
 
         if log_count > 0 or metric_count > 0 or track_count > 0 or file_count > 0:
             print("[ML-Dash] ✓ Flush complete", flush=True)
```
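The fix turns each single flush call into a drain loop, which matters if one `_flush_*` call only consumes a single batch (an assumption about the internals, but consistent with the change). A minimal sketch of the drain-until-empty pattern on a `queue.Queue` (names and batch size hypothetical):

```python
import queue

log_queue = queue.Queue()
for i in range(250):
    log_queue.put(f"log line {i}")

BATCH_SIZE = 100

def flush_logs_once() -> None:
    """Consume at most one batch (mirrors a single _flush_logs call)."""
    batch = []
    while len(batch) < BATCH_SIZE and not log_queue.empty():
        batch.append(log_queue.get_nowait())
    print(f"flushed {len(batch)} lines")

# A single call would leave 150 lines behind; looping drains everything.
while not log_queue.empty():
    flush_logs_once()
```

As always, `Queue.empty()` is only advisory under concurrency; a shutdown flush like this presumes producers have already stopped.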
src/ml_dash/cli_commands/upload.py (+20 -20)

```diff
@@ -67,7 +67,7 @@ class UploadResult:
 class UploadState:
     """Tracks upload state for resume functionality."""
 
-    …
+    dash_root: str
     remote_url: str
     completed_experiments: List[str] = field(
         default_factory=list
@@ -79,7 +79,7 @@ class UploadState:
     def to_dict(self) -> Dict[str, Any]:
         """Convert to dictionary for JSON serialization."""
         return {
-            "…
+            "dash_root": self.dash_root,
             "remote_url": self.remote_url,
             "completed_experiments": self.completed_experiments,
             "failed_experiments": self.failed_experiments,
@@ -91,7 +91,7 @@ class UploadState:
     def from_dict(cls, data: Dict[str, Any]) -> "UploadState":
         """Create from dictionary."""
         return cls(
-            …
+            dash_root=data["dash_root"],
             remote_url=data["remote_url"],
             completed_experiments=data.get("completed_experiments", []),
             failed_experiments=data.get("failed_experiments", []),
```
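Because `from_dict` indexes `data["dash_root"]` directly, a state file written under the previous field name will raise `KeyError` on load, presumably surfacing as a failed resume. A round-trip sketch of the serialization pair (fields abbreviated, paths hypothetical):

```python
import json
from dataclasses import asdict, dataclass, field
from typing import List

@dataclass
class UploadState:
    dash_root: str
    remote_url: str
    completed_experiments: List[str] = field(default_factory=list)

state = UploadState(dash_root="/data/ml-dash", remote_url="https://dash.example.com")
payload = json.dumps(asdict(state))            # what to_dict + json would persist
restored = UploadState(**json.loads(payload))  # what from_dict reconstructs
assert restored == state
```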
src/ml_dash/cli_commands/upload.py (continued)

```diff
@@ -265,18 +265,18 @@ def add_parser(subparsers) -> argparse.ArgumentParser:
 
 
 def discover_experiments(
-    …
+    dash_root: Path,
     project_filter: Optional[str] = None,
     experiment_filter: Optional[str] = None,
 ) -> List[ExperimentInfo]:
     """
     Discover experiments in local storage directory.
 
-    Supports both flat (…
-    (…
+    Supports both flat (dash_root/project/experiment) and folder-based
+    (dash_root/folder/project/experiment) hierarchies.
 
     Args:
-        …
+        dash_root: Root path of local storage
         project_filter: Either a simple project name (e.g., "proj1") or a glob
                         pattern for the full path (e.g., "tom/*/exp*"). If the
                         filter contains '/', '*', or '?', it's treated as a glob
@@ -289,15 +289,15 @@ def discover_experiments(
     """
     import fnmatch
 
-    …
+    dash_root = Path(dash_root)
 
-    if not …
+    if not dash_root.exists():
         return []
 
     experiments = []
 
     # Find all experiment.json files recursively
-    for exp_json in …
+    for exp_json in dash_root.rglob("*/experiment.json"):
         exp_dir = exp_json.parent
 
         # Read prefix from experiment.json first
@@ -313,7 +313,7 @@ def discover_experiments(
         # This handles nested folders correctly
         # Prefix format: owner/project/folder.../experiment
         try:
-            relative_path = exp_dir.relative_to(…
+            relative_path = exp_dir.relative_to(dash_root)
             full_relative_path = str(relative_path)
 
             if prefix:
```
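The discovery walk is simple: any directory containing an `experiment.json` under `dash_root` is an experiment, and the filter switches between glob and plain-name matching based on its characters. A self-contained sketch of that shape (helper name and exact filter semantics illustrative, not the package's code):

```python
import fnmatch
from pathlib import Path
from typing import List, Optional

def find_experiment_dirs(dash_root: Path, project_filter: Optional[str] = None) -> List[Path]:
    """Directories holding an experiment.json; '/', '*', '?' in the filter
    trigger glob matching on the relative path, plain names match the
    first path segment."""
    dash_root = Path(dash_root)
    if not dash_root.exists():
        return []
    found = []
    for exp_json in dash_root.rglob("experiment.json"):
        rel = exp_json.parent.relative_to(dash_root).as_posix()
        if project_filter:
            if any(ch in project_filter for ch in "/*?"):
                if not fnmatch.fnmatch(rel, project_filter):
                    continue
            elif rel.split("/")[0] != project_filter:
                continue
        found.append(exp_json.parent)
    return found

print(find_experiment_dirs(Path("/tmp/ml-dash"), "tom/*/exp*"))
```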
src/ml_dash/cli_commands/upload.py (continued)

```diff
@@ -1217,7 +1217,7 @@ def cmd_upload(args: argparse.Namespace) -> int:
         Exit code (0 for success, 1 for error)
     """
     # Handle track upload if --tracks is specified
-    if args.tracks:
+    if getattr(args, 'tracks', False):
         return cmd_upload_track(args)
 
     # Load config
@@ -1234,9 +1234,9 @@ def cmd_upload(args: argparse.Namespace) -> int:
     api_key = args.api_key or config.api_key
 
     # Discover experiments
-    …
-    if not …
-        console.print(f"[red]Error:[/red] Local storage path does not exist: {…
+    dash_root = Path(args.path)
+    if not dash_root.exists():
+        console.print(f"[red]Error:[/red] Local storage path does not exist: {dash_root}")
         return 1
 
     # Handle state file for resume functionality
@@ -1247,7 +1247,7 @@ def cmd_upload(args: argparse.Namespace) -> int:
     upload_state = UploadState.load(state_file)
     if upload_state:
         # Validate state matches current upload
-        if upload_state.…
+        if upload_state.dash_root != str(dash_root.absolute()):
             console.print(
                 "[yellow]Warning:[/yellow] State file local path doesn't match. Starting fresh upload."
             )
@@ -1273,13 +1273,13 @@ def cmd_upload(args: argparse.Namespace) -> int:
     # Create new state if not resuming
     if not upload_state:
         upload_state = UploadState(
-            …
+            dash_root=str(dash_root.absolute()),
             remote_url=remote_url,
         )
 
-    console.print(f"[bold]Scanning local storage:[/bold] {…
+    console.print(f"[bold]Scanning local storage:[/bold] {dash_root.absolute()}")
     experiments = discover_experiments(
-        …
+        dash_root,
         project_filter=args.pref,  # Using --prefix/-p argument
         experiment_filter=None,
     )
@@ -1398,7 +1398,7 @@ def cmd_upload(args: argparse.Namespace) -> int:
 
     # Initialize remote client and local storage
     remote_client = RemoteClient(base_url=remote_url, namespace=namespace, api_key=api_key)
-    local_storage = LocalStorage(root_path=…
+    local_storage = LocalStorage(root_path=dash_root)
 
     # Upload experiments with progress tracking
     console.print(f"\n[bold]Uploading to:[/bold] {remote_url}")
```
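The switch to `getattr(args, 'tracks', False)` makes `cmd_upload` safe to call with an argparse namespace that never registered the `--tracks` flag:

```python
import argparse

args = argparse.Namespace(path="/data/ml-dash")  # no 'tracks' attribute set

# Old form would raise: AttributeError: 'Namespace' object has no attribute 'tracks'
# New form degrades gracefully to the regular upload path:
if getattr(args, "tracks", False):
    print("track upload")
else:
    print("regular upload")
```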
src/ml_dash/client.py (+28 -28)

```diff
@@ -369,7 +369,7 @@ class RemoteClient:
             raise ValueError(f"Project '{project_slug}' not found in namespace '{self.namespace}'")
 
         # Delete using project-specific endpoint
-        response = self._client.delete(f"…
+        response = self._client.delete(f"projects/{project_id}")
         response.raise_for_status()
         return response.json()
 
@@ -463,7 +463,7 @@ class RemoteClient:
         if not project_id:
             # Create the project first since we need its ID for folders
             project_response = self._client.post(
-                f"…
+                f"namespaces/{self.namespace}/nodes",
                 json={
                     "type": "PROJECT",
                     "name": project,
@@ -483,7 +483,7 @@ class RemoteClient:
         # Create folder (server handles upsert)
         # NOTE: Do NOT pass experimentId for project-level folders
         folder_response = self._client.post(
-            f"…
+            f"namespaces/{self.namespace}/nodes",
             json={
                 "type": "FOLDER",
                 "projectId": project_id,
@@ -525,7 +525,7 @@ class RemoteClient:
 
         # Call unified node creation API
         response = self._client.post(
-            f"…
+            f"namespaces/{self.namespace}/nodes",
             json=payload,
         )
         response.raise_for_status()
@@ -565,7 +565,7 @@ class RemoteClient:
         payload = {"status": status}
 
         response = self._client.patch(
-            f"…
+            f"nodes/{node_id}",
             json=payload,
         )
         response.raise_for_status()
@@ -602,7 +602,7 @@ class RemoteClient:
             httpx.HTTPStatusError: If request fails
         """
         response = self._client.post(
-            f"…
+            f"experiments/{experiment_id}/logs",
             json={"logs": logs}
         )
         response.raise_for_status()
@@ -638,7 +638,7 @@ class RemoteClient:
             httpx.HTTPStatusError: If request fails
         """
         response = self._client.post(
-            f"…
+            f"experiments/{experiment_id}/parameters",
             json={"data": data}
         )
         response.raise_for_status()
@@ -658,7 +658,7 @@ class RemoteClient:
         Raises:
             httpx.HTTPStatusError: If request fails or parameters don't exist
         """
-        response = self._client.get(f"…
+        response = self._client.get(f"experiments/{experiment_id}/parameters")
         response.raise_for_status()
         result = response.json()
         return result.get("data", {})
@@ -935,7 +935,7 @@ class RemoteClient:
 
         # Create folder (server will return existing if duplicate)
         folder_response = self._client.post(
-            f"…
+            f"namespaces/{self.namespace}/nodes",
             json={
                 "type": "FOLDER",
                 "projectId": project_id,
@@ -974,7 +974,7 @@ class RemoteClient:
 
         # Call unified node creation API
         response = self._client.post(
-            f"…
+            f"namespaces/{self.namespace}/nodes",
             files=files,
             data=data
         )
@@ -1092,7 +1092,7 @@ class RemoteClient:
            httpx.HTTPStatusError: If request fails
        """
        # file_id is actually the node ID in the new system
-        response = self._client.get(f"…
+        response = self._client.get(f"nodes/{file_id}")
        response.raise_for_status()
        return response.json()
 
@@ -1127,7 +1127,7 @@ class RemoteClient:
             dest_path = filename
 
         # Download file using node API
-        response = self._client.get(f"…
+        response = self._client.get(f"nodes/{file_id}/download")
         response.raise_for_status()
 
         # Write to file
@@ -1159,7 +1159,7 @@ class RemoteClient:
         Raises:
             httpx.HTTPStatusError: If request fails
         """
-        response = self._client.delete(f"…
+        response = self._client.delete(f"nodes/{file_id}")
         response.raise_for_status()
         return response.json()
 
@@ -1196,7 +1196,7 @@ class RemoteClient:
             payload["metadata"] = metadata
 
         response = self._client.patch(
-            f"…
+            f"nodes/{file_id}",
             json=payload
         )
         response.raise_for_status()
@@ -1237,7 +1237,7 @@ class RemoteClient:
             payload["metadata"] = metadata
 
         response = self._client.post(
-            f"…
+            f"experiments/{experiment_id}/metrics/{metric_name}/append",
             json=payload
         )
         response.raise_for_status()
@@ -1278,7 +1278,7 @@ class RemoteClient:
             payload["metadata"] = metadata
 
         response = self._client.post(
-            f"…
+            f"experiments/{experiment_id}/metrics/{metric_name}/append-batch",
             json=payload
         )
         response.raise_for_status()
@@ -1307,7 +1307,7 @@ class RemoteClient:
             httpx.HTTPStatusError: If request fails
         """
         response = self._client.get(
-            f"…
+            f"experiments/{experiment_id}/metrics/{metric_name}/data",
             params={"startIndex": start_index, "limit": limit}
         )
         response.raise_for_status()
@@ -1332,7 +1332,7 @@ class RemoteClient:
             httpx.HTTPStatusError: If request fails
         """
         response = self._client.get(
-            f"…
+            f"experiments/{experiment_id}/metrics/{metric_name}/stats"
         )
         response.raise_for_status()
         return response.json()
@@ -1353,7 +1353,7 @@ class RemoteClient:
         Raises:
             httpx.HTTPStatusError: If request fails
         """
-        response = self._client.get(f"…
+        response = self._client.get(f"experiments/{experiment_id}/metrics")
         response.raise_for_status()
         return response.json()["metrics"]
 
@@ -1668,7 +1668,7 @@ class RemoteClient:
         expected_checksum = file_metadata.get("physicalFile", {}).get("checksum")
 
         # Stream download using node API
-        with self._client.stream("GET", f"…
+        with self._client.stream("GET", f"nodes/{file_id}/download") as response:
             response.raise_for_status()
 
             with open(dest_path, "wb") as f:
@@ -1736,7 +1736,7 @@ class RemoteClient:
         if search is not None:
             params["search"] = search
 
-        response = self._client.get(f"…
+        response = self._client.get(f"experiments/{experiment_id}/logs", params=params)
         response.raise_for_status()
         return response.json()
 
@@ -1774,7 +1774,7 @@ class RemoteClient:
             params["bufferOnly"] = "true"
 
         response = self._client.get(
-            f"…
+            f"experiments/{experiment_id}/metrics/{metric_name}/data",
             params=params
         )
         response.raise_for_status()
@@ -1801,7 +1801,7 @@ class RemoteClient:
             httpx.HTTPStatusError: If request fails
         """
         response = self._client.get(
-            f"…
+            f"experiments/{experiment_id}/metrics/{metric_name}/chunks/{chunk_number}"
         )
         response.raise_for_status()
         return response.json()
@@ -1845,7 +1845,7 @@ class RemoteClient:
             payload["metadata"] = metadata
 
         response = self._client.post(
-            f"…
+            f"experiments/{experiment_id}/tracks",
             json=payload,
         )
         response.raise_for_status()
@@ -1878,7 +1878,7 @@ class RemoteClient:
         topic_encoded = urllib.parse.quote(topic, safe='')
 
         response = self._client.post(
-            f"…
+            f"experiments/{experiment_id}/tracks/{topic_encoded}/append",
             json={"timestamp": timestamp, "data": data},
         )
         response.raise_for_status()
@@ -1912,7 +1912,7 @@ class RemoteClient:
         serialized_entries = [_serialize_value(entry) for entry in entries]
 
         response = self._client.post(
-            f"…
+            f"experiments/{experiment_id}/tracks/{topic_encoded}/append_batch",
             json={"entries": serialized_entries},
         )
         response.raise_for_status()
@@ -1957,7 +1957,7 @@ class RemoteClient:
             params["columns"] = ",".join(columns)
 
         response = self._client.get(
-            f"…
+            f"experiments/{experiment_id}/tracks/{topic_encoded}/data",
             params=params,
         )
         response.raise_for_status()
@@ -1990,7 +1990,7 @@ class RemoteClient:
             params["topic"] = topic_filter
 
         response = self._client.get(
-            f"…
+            f"experiments/{experiment_id}/tracks",
             params=params,
         )
         response.raise_for_status()
```
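All 28 client.py changes follow one pattern: each endpoint f-string is rewritten (the old paths are truncated in this view) so every request goes through the shared `httpx` client as a relative path such as `nodes/{file_id}` or `experiments/{experiment_id}/logs`. A minimal sketch of how relative paths compose with a client's `base_url` in httpx (URL hypothetical):

```python
import httpx

# httpx normalizes base_url to end with "/" and strips a leading "/" from the
# request path before appending it, so relative endpoint strings compose
# predictably with whatever prefix the server mounts the API under.
client = httpx.Client(base_url="https://dash.example.com/api")

file_id = "42"
request = client.build_request("GET", f"nodes/{file_id}/download")
print(request.url)  # https://dash.example.com/api/nodes/42/download
```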
src/ml_dash/run.py (+65 -0)

```diff
@@ -158,6 +158,16 @@ class RUN:
     now = datetime.now()
     """Timestamp at import time. Does not change during the session."""
 
+    @property
+    def date(self) -> str:
+        """Date string in YYYYMMDD format."""
+        return self.now.strftime("%Y%m%d")
+
+    @property
+    def datetime_str(self) -> str:
+        """DateTime string in YYYYMMDD.HHMMSS format."""
+        return self.now.strftime("%Y%m%d.%H%M%S")
+
     timestamp: str = None
     """Timestamp created at instantiation"""
 
```
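Both properties derive from the import-time `now`, so every prefix templated within one session shares the same stamp. A quick sketch of the two formats:

```python
from datetime import datetime

now = datetime(2025, 1, 31, 14, 5, 9)  # fixed example instant
print(now.strftime("%Y%m%d"))          # 20250131        -> RUN.date
print(now.strftime("%Y%m%d.%H%M%S"))   # 20250131.140509 -> RUN.datetime_str
```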
src/ml_dash/run.py (continued)

```diff
@@ -277,6 +287,61 @@ class RUN:
         # self.name is the last segment
         self.name = parts[-1] if len(parts) > 2 else parts[1]
 
+    def __setattr__(self, name: str, value):
+        """
+        Intercept attribute setting to expand {EXP.attr} templates in prefix.
+
+        When prefix is set, expands any {EXP.name}, {EXP.id}, {EXP.date}, etc. templates
+        using current instance's attributes. Also syncs back to class-level RUN attributes.
+        """
+        # Prevent prefix changes after experiment has started
+        if name == "prefix" and isinstance(value, str):
+            experiment = getattr(self, "_experiment", None)
+            if experiment is not None and getattr(experiment, "_is_open", False):
+                raise RuntimeError(
+                    "Cannot change prefix after experiment has been initialized. "
+                    "Set prefix before calling experiment.run.start() or entering the context manager."
+                )
+
+        # Expand templates if setting prefix
+        if name == "prefix" and isinstance(value, str):
+            # Check if value contains {EXP. templates
+            if "{EXP." in value:
+                import re
+
+                def replace_match(match):
+                    attr_name = match.group(1)
+                    # Special handling for id - generate if needed
+                    if attr_name == "id" and not getattr(self, "id", None):
+                        from ml_dash.snowflake import generate_id
+                        object.__setattr__(self, "id", generate_id())
+
+                    # Get attribute, raising error if not found
+                    try:
+                        attr_value = getattr(self, attr_name)
+                        if attr_value is None:
+                            raise AttributeError(f"Attribute '{attr_name}' is None")
+                        return str(attr_value)
+                    except AttributeError:
+                        raise AttributeError(f"RUN has no attribute '{attr_name}'")
+
+                # Match {EXP.attr_name} pattern
+                pattern = r"\{EXP\.(\w+)\}"
+                value = re.sub(pattern, replace_match, value)
+
+            # Always update _folder_path when prefix changes
+            object.__setattr__(self, "_folder_path", value)
+
+            # Parse and update owner, project, name from new prefix
+            parts = value.strip("/").split("/")
+            if len(parts) >= 2:
+                object.__setattr__(self, "owner", parts[0])
+                object.__setattr__(self, "project", parts[1])
+                object.__setattr__(self, "name", parts[-1] if len(parts) > 2 else parts[1])
+
+        # Use object.__setattr__ to set the value
+        object.__setattr__(self, name, value)
+
     def start(self) -> "Experiment":
         """
         Start the experiment (sets status to RUNNING).
```
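The `__setattr__` hook is what lets a prefix like `"alice/demo/{EXP.date}/{EXP.name}"` resolve at assignment time. A self-contained sketch of the same `{EXP.attr}` substitution, trimmed to the regex mechanics (class and values illustrative, not the package's full implementation):

```python
import re

class Run:
    """Trimmed-down stand-in showing only the template expansion."""
    def __init__(self):
        object.__setattr__(self, "name", "exp-01")
        object.__setattr__(self, "date", "20250131")

    def __setattr__(self, name, value):
        if name == "prefix" and isinstance(value, str) and "{EXP." in value:
            def replace_match(match):
                attr = match.group(1)
                return str(getattr(self, attr))  # AttributeError if unknown
            value = re.sub(r"\{EXP\.(\w+)\}", replace_match, value)
        object.__setattr__(self, name, value)

run = Run()
run.prefix = "alice/demo/{EXP.date}/{EXP.name}"
print(run.prefix)  # alice/demo/20250131/exp-01
```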