ml-dash 0.5.8__py3-none-any.whl → 0.6.0__py3-none-any.whl
- ml_dash/__init__.py +35 -9
- ml_dash/auth/__init__.py +51 -0
- ml_dash/auth/constants.py +10 -0
- ml_dash/auth/device_flow.py +237 -0
- ml_dash/auth/device_secret.py +49 -0
- ml_dash/auth/exceptions.py +31 -0
- ml_dash/auth/token_storage.py +262 -0
- ml_dash/auto_start.py +37 -14
- ml_dash/cli.py +14 -2
- ml_dash/cli_commands/download.py +10 -38
- ml_dash/cli_commands/list.py +10 -34
- ml_dash/cli_commands/login.py +225 -0
- ml_dash/cli_commands/logout.py +54 -0
- ml_dash/cli_commands/upload.py +3 -53
- ml_dash/client.py +67 -34
- ml_dash/config.py +15 -1
- ml_dash/experiment.py +151 -55
- ml_dash/files.py +97 -0
- ml_dash/metric.py +192 -3
- ml_dash/params.py +92 -3
- ml_dash/remote_auto_start.py +55 -0
- ml_dash/storage.py +366 -235
- {ml_dash-0.5.8.dist-info → ml_dash-0.6.0.dist-info}/METADATA +5 -1
- ml_dash-0.6.0.dist-info/RECORD +29 -0
- ml_dash-0.5.8.dist-info/RECORD +0 -20
- {ml_dash-0.5.8.dist-info → ml_dash-0.6.0.dist-info}/WHEEL +0 -0
- {ml_dash-0.5.8.dist-info → ml_dash-0.6.0.dist-info}/entry_points.txt +0 -0
ml_dash/storage.py
CHANGED
```diff
@@ -6,6 +6,11 @@ from typing import Optional, Dict, Any, List
 from pathlib import Path
 import json
 from datetime import datetime
+import threading
+import time
+import fcntl
+import sys
+from contextlib import contextmanager
 
 
 class LocalStorage:
```
```diff
@@ -37,6 +42,49 @@
         self.root_path = Path(root_path)
         self.root_path.mkdir(parents=True, exist_ok=True)
 
+    @contextmanager
+    def _file_lock(self, lock_file: Path):
+        """
+        Context manager for file-based locking (works across processes and threads).
+
+        Args:
+            lock_file: Path to the lock file
+
+        Yields:
+            File handle with exclusive lock
+        """
+        lock_file.parent.mkdir(parents=True, exist_ok=True)
+        lock_fd = None
+
+        try:
+            # Open lock file
+            lock_fd = open(lock_file, 'a')
+
+            # Acquire exclusive lock (blocking)
+            # Use fcntl on Unix-like systems
+            if hasattr(fcntl, 'flock'):
+                fcntl.flock(lock_fd.fileno(), fcntl.LOCK_EX)
+            elif hasattr(fcntl, 'lockf'):
+                fcntl.lockf(lock_fd.fileno(), fcntl.LOCK_EX)
+            else:
+                # Fallback for systems without fcntl (like Windows)
+                # Use simple file existence as lock (not perfect but better than nothing)
+                pass
+
+            yield lock_fd
+
+        finally:
+            # Release lock and close file
+            if lock_fd:
+                try:
+                    if hasattr(fcntl, 'flock'):
+                        fcntl.flock(lock_fd.fileno(), fcntl.LOCK_UN)
+                    elif hasattr(fcntl, 'lockf'):
+                        fcntl.lockf(lock_fd.fileno(), fcntl.LOCK_UN)
+                except Exception:
+                    pass
+                lock_fd.close()
+
     def create_experiment(
         self,
         project: str,
```
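The `_file_lock` helper above is the backbone of every write path that follows. A minimal standalone sketch of the same pattern, assuming a Unix-like system where `fcntl` is available (names here are illustrative, not ml-dash API):

```python
import fcntl
from contextlib import contextmanager
from pathlib import Path

@contextmanager
def file_lock(lock_path: Path):
    """Hold a blocking exclusive flock for the duration of the with-block."""
    lock_path.parent.mkdir(parents=True, exist_ok=True)
    fd = open(lock_path, "a")
    try:
        fcntl.flock(fd.fileno(), fcntl.LOCK_EX)  # blocks until the lock is free
        yield fd
    finally:
        fcntl.flock(fd.fileno(), fcntl.LOCK_UN)
        fd.close()

# Read-modify-write under the lock: concurrent processes serialize here
# instead of losing increments to a race.
counter = Path("/tmp/demo/counter.txt")
with file_lock(counter.parent / "counter.lock"):
    value = int(counter.read_text()) if counter.exists() else 0
    counter.write_text(str(value + 1))
```

Note that the fallback branch for platforms without `fcntl` is a `pass`, as the method's own comments admit, so the lock only provides real mutual exclusion where `flock`/`lockf` exist.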
```diff
@@ -97,28 +145,39 @@
         }
 
         experiment_file = experiment_dir / "experiment.json"
-        … (22 removed lines not shown in this view)
+
+        # File-based lock for concurrent experiment creation/update
+        lock_file = experiment_dir / ".experiment.lock"
+        with self._file_lock(lock_file):
+            if not experiment_file.exists():
+                # Only create if doesn't exist (don't overwrite)
+                with open(experiment_file, "w") as f:
+                    json.dump(experiment_metadata, f, indent=2)
+            else:
+                # Update existing experiment
+                try:
+                    with open(experiment_file, "r") as f:
+                        existing = json.load(f)
+                except (json.JSONDecodeError, IOError):
+                    # File might be corrupted or empty, recreate it
+                    with open(experiment_file, "w") as f:
+                        json.dump(experiment_metadata, f, indent=2)
+                    return experiment_dir
+
+                # Merge updates
+                if description is not None:
+                    existing["description"] = description
+                if tags is not None:
+                    existing["tags"] = tags
+                if bindrs is not None:
+                    existing["bindrs"] = bindrs
+                if folder is not None:
+                    existing["folder"] = folder
+                if metadata is not None:
+                    existing["metadata"] = metadata
+                existing["updated_at"] = datetime.utcnow().isoformat() + "Z"
+                with open(experiment_file, "w") as f:
+                    json.dump(existing, f, indent=2)
 
         return experiment_dir
 
```
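With the lock in place, `create_experiment` is effectively an upsert: the first writer creates `experiment.json`, later writers merge only the non-None fields they were given and refresh `updated_at`. A usage sketch; the keyword names are inferred from the merge logic above, since the full signature is not shown in this diff:

```python
from ml_dash.storage import LocalStorage

storage = LocalStorage("/tmp/mlruns")  # root_path, per __init__ above

# First call creates <experiment_dir>/experiment.json.
storage.create_experiment(project="demo", experiment="run-1",
                          description="baseline")

# A later (or concurrent) call merges rather than overwrites: only the
# fields passed here change, everything else in the JSON is preserved.
storage.create_experiment(project="demo", experiment="run-1",
                          tags=["lr-sweep"])
```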
```diff
@@ -130,9 +189,10 @@
         self,
         project: str,
         experiment: str,
-        … (3 removed lines not shown in this view)
+        folder: Optional[str] = None,
+        message: str = "",
+        level: str = "info",
+        timestamp: str = "",
         metadata: Optional[Dict[str, Any]] = None,
     ):
         """
```
```diff
@@ -141,40 +201,44 @@
         Args:
             project: Project name
             experiment: Experiment name
+            folder: Optional folder path
             message: Log message
             level: Log level
             timestamp: ISO timestamp string
             metadata: Optional metadata
         """
-        experiment_dir = self._get_experiment_dir(project, experiment)
+        experiment_dir = self._get_experiment_dir(project, experiment, folder)
         logs_dir = experiment_dir / "logs"
         logs_file = logs_dir / "logs.jsonl"
         seq_file = logs_dir / ".log_sequence"
 
-        # … (14 removed lines not shown in this view)
+        # File-based lock for concurrent log writes (prevents sequence collision)
+        lock_file = logs_dir / ".log_sequence.lock"
+        with self._file_lock(lock_file):
+            # Read and increment sequence counter
+            sequence_number = 0
+            if seq_file.exists():
+                try:
+                    sequence_number = int(seq_file.read_text().strip())
+                except (ValueError, IOError):
+                    sequence_number = 0
+
+            log_entry = {
+                "sequenceNumber": sequence_number,
+                "timestamp": timestamp,
+                "level": level,
+                "message": message,
+            }
 
-        … (2 removed lines not shown in this view)
+            if metadata:
+                log_entry["metadata"] = metadata
 
-        … (3 removed lines not shown in this view)
+            # Write log immediately
+            with open(logs_file, "a") as f:
+                f.write(json.dumps(log_entry) + "\n")
 
-        … (2 removed lines not shown in this view)
+            # Update sequence counter
+            seq_file.write_text(str(sequence_number + 1))
 
     def write_metric_data(
         self,
```
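Each log entry lands in `logs.jsonl` with a `sequenceNumber` taken from the `.log_sequence` counter, which is read and incremented under the same lock that guards the append, so numbers are unique and dense. A reader can therefore sort on that field instead of trusting file order; an illustrative sketch (the path layout follows the hunk above):

```python
import json
from pathlib import Path

def read_logs(logs_file: Path):
    """Return log entries ordered by their persisted sequence number."""
    entries = []
    with open(logs_file) as f:
        for line in f:
            if line.strip():
                entries.append(json.loads(line))
    return sorted(entries, key=lambda e: e["sequenceNumber"])

for entry in read_logs(Path("/tmp/mlruns/demo/run-1/logs/logs.jsonl")):
    print(entry["sequenceNumber"], entry["level"], entry["message"])
```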
```diff
@@ -207,7 +271,8 @@
         self,
         project: str,
         experiment: str,
-        … (1 removed line not shown in this view)
+        folder: Optional[str] = None,
+        data: Optional[Dict[str, Any]] = None,
     ):
         """
         Write/merge parameters. Always merges with existing parameters.
```
```diff
@@ -222,39 +287,58 @@
         Args:
             project: Project name
             experiment: Experiment name
+            folder: Optional folder path
             data: Flattened parameter dict with dot notation (already flattened)
         """
-        … (1 removed line not shown in this view)
+        if data is None:
+            data = {}
+        experiment_dir = self._get_experiment_dir(project, experiment, folder)
         params_file = experiment_dir / "parameters.json"
 
-        # … (25 removed lines not shown in this view)
+        # File-based lock for concurrent parameter writes (prevents data loss and version conflicts)
+        lock_file = experiment_dir / ".parameters.lock"
+        with self._file_lock(lock_file):
+            # Read existing if present
+            if params_file.exists():
+                try:
+                    with open(params_file, "r") as f:
+                        existing_doc = json.load(f)
+                except (json.JSONDecodeError, IOError):
+                    # Corrupted file, recreate
+                    existing_doc = None
+
+                if existing_doc:
+                    # Merge with existing data
+                    existing_data = existing_doc.get("data", {})
+                    existing_data.update(data)
+
+                    # Increment version
+                    version = existing_doc.get("version", 1) + 1
+
+                    params_doc = {
+                        "version": version,
+                        "data": existing_data,
+                        "updatedAt": datetime.utcnow().isoformat() + "Z"
+                    }
+                else:
+                    # Create new if corrupted
+                    params_doc = {
+                        "version": 1,
+                        "data": data,
+                        "createdAt": datetime.utcnow().isoformat() + "Z",
+                        "updatedAt": datetime.utcnow().isoformat() + "Z"
+                    }
+            else:
+                # Create new parameters document
+                params_doc = {
+                    "version": 1,
+                    "data": data,
+                    "createdAt": datetime.utcnow().isoformat() + "Z",
+                    "updatedAt": datetime.utcnow().isoformat() + "Z"
+                }
 
-        … (2 removed lines not shown in this view)
+            with open(params_file, "w") as f:
+                json.dump(params_doc, f, indent=2)
 
     def read_parameters(
         self,
```
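The docstring says the caller passes an "already flattened" dict, and the storage layer stores those dot-notation keys verbatim, bumping `version` on every merge. A helper like the following would produce that shape; it is a hypothetical illustration, not part of ml-dash:

```python
from typing import Any, Dict

def flatten(params: Dict[str, Any], prefix: str = "") -> Dict[str, Any]:
    """Flatten nested dicts into dot-notation keys:
    {"optimizer": {"lr": 0.1}} -> {"optimizer.lr": 0.1}."""
    flat: Dict[str, Any] = {}
    for key, value in params.items():
        full_key = f"{prefix}.{key}" if prefix else key
        if isinstance(value, dict):
            flat.update(flatten(value, full_key))
        else:
            flat[full_key] = value
    return flat

assert flatten({"optimizer": {"lr": 0.1}, "seed": 7}) == {"optimizer.lr": 0.1, "seed": 7}
```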
```diff
@@ -288,15 +372,16 @@
         self,
         project: str,
         experiment: str,
-        … (9 removed lines not shown in this view)
+        folder: Optional[str] = None,
+        file_path: str = "",
+        prefix: str = "",
+        filename: str = "",
+        description: Optional[str] = None,
+        tags: Optional[List[str]] = None,
+        metadata: Optional[Dict[str, Any]] = None,
+        checksum: str = "",
+        content_type: str = "",
+        size_bytes: int = 0
     ) -> Dict[str, Any]:
         """
         Write file to local storage.
```
```diff
@@ -307,6 +392,7 @@
         Args:
             project: Project name
             experiment: Experiment name
+            folder: Optional folder path
             file_path: Source file path
             prefix: Logical path prefix
             filename: Original filename
```
```diff
@@ -323,7 +409,7 @@
         import shutil
         from .files import generate_snowflake_id
 
-        experiment_dir = self._get_experiment_dir(project, experiment)
+        experiment_dir = self._get_experiment_dir(project, experiment, folder)
         files_dir = experiment_dir / "files"
         metadata_file = files_dir / ".files_metadata.json"
 
```
```diff
@@ -361,42 +447,45 @@
             "deletedAt": None
         }
 
-        # … (24 removed lines not shown in this view)
+        # File-based lock for concurrent safety (works across processes/threads/instances)
+        lock_file = files_dir / ".files_metadata.lock"
+        with self._file_lock(lock_file):
+            # Read existing metadata
+            files_metadata = {"files": []}
+            if metadata_file.exists():
+                try:
+                    with open(metadata_file, "r") as f:
+                        files_metadata = json.load(f)
+                except (json.JSONDecodeError, IOError):
+                    files_metadata = {"files": []}
+
+            # Check if file with same prefix+filename exists (overwrite behavior)
+            existing_index = None
+            for i, existing_file in enumerate(files_metadata["files"]):
+                if (existing_file["path"] == prefix and
+                        existing_file["filename"] == filename and
+                        existing_file["deletedAt"] is None):
+                    existing_index = i
+                    break
+
+            if existing_index is not None:
+                # Overwrite: remove old file and update metadata
+                old_file = files_metadata["files"][existing_index]
+                old_prefix = old_file["path"].lstrip("/") if old_file["path"] else ""
+                if old_prefix:
+                    old_file_dir = files_dir / old_prefix / old_file["id"]
+                else:
+                    old_file_dir = files_dir / old_file["id"]
+                if old_file_dir.exists():
+                    shutil.rmtree(old_file_dir)
+                files_metadata["files"][existing_index] = file_metadata
             else:
-        … (2 removed lines not shown in this view)
-            shutil.rmtree(old_file_dir)
-            files_metadata["files"][existing_index] = file_metadata
-        else:
-            # New file: append to list
-            files_metadata["files"].append(file_metadata)
+                # New file: append to list
+                files_metadata["files"].append(file_metadata)
 
-        … (3 removed lines not shown in this view)
+            # Write updated metadata
+            with open(metadata_file, "w") as f:
+                json.dump(files_metadata, f, indent=2)
 
         return file_metadata
 
```
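`write_file` receives `checksum`, `content_type`, and `size_bytes` from its caller; the diff does not show how they are produced. A typical producer might look like this (the hash algorithm and helper name are assumptions, not ml-dash code):

```python
import hashlib
import mimetypes
from pathlib import Path

def describe_file(path: Path) -> dict:
    """Compute the checksum/content_type/size_bytes trio a write_file caller needs."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
    return {
        "checksum": digest.hexdigest(),
        "content_type": mimetypes.guess_type(path.name)[0] or "application/octet-stream",
        "size_bytes": path.stat().st_size,
    }
```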
```diff
@@ -543,27 +632,30 @@
         if not metadata_file.exists():
             raise FileNotFoundError(f"File {file_id} not found")
 
-        # … (3 removed lines not shown in this view)
+        # File-based lock for concurrent safety (works across processes/threads/instances)
+        lock_file = files_dir / ".files_metadata.lock"
+        with self._file_lock(lock_file):
+            # Read metadata
+            with open(metadata_file, "r") as f:
+                files_metadata = json.load(f)
 
-        … (10 removed lines not shown in this view)
+            # Find and soft delete file
+            file_found = False
+            for file_meta in files_metadata.get("files", []):
+                if file_meta["id"] == file_id:
+                    if file_meta.get("deletedAt") is not None:
+                        raise FileNotFoundError(f"File {file_id} already deleted")
+                    file_meta["deletedAt"] = datetime.utcnow().isoformat() + "Z"
+                    file_meta["updatedAt"] = file_meta["deletedAt"]
+                    file_found = True
+                    break
 
-        … (2 removed lines not shown in this view)
+            if not file_found:
+                raise FileNotFoundError(f"File {file_id} not found")
 
-        … (3 removed lines not shown in this view)
+            # Write updated metadata
+            with open(metadata_file, "w") as f:
+                json.dump(files_metadata, f, indent=2)
 
         return {
             "id": file_id,
```
```diff
@@ -602,37 +694,40 @@
         if not metadata_file.exists():
             raise FileNotFoundError(f"File {file_id} not found")
 
-        # … (6 removed lines not shown in this view)
-        updated_file = None
-        for file_meta in files_metadata.get("files", []):
-            if file_meta["id"] == file_id:
-                if file_meta.get("deletedAt") is not None:
-                    raise FileNotFoundError(f"File {file_id} has been deleted")
-
-                # Update fields
-                if description is not None:
-                    file_meta["description"] = description
-                if tags is not None:
-                    file_meta["tags"] = tags
-                if metadata is not None:
-                    file_meta["metadata"] = metadata
-
-                file_meta["updatedAt"] = datetime.utcnow().isoformat() + "Z"
-                file_found = True
-                updated_file = file_meta
-                break
-
-        if not file_found:
-            raise FileNotFoundError(f"File {file_id} not found")
+        # File-based lock for concurrent safety (works across processes/threads/instances)
+        lock_file = files_dir / ".files_metadata.lock"
+        with self._file_lock(lock_file):
+            # Read metadata
+            with open(metadata_file, "r") as f:
+                files_metadata = json.load(f)
 
-        … (3 removed lines not shown in this view)
+            # Find and update file
+            file_found = False
+            updated_file = None
+            for file_meta in files_metadata.get("files", []):
+                if file_meta["id"] == file_id:
+                    if file_meta.get("deletedAt") is not None:
+                        raise FileNotFoundError(f"File {file_id} has been deleted")
+
+                    # Update fields
+                    if description is not None:
+                        file_meta["description"] = description
+                    if tags is not None:
+                        file_meta["tags"] = tags
+                    if metadata is not None:
+                        file_meta["metadata"] = metadata
+
+                    file_meta["updatedAt"] = datetime.utcnow().isoformat() + "Z"
+                    file_found = True
+                    updated_file = file_meta
+                    break
+
+            if not file_found:
+                raise FileNotFoundError(f"File {file_id} not found")
+
+            # Write updated metadata
+            with open(metadata_file, "w") as f:
+                json.dump(files_metadata, f, indent=2)
 
         return updated_file
 
```
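Deletion is soft: the entry stays in `.files_metadata.json` with `deletedAt` set, and both `delete_file` and the update path above treat a non-null `deletedAt` as "gone". A reader that lists live files mirrors that check (illustrative, matching the metadata shape in these hunks):

```python
import json
from pathlib import Path

def live_files(files_dir: Path) -> list:
    """Return file metadata entries that have not been soft-deleted."""
    metadata_file = files_dir / ".files_metadata.json"
    if not metadata_file.exists():
        return []
    files_metadata = json.loads(metadata_file.read_text())
    return [f for f in files_metadata.get("files", [])
            if f.get("deletedAt") is None]
```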
```diff
@@ -687,8 +782,9 @@
         self,
         project: str,
         experiment: str,
-        … (2 removed lines not shown in this view)
+        folder: Optional[str] = None,
+        metric_name: Optional[str] = None,
+        data: Optional[Dict[str, Any]] = None,
         description: Optional[str] = None,
         tags: Optional[List[str]] = None,
         metadata: Optional[Dict[str, Any]] = None
```
```diff
@@ -704,6 +800,7 @@
         Args:
             project: Project name
             experiment: Experiment name
+            folder: Optional folder path
             metric_name: Metric name (None for unnamed metrics)
             data: Data point (flexible schema)
             description: Optional metric description
```
```diff
@@ -713,7 +810,9 @@
         Returns:
             Dict with metricId, index, bufferedDataPoints, chunkSize
         """
-        … (1 removed line not shown in this view)
+        if data is None:
+            data = {}
+        experiment_dir = self._get_experiment_dir(project, experiment, folder)
         metrics_dir = experiment_dir / "metrics"
         metrics_dir.mkdir(parents=True, exist_ok=True)
 
```
```diff
@@ -725,42 +824,58 @@
         data_file = metric_dir / "data.jsonl"
         metadata_file = metric_dir / "metadata.json"
 
-        # … (15 removed lines not shown in this view)
+        # File-based lock for concurrent metric appends (prevents index collision and count errors)
+        lock_file = metric_dir / ".metadata.lock"
+        with self._file_lock(lock_file):
+            # Load or initialize metadata
+            if metadata_file.exists():
+                try:
+                    with open(metadata_file, "r") as f:
+                        metric_meta = json.load(f)
+                except (json.JSONDecodeError, IOError):
+                    # Corrupted metadata, reinitialize
+                    metric_meta = {
+                        "metricId": f"local-metric-{metric_name}",
+                        "name": metric_name,
+                        "description": description,
+                        "tags": tags or [],
+                        "metadata": metadata,
+                        "totalDataPoints": 0,
+                        "nextIndex": 0,
+                        "createdAt": datetime.utcnow().isoformat() + "Z"
+                    }
+            else:
+                metric_meta = {
+                    "metricId": f"local-metric-{metric_name}",
+                    "name": metric_name,
+                    "description": description,
+                    "tags": tags or [],
+                    "metadata": metadata,
+                    "totalDataPoints": 0,
+                    "nextIndex": 0,
+                    "createdAt": datetime.utcnow().isoformat() + "Z"
+                }
 
-        … (2 removed lines not shown in this view)
+            # Get next index
+            index = metric_meta["nextIndex"]
 
-        … (6 removed lines not shown in this view)
+            # Append data point to JSONL file
+            data_entry = {
+                "index": index,
+                "data": data,
+                "createdAt": datetime.utcnow().isoformat() + "Z"
+            }
 
-        … (2 removed lines not shown in this view)
+            with open(data_file, "a") as f:
+                f.write(json.dumps(data_entry) + "\n")
 
-        … (4 removed lines not shown in this view)
+            # Update metadata
+            metric_meta["nextIndex"] = index + 1
+            metric_meta["totalDataPoints"] = metric_meta["totalDataPoints"] + 1
+            metric_meta["updatedAt"] = datetime.utcnow().isoformat() + "Z"
 
-        … (2 removed lines not shown in this view)
+            with open(metadata_file, "w") as f:
+                json.dump(metric_meta, f, indent=2)
 
         return {
             "metricId": metric_meta["metricId"],
```
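Because `nextIndex` is read, the row appended, and both counters written back under one lock acquisition, `index` values in `data.jsonl` are dense from 0 and `totalDataPoints` matches the row count. An illustrative consistency check (assumes the metadata was never reinitialized after corruption):

```python
import json
from pathlib import Path

def check_metric(metric_dir: Path) -> None:
    """Verify data.jsonl agrees with the counters in metadata.json."""
    meta = json.loads((metric_dir / "metadata.json").read_text())
    rows = [json.loads(line)
            for line in (metric_dir / "data.jsonl").read_text().splitlines()
            if line.strip()]
    assert len(rows) == meta["totalDataPoints"]
    assert [r["index"] for r in rows] == list(range(meta["nextIndex"]))
```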
```diff
@@ -806,42 +921,58 @@
         data_file = metric_dir / "data.jsonl"
         metadata_file = metric_dir / "metadata.json"
 
-        # … (25 removed lines not shown in this view)
+        # File-based lock for concurrent batch appends (prevents index collision and count errors)
+        lock_file = metric_dir / ".metadata.lock"
+        with self._file_lock(lock_file):
+            # Load or initialize metadata
+            if metadata_file.exists():
+                try:
+                    with open(metadata_file, "r") as f:
+                        metric_meta = json.load(f)
+                except (json.JSONDecodeError, IOError):
+                    # Corrupted metadata, reinitialize
+                    metric_meta = {
+                        "metricId": f"local-metric-{metric_name}",
+                        "name": metric_name,
+                        "description": description,
+                        "tags": tags or [],
+                        "metadata": metadata,
+                        "totalDataPoints": 0,
+                        "nextIndex": 0,
+                        "createdAt": datetime.utcnow().isoformat() + "Z"
+                    }
+            else:
+                metric_meta = {
+                    "metricId": f"local-metric-{metric_name}",
+                    "name": metric_name,
+                    "description": description,
+                    "tags": tags or [],
+                    "metadata": metadata,
+                    "totalDataPoints": 0,
+                    "nextIndex": 0,
                     "createdAt": datetime.utcnow().isoformat() + "Z"
                 }
-                f.write(json.dumps(data_entry) + "\n")
-
-        # Update metadata
-        metric_meta["nextIndex"] = end_index + 1
-        metric_meta["totalDataPoints"] = metric_meta["totalDataPoints"] + len(data_points)
-        metric_meta["updatedAt"] = datetime.utcnow().isoformat() + "Z"
 
-        … (2 removed lines not shown in this view)
+            start_index = metric_meta["nextIndex"]
+            end_index = start_index + len(data_points) - 1
+
+            # Append data points to JSONL file
+            with open(data_file, "a") as f:
+                for i, data in enumerate(data_points):
+                    data_entry = {
+                        "index": start_index + i,
+                        "data": data,
+                        "createdAt": datetime.utcnow().isoformat() + "Z"
+                    }
+                    f.write(json.dumps(data_entry) + "\n")
+
+            # Update metadata
+            metric_meta["nextIndex"] = end_index + 1
+            metric_meta["totalDataPoints"] = metric_meta["totalDataPoints"] + len(data_points)
+            metric_meta["updatedAt"] = datetime.utcnow().isoformat() + "Z"
+
+            with open(metadata_file, "w") as f:
+                json.dump(metric_meta, f, indent=2)
 
         return {
             "metricId": metric_meta["metricId"],
```