ml-dash 0.4.0__py3-none-any.whl → 0.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ml_dash/__init__.py +51 -7
- ml_dash/client.py +595 -0
- ml_dash/experiment.py +939 -0
- ml_dash/files.py +313 -0
- ml_dash/log.py +181 -0
- ml_dash/metric.py +186 -0
- ml_dash/params.py +188 -0
- ml_dash/py.typed +0 -0
- ml_dash/storage.py +941 -0
- ml_dash-0.5.1.dist-info/METADATA +240 -0
- ml_dash-0.5.1.dist-info/RECORD +12 -0
- {ml_dash-0.4.0.dist-info → ml_dash-0.5.1.dist-info}/WHEEL +1 -1
- ml_dash/ARCHITECTURE.md +0 -382
- ml_dash/autolog.py +0 -32
- ml_dash/backends/__init__.py +0 -11
- ml_dash/backends/base.py +0 -124
- ml_dash/backends/dash_backend.py +0 -571
- ml_dash/backends/local_backend.py +0 -90
- ml_dash/components/__init__.py +0 -13
- ml_dash/components/files.py +0 -246
- ml_dash/components/logs.py +0 -104
- ml_dash/components/metrics.py +0 -169
- ml_dash/components/parameters.py +0 -144
- ml_dash/job_logger.py +0 -42
- ml_dash/ml_logger.py +0 -234
- ml_dash/run.py +0 -331
- ml_dash-0.4.0.dist-info/METADATA +0 -1424
- ml_dash-0.4.0.dist-info/RECORD +0 -19
- ml_dash-0.4.0.dist-info/entry_points.txt +0 -3
ml_dash/components/parameters.py
DELETED
|
@@ -1,144 +0,0 @@
|
|
|
1
|
-
"""Parameter management component for ML-Logger."""
|
|
2
|
-
|
|
3
|
-
import json
|
|
4
|
-
import time
|
|
5
|
-
from typing import Any, Dict, Optional
|
|
6
|
-
|
|
7
|
-
from ..backends.base import StorageBackend
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
def deep_merge(base: Dict, updates: Dict) -> Dict:
    """Recursively merge *updates* into a copy of *base*.

    Nested dictionaries are merged key by key; any other incoming value
    simply overwrites the corresponding entry. *base* is never mutated.

    Args:
        base: Base dictionary (left unmodified).
        updates: Updates to merge in.

    Returns:
        A new dictionary holding the merged result.
    """
    merged = base.copy()
    for key, incoming in updates.items():
        existing = merged.get(key)
        # Recurse only when both sides are dicts; otherwise overwrite.
        if isinstance(existing, dict) and isinstance(incoming, dict):
            merged[key] = deep_merge(existing, incoming)
        else:
            merged[key] = incoming
    return merged
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
class ParameterManager:
    """Tracks experiment parameters as an append-only operation log.

    Every mutation (``set``, ``extend``, ``update``) is appended as one JSON
    line to ``<prefix>/parameters.jsonl``; the current state is recovered by
    replaying those operations in order.

    Args:
        backend: Storage backend used for persistence.
        prefix: Experiment prefix path.
    """

    def __init__(self, backend: StorageBackend, prefix: str):
        """Initialize the parameter manager.

        Args:
            backend: Storage backend used for persistence.
            prefix: Experiment prefix path.
        """
        self.backend = backend
        self.prefix = prefix
        self.params_file = f"{prefix}/parameters.jsonl"
        # Memoized replay result; dropped whenever a new operation lands.
        self._cache: Optional[Dict[str, Any]] = None

    def set(self, **kwargs) -> None:
        """Replace the parameter set entirely.

        Args:
            **kwargs: Parameter key-value pairs.
        """
        self._append_operation("set", data=kwargs)
        self._cache = None  # Force replay on next read()

    def extend(self, **kwargs) -> None:
        """Deep-merge new values into the existing parameters.

        Args:
            **kwargs: Parameter key-value pairs to merge in.
        """
        self._append_operation("extend", data=kwargs)
        self._cache = None  # Force replay on next read()

    def update(self, key: str, value: Any) -> None:
        """Update a single parameter.

        Args:
            key: Parameter key; dot-separated keys (e.g. "model.layers")
                address nested values.
            value: New value.
        """
        self._append_operation("update", key=key, value=value)
        self._cache = None  # Force replay on next read()

    def read(self) -> Dict[str, Any]:
        """Rebuild the current parameters by replaying the operation log.

        Returns:
            Current parameter dictionary.
        """
        if self._cache is not None:
            return self._cache.copy()

        state: Dict[str, Any] = {}
        if not self.backend.exists(self.params_file):
            return state

        # Replay every logged operation, oldest first.
        for raw in self.backend.read_text(self.params_file).strip().split("\n"):
            if not raw:
                continue
            op = json.loads(raw)
            kind = op.get("operation")
            if kind == "set":
                state = op.get("data", {})
            elif kind == "extend":
                state = deep_merge(state, op.get("data", {}))
            elif kind == "update":
                dotted = op.get("key")
                if dotted:
                    # Walk dot-separated path, creating intermediate dicts.
                    *parents, leaf = dotted.split(".")
                    node = state
                    for part in parents:
                        if part not in node:
                            node[part] = {}
                        node = node[part]
                    node[leaf] = op.get("value")

        self._cache = state
        return state.copy()

    def log(self, **kwargs) -> None:
        """Alias for :meth:`set`, kept to match the documented API.

        Args:
            **kwargs: Parameter key-value pairs.
        """
        self.set(**kwargs)

    def _append_operation(self, operation: str, **kwargs) -> None:
        """Serialize one operation and append it to the JSONL log.

        Args:
            operation: Operation type ("set", "extend" or "update").
            **kwargs: Operation-specific payload.
        """
        record = {
            "timestamp": time.time(),
            "operation": operation,
            **kwargs
        }
        self.backend.append_text(self.params_file, json.dumps(record) + "\n")
|
ml_dash/job_logger.py
DELETED
|
@@ -1,42 +0,0 @@
|
|
|
1
|
-
"""JobLogger - Job-based logging wrapper around ML_Logger.
|
|
2
|
-
|
|
3
|
-
This class provides a simple wrapper around ML_Logger for job-based logging.
|
|
4
|
-
"""
|
|
5
|
-
|
|
6
|
-
from typing import Optional
|
|
7
|
-
|
|
8
|
-
from .ml_logger import ML_Logger
|
|
9
|
-
from .backends.base import StorageBackend
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
class JobLogger(ML_Logger):
    """Job-oriented wrapper around :class:`ML_Logger`.

    Currently adds only a ``job_id`` attribute on top of ML_Logger; it
    exists as an extension point for future job-specific behavior.

    Args:
        prefix: Directory prefix for logging.
        backend: Storage backend (optional).
        job_id: Optional job identifier.
    """

    def __init__(
        self,
        prefix: str,
        backend: Optional[StorageBackend] = None,
        job_id: Optional[str] = None,
    ):
        """Create a JobLogger.

        Args:
            prefix: Directory prefix for logging.
            backend: Storage backend (optional).
            job_id: Optional job identifier.
        """
        super().__init__(prefix, backend)
        self.job_id = job_id

    def __repr__(self) -> str:
        """Debug-friendly summary including the job id and buffer size."""
        return f"JobLogger(prefix='{self.prefix}', job_id='{self.job_id}', entries={len(self.buffer)})"
|
ml_dash/ml_logger.py
DELETED
|
@@ -1,234 +0,0 @@
|
|
|
1
|
-
"""ML_Logger - Legacy logging class for backward compatibility.
|
|
2
|
-
|
|
3
|
-
This class provides a simpler interface for basic logging with filtering capabilities.
|
|
4
|
-
"""
|
|
5
|
-
|
|
6
|
-
import re
|
|
7
|
-
from enum import Enum
|
|
8
|
-
from typing import Any, Callable, Dict, List, Optional, Pattern
|
|
9
|
-
|
|
10
|
-
from .backends.local_backend import LocalBackend
|
|
11
|
-
from .backends.base import StorageBackend
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
class LogLevel(Enum):
    """Severity levels, ordered by increasing importance."""
    DEBUG = 0
    INFO = 1
    WARNING = 2
    ERROR = 3


class ML_Logger:
    """Legacy logger with an in-memory buffer and configurable filtering.

    Entries are plain dicts kept in ``buffer``. Filters (minimum level,
    include/exclude regex patterns, custom predicates) are applied at log
    time; :meth:`get_filtered_logs` additionally filters the buffered
    entries on read.

    Args:
        prefix: Directory prefix for logging (e.g., "../data")
        backend: Storage backend (optional, defaults to LocalBackend)
    """

    def __init__(
        self,
        prefix: str,
        backend: Optional[StorageBackend] = None,
    ):
        """Set up the logger with an empty buffer and no active filters.

        Args:
            prefix: Directory prefix for logging
            backend: Storage backend (optional)
        """
        self.prefix = prefix
        self.backend = backend or LocalBackend(prefix)

        # All accepted entries live here, in arrival order.
        self.buffer: List[Dict[str, Any]] = []

        # Filter state: minimum severity, regex allow/deny lists, predicates.
        self._min_level = LogLevel.DEBUG
        self._include_patterns: List[Pattern] = []
        self._exclude_patterns: List[Pattern] = []
        self._custom_filters: List[Callable] = []

    def log(self, message: str, level: str = "INFO", **context) -> None:
        """Record a message if it passes the active filters.

        Args:
            message: Log message
            level: Log level (DEBUG, INFO, WARNING, ERROR)
            **context: Additional context fields stored with the message
        """
        record = {
            "message": message,
            "level": level.upper(),
            **context
        }
        if self._should_log(record):
            self.buffer.append(record)

    def info(self, message: str, **context) -> None:
        """Record *message* at INFO level.

        Args:
            message: Log message
            **context: Additional context
        """
        self.log(message, level="INFO", **context)

    def warning(self, message: str, **context) -> None:
        """Record *message* at WARNING level.

        Args:
            message: Log message
            **context: Additional context
        """
        self.log(message, level="WARNING", **context)

    def error(self, message: str, **context) -> None:
        """Record *message* at ERROR level.

        Args:
            message: Log message
            **context: Additional context
        """
        self.log(message, level="ERROR", **context)

    def debug(self, message: str, **context) -> None:
        """Record *message* at DEBUG level.

        Args:
            message: Log message
            **context: Additional context
        """
        self.log(message, level="DEBUG", **context)

    def set_level(self, level: str) -> None:
        """Set the minimum severity accepted by :meth:`log`.

        Args:
            level: Minimum level (DEBUG, INFO, WARNING, ERROR)
        """
        self._min_level = LogLevel[level.upper()]

    def add_include_pattern(self, pattern: str) -> None:
        """Require messages to match at least one include pattern.

        Args:
            pattern: Regex pattern to match against the message
        """
        self._include_patterns.append(re.compile(pattern))

    def add_exclude_pattern(self, pattern: str) -> None:
        """Drop messages matching any exclude pattern.

        Args:
            pattern: Regex pattern to match against the message
        """
        self._exclude_patterns.append(re.compile(pattern))

    def add_filter(self, filter_func: Callable[[Dict[str, Any]], bool]) -> None:
        """Register a custom predicate; entries it rejects are dropped.

        Args:
            filter_func: Function that takes a log entry and returns True
                to keep it
        """
        self._custom_filters.append(filter_func)

    def clear_filters(self) -> None:
        """Reset level, patterns and custom predicates to defaults."""
        self._min_level = LogLevel.DEBUG
        self._include_patterns.clear()
        self._exclude_patterns.clear()
        self._custom_filters.clear()

    def get_filtered_logs(
        self,
        level: Optional[str] = None,
        pattern: Optional[str] = None,
        start_step: Optional[int] = None,
        end_step: Optional[int] = None,
    ) -> List[Dict[str, Any]]:
        """Return buffered entries matching the given criteria.

        Args:
            level: Filter by log level
            pattern: Filter by regex pattern on the message
            start_step: Filter by minimum step
            end_step: Filter by maximum step

        Returns:
            List of matching log entries; when step bounds are given,
            entries missing a "step" field get their index as the step.
        """
        selected = self.buffer.copy()

        if level:
            wanted = level.upper()
            selected = [e for e in selected if e.get("level") == wanted]

        if pattern:
            matcher = re.compile(pattern)
            selected = [
                e for e in selected
                if matcher.search(e.get("message", ""))
            ]

        if start_step is None and end_step is None:
            return selected

        windowed = []
        for index, entry in enumerate(selected):
            # Entries without an explicit step default to their index.
            step = entry.get("step", index)
            if start_step is not None and step < start_step:
                continue
            if end_step is not None and step > end_step:
                continue
            enriched = entry.copy()
            # Materialize the implicit index-based step for the caller.
            enriched.setdefault("step", index)
            windowed.append(enriched)
        return windowed

    def _should_log(self, entry: Dict[str, Any]) -> bool:
        """Decide whether *entry* passes the active filters.

        Args:
            entry: Candidate log entry

        Returns:
            True if the entry should be buffered
        """
        # Severity gate first; unknown level names raise KeyError here.
        severity = LogLevel[entry.get("level", "INFO")]
        if severity.value < self._min_level.value:
            return False

        text = entry.get("message", "")

        # Include patterns: when present, at least one must match.
        if self._include_patterns and not any(
            p.search(text) for p in self._include_patterns
        ):
            return False

        # Exclude patterns: any match rejects the entry.
        if any(p.search(text) for p in self._exclude_patterns):
            return False

        # Custom predicates must all accept.
        return all(check(entry) for check in self._custom_filters)

    def clear_buffer(self) -> None:
        """Drop all buffered entries."""
        self.buffer.clear()

    def __repr__(self) -> str:
        """Debug-friendly summary of prefix and buffer size."""
        return f"ML_Logger(prefix='{self.prefix}', entries={len(self.buffer)})"
|