flowyml 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowyml/__init__.py +207 -0
- flowyml/assets/__init__.py +22 -0
- flowyml/assets/artifact.py +40 -0
- flowyml/assets/base.py +209 -0
- flowyml/assets/dataset.py +100 -0
- flowyml/assets/featureset.py +301 -0
- flowyml/assets/metrics.py +104 -0
- flowyml/assets/model.py +82 -0
- flowyml/assets/registry.py +157 -0
- flowyml/assets/report.py +315 -0
- flowyml/cli/__init__.py +5 -0
- flowyml/cli/experiment.py +232 -0
- flowyml/cli/init.py +256 -0
- flowyml/cli/main.py +327 -0
- flowyml/cli/run.py +75 -0
- flowyml/cli/stack_cli.py +532 -0
- flowyml/cli/ui.py +33 -0
- flowyml/core/__init__.py +68 -0
- flowyml/core/advanced_cache.py +274 -0
- flowyml/core/approval.py +64 -0
- flowyml/core/cache.py +203 -0
- flowyml/core/checkpoint.py +148 -0
- flowyml/core/conditional.py +373 -0
- flowyml/core/context.py +155 -0
- flowyml/core/error_handling.py +419 -0
- flowyml/core/executor.py +354 -0
- flowyml/core/graph.py +185 -0
- flowyml/core/parallel.py +452 -0
- flowyml/core/pipeline.py +764 -0
- flowyml/core/project.py +253 -0
- flowyml/core/resources.py +424 -0
- flowyml/core/scheduler.py +630 -0
- flowyml/core/scheduler_config.py +32 -0
- flowyml/core/step.py +201 -0
- flowyml/core/step_grouping.py +292 -0
- flowyml/core/templates.py +226 -0
- flowyml/core/versioning.py +217 -0
- flowyml/integrations/__init__.py +1 -0
- flowyml/integrations/keras.py +134 -0
- flowyml/monitoring/__init__.py +1 -0
- flowyml/monitoring/alerts.py +57 -0
- flowyml/monitoring/data.py +102 -0
- flowyml/monitoring/llm.py +160 -0
- flowyml/monitoring/monitor.py +57 -0
- flowyml/monitoring/notifications.py +246 -0
- flowyml/registry/__init__.py +5 -0
- flowyml/registry/model_registry.py +491 -0
- flowyml/registry/pipeline_registry.py +55 -0
- flowyml/stacks/__init__.py +27 -0
- flowyml/stacks/base.py +77 -0
- flowyml/stacks/bridge.py +288 -0
- flowyml/stacks/components.py +155 -0
- flowyml/stacks/gcp.py +499 -0
- flowyml/stacks/local.py +112 -0
- flowyml/stacks/migration.py +97 -0
- flowyml/stacks/plugin_config.py +78 -0
- flowyml/stacks/plugins.py +401 -0
- flowyml/stacks/registry.py +226 -0
- flowyml/storage/__init__.py +26 -0
- flowyml/storage/artifacts.py +246 -0
- flowyml/storage/materializers/__init__.py +20 -0
- flowyml/storage/materializers/base.py +133 -0
- flowyml/storage/materializers/keras.py +185 -0
- flowyml/storage/materializers/numpy.py +94 -0
- flowyml/storage/materializers/pandas.py +142 -0
- flowyml/storage/materializers/pytorch.py +135 -0
- flowyml/storage/materializers/sklearn.py +110 -0
- flowyml/storage/materializers/tensorflow.py +152 -0
- flowyml/storage/metadata.py +931 -0
- flowyml/tracking/__init__.py +1 -0
- flowyml/tracking/experiment.py +211 -0
- flowyml/tracking/leaderboard.py +191 -0
- flowyml/tracking/runs.py +145 -0
- flowyml/ui/__init__.py +15 -0
- flowyml/ui/backend/Dockerfile +31 -0
- flowyml/ui/backend/__init__.py +0 -0
- flowyml/ui/backend/auth.py +163 -0
- flowyml/ui/backend/main.py +187 -0
- flowyml/ui/backend/routers/__init__.py +0 -0
- flowyml/ui/backend/routers/assets.py +45 -0
- flowyml/ui/backend/routers/execution.py +179 -0
- flowyml/ui/backend/routers/experiments.py +49 -0
- flowyml/ui/backend/routers/leaderboard.py +118 -0
- flowyml/ui/backend/routers/notifications.py +72 -0
- flowyml/ui/backend/routers/pipelines.py +110 -0
- flowyml/ui/backend/routers/plugins.py +192 -0
- flowyml/ui/backend/routers/projects.py +85 -0
- flowyml/ui/backend/routers/runs.py +66 -0
- flowyml/ui/backend/routers/schedules.py +222 -0
- flowyml/ui/backend/routers/traces.py +84 -0
- flowyml/ui/frontend/Dockerfile +20 -0
- flowyml/ui/frontend/README.md +315 -0
- flowyml/ui/frontend/dist/assets/index-DFNQnrUj.js +448 -0
- flowyml/ui/frontend/dist/assets/index-pWI271rZ.css +1 -0
- flowyml/ui/frontend/dist/index.html +16 -0
- flowyml/ui/frontend/index.html +15 -0
- flowyml/ui/frontend/nginx.conf +26 -0
- flowyml/ui/frontend/package-lock.json +3545 -0
- flowyml/ui/frontend/package.json +33 -0
- flowyml/ui/frontend/postcss.config.js +6 -0
- flowyml/ui/frontend/src/App.jsx +21 -0
- flowyml/ui/frontend/src/app/assets/page.jsx +397 -0
- flowyml/ui/frontend/src/app/dashboard/page.jsx +295 -0
- flowyml/ui/frontend/src/app/experiments/[experimentId]/page.jsx +255 -0
- flowyml/ui/frontend/src/app/experiments/page.jsx +360 -0
- flowyml/ui/frontend/src/app/leaderboard/page.jsx +133 -0
- flowyml/ui/frontend/src/app/pipelines/page.jsx +454 -0
- flowyml/ui/frontend/src/app/plugins/page.jsx +48 -0
- flowyml/ui/frontend/src/app/projects/page.jsx +292 -0
- flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +682 -0
- flowyml/ui/frontend/src/app/runs/page.jsx +470 -0
- flowyml/ui/frontend/src/app/schedules/page.jsx +585 -0
- flowyml/ui/frontend/src/app/settings/page.jsx +314 -0
- flowyml/ui/frontend/src/app/tokens/page.jsx +456 -0
- flowyml/ui/frontend/src/app/traces/page.jsx +246 -0
- flowyml/ui/frontend/src/components/Layout.jsx +108 -0
- flowyml/ui/frontend/src/components/PipelineGraph.jsx +295 -0
- flowyml/ui/frontend/src/components/header/Header.jsx +72 -0
- flowyml/ui/frontend/src/components/plugins/AddPluginDialog.jsx +121 -0
- flowyml/ui/frontend/src/components/plugins/InstalledPlugins.jsx +124 -0
- flowyml/ui/frontend/src/components/plugins/PluginBrowser.jsx +167 -0
- flowyml/ui/frontend/src/components/plugins/PluginManager.jsx +60 -0
- flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +145 -0
- flowyml/ui/frontend/src/components/ui/Badge.jsx +26 -0
- flowyml/ui/frontend/src/components/ui/Button.jsx +34 -0
- flowyml/ui/frontend/src/components/ui/Card.jsx +44 -0
- flowyml/ui/frontend/src/components/ui/CodeSnippet.jsx +38 -0
- flowyml/ui/frontend/src/components/ui/CollapsibleCard.jsx +53 -0
- flowyml/ui/frontend/src/components/ui/DataView.jsx +175 -0
- flowyml/ui/frontend/src/components/ui/EmptyState.jsx +49 -0
- flowyml/ui/frontend/src/components/ui/ExecutionStatus.jsx +122 -0
- flowyml/ui/frontend/src/components/ui/KeyValue.jsx +25 -0
- flowyml/ui/frontend/src/components/ui/ProjectSelector.jsx +134 -0
- flowyml/ui/frontend/src/contexts/ProjectContext.jsx +79 -0
- flowyml/ui/frontend/src/contexts/ThemeContext.jsx +54 -0
- flowyml/ui/frontend/src/index.css +11 -0
- flowyml/ui/frontend/src/layouts/MainLayout.jsx +23 -0
- flowyml/ui/frontend/src/main.jsx +10 -0
- flowyml/ui/frontend/src/router/index.jsx +39 -0
- flowyml/ui/frontend/src/services/pluginService.js +90 -0
- flowyml/ui/frontend/src/utils/api.js +47 -0
- flowyml/ui/frontend/src/utils/cn.js +6 -0
- flowyml/ui/frontend/tailwind.config.js +31 -0
- flowyml/ui/frontend/vite.config.js +21 -0
- flowyml/ui/utils.py +77 -0
- flowyml/utils/__init__.py +67 -0
- flowyml/utils/config.py +308 -0
- flowyml/utils/debug.py +240 -0
- flowyml/utils/environment.py +346 -0
- flowyml/utils/git.py +319 -0
- flowyml/utils/logging.py +61 -0
- flowyml/utils/performance.py +314 -0
- flowyml/utils/stack_config.py +296 -0
- flowyml/utils/validation.py +270 -0
- flowyml-1.1.0.dist-info/METADATA +372 -0
- flowyml-1.1.0.dist-info/RECORD +159 -0
- flowyml-1.1.0.dist-info/WHEEL +4 -0
- flowyml-1.1.0.dist-info/entry_points.txt +3 -0
- flowyml-1.1.0.dist-info/licenses/LICENSE +17 -0
flowyml/core/advanced_cache.py
ADDED
@@ -0,0 +1,274 @@
"""Advanced caching strategies."""

import hashlib
import pickle
from pathlib import Path
from typing import Any
from collections.abc import Callable
from datetime import datetime, timedelta


class ContentBasedCache:
    """Content-based caching using input hashing.

    Caches based on actual input content, not just step name.
    """

    def __init__(self, cache_dir: str = ".flowyml/cache"):
        self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)

    def _compute_hash(self, *args, **kwargs) -> str:
        """Compute hash of inputs."""
        # Serialize inputs
        try:
            content = pickle.dumps((args, kwargs))
            return hashlib.sha256(content).hexdigest()
        except Exception:
            # Fallback to str representation
            content = str((args, kwargs)).encode()
            return hashlib.sha256(content).hexdigest()

    def get(self, step_name: str, *args, **kwargs) -> Any | None:
        """Get cached result if exists."""
        content_hash = self._compute_hash(*args, **kwargs)
        cache_key = f"{step_name}_{content_hash}"
        cache_file = self.cache_dir / f"{cache_key}.pkl"

        if cache_file.exists():
            with open(cache_file, "rb") as f:
                cached_data = pickle.load(f)

            # Check if still valid
            if "result" in cached_data:
                return cached_data["result"]

        return None

    def set_value(self, step_name: str, result: Any, *args, **kwargs) -> None:
        """Cache a result."""
        content_hash = self._compute_hash(*args, **kwargs)
        cache_key = f"{step_name}_{content_hash}"
        cache_file = self.cache_dir / f"{cache_key}.pkl"

        cached_data = {
            "result": result,
            "cached_at": datetime.now().isoformat(),
            "inputs_hash": content_hash,
        }

        with open(cache_file, "wb") as f:
            pickle.dump(cached_data, f)

    def invalidate(self, step_name: str | None = None) -> None:
        """Invalidate cache entries."""
        pattern = f"{step_name}_*.pkl" if step_name else "*.pkl"

        for cache_file in self.cache_dir.glob(pattern):
            cache_file.unlink()


class SharedCache:
    """Shared cache across different pipeline runs.

    Allows cache reuse across multiple executions.
    """

    def __init__(self, cache_dir: str = ".flowyml/shared_cache"):
        self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.index_file = self.cache_dir / "index.json"
        self._load_index()

    def _load_index(self) -> None:
        """Load cache index."""
        import json

        if self.index_file.exists():
            with open(self.index_file) as f:
                self.index = json.load(f)
        else:
            self.index = {}

    def _save_index(self) -> None:
        """Save cache index."""
        import json

        with open(self.index_file, "w") as f:
            json.dump(self.index, f, indent=2)

    def get(self, cache_key: str) -> Any | None:
        """Get from shared cache."""
        if cache_key in self.index:
            cache_file = self.cache_dir / f"{cache_key}.pkl"
            if cache_file.exists():
                with open(cache_file, "rb") as f:
                    return pickle.load(f)
        return None

    def set_value(self, cache_key: str, value: Any, metadata: dict | None = None) -> None:
        """Set shared cache entry."""
        cache_file = self.cache_dir / f"{cache_key}.pkl"

        with open(cache_file, "wb") as f:
            pickle.dump(value, f)

        self.index[cache_key] = {
            "cached_at": datetime.now().isoformat(),
            "metadata": metadata or {},
        }
        self._save_index()

    def list_keys(self) -> list:
        """List all cache keys."""
        return list(self.index.keys())


class SmartCache:
    """Smart cache with TTL and automatic invalidation.

    Features:
    - Time-to-live (TTL)
    - Size limits
    - LRU eviction
    """

    def __init__(
        self,
        cache_dir: str = ".flowyml/smart_cache",
        ttl_seconds: int = 3600,
        max_size_mb: int = 1000,
    ):
        self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.ttl_seconds = ttl_seconds
        self.max_size_mb = max_size_mb

    def get(self, key: str) -> Any | None:
        """Get cached value if not expired."""
        cache_file = self.cache_dir / f"{key}.pkl"
        metadata_file = self.cache_dir / f"{key}.meta"

        if not cache_file.exists() or not metadata_file.exists():
            return None

        # Check TTL
        import json

        with open(metadata_file) as f:
            metadata = json.load(f)

        cached_at = datetime.fromisoformat(metadata["cached_at"])
        if datetime.now() - cached_at > timedelta(seconds=self.ttl_seconds):
            # Expired
            cache_file.unlink()
            metadata_file.unlink()
            return None

        # Update access time
        metadata["last_accessed"] = datetime.now().isoformat()
        with open(metadata_file, "w") as f:
            json.dump(metadata, f)

        with open(cache_file, "rb") as f:
            return pickle.load(f)

    def set_value(self, key: str, value: Any) -> None:
        """Set cached value."""
        import json

        # Check size limits
        self._evict_if_needed()

        cache_file = self.cache_dir / f"{key}.pkl"
        metadata_file = self.cache_dir / f"{key}.meta"

        with open(cache_file, "wb") as f:
            pickle.dump(value, f)

        metadata = {
            "cached_at": datetime.now().isoformat(),
            "last_accessed": datetime.now().isoformat(),
            "size_bytes": cache_file.stat().st_size,
        }

        with open(metadata_file, "w") as f:
            json.dump(metadata, f)

    def _evict_if_needed(self) -> None:
        """Evict old entries if cache is too large."""
        total_size = sum(f.stat().st_size for f in self.cache_dir.glob("*.pkl"))
        max_size_bytes = self.max_size_mb * 1024 * 1024

        if total_size > max_size_bytes:
            # LRU eviction
            import json

            entries = []
            for meta_file in self.cache_dir.glob("*.meta"):
                with open(meta_file) as f:
                    metadata = json.load(f)
                entries.append(
                    (
                        meta_file.stem,
                        datetime.fromisoformat(metadata["last_accessed"]),
                    ),
                )

            # Sort by access time
            entries.sort(key=lambda x: x[1])

            # Remove oldest entries until under limit
            for key, _ in entries:
                cache_file = self.cache_dir / f"{key}.pkl"
                meta_file = self.cache_dir / f"{key}.meta"

                if cache_file.exists():
                    cache_file.unlink()
                if meta_file.exists():
                    meta_file.unlink()

                # Recalculate size
                total_size = sum(f.stat().st_size for f in self.cache_dir.glob("*.pkl"))
                if total_size <= max_size_bytes:
                    break


def memoize(ttl_seconds: int | None = None):
    """Memoization decorator for functions.

    Args:
        ttl_seconds: Time-to-live for cached results

    Example:
        >>> @memoize(ttl_seconds=3600)
        ... def expensive_function(x):
        ...     return x**2
    """
    cache = {}
    cache_time = {}

    def decorator(func: Callable):
        def wrapper(*args, **kwargs):
            # Create cache key
            key = (args, tuple(sorted(kwargs.items())))

            # Check if cached and valid
            if key in cache:
                if ttl_seconds is None:
                    return cache[key]

                elapsed = (datetime.now() - cache_time[key]).total_seconds()
                if elapsed < ttl_seconds:
                    return cache[key]

            # Compute and cache
            result = func(*args, **kwargs)
            cache[key] = result
            cache_time[key] = datetime.now()

            return result

        return wrapper

    return decorator
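A minimal usage sketch of ContentBasedCache and memoize, assuming the module is importable as flowyml.core.advanced_cache; expensive_transform, the step name, and the sample inputs are hypothetical, not part of the package:

# Hypothetical usage; only ContentBasedCache and memoize come from the package.
from flowyml.core.advanced_cache import ContentBasedCache, memoize

cache = ContentBasedCache(cache_dir=".flowyml/cache")

def expensive_transform(rows):
    return [r * 2 for r in rows]

rows = [1, 2, 3]
result = cache.get("transform", rows)           # None on the first call (miss)
if result is None:
    result = expensive_transform(rows)
    cache.set_value("transform", result, rows)  # keyed on step name + input hash

@memoize(ttl_seconds=60)
def square(x):
    return x**2

square(4)  # computed once
square(4)  # served from the in-process cache until the TTL lapses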
flowyml/core/approval.py
ADDED
@@ -0,0 +1,64 @@
"""Human-in-the-loop approval step."""

from collections.abc import Callable
from flowyml.core.step import Step


class ApprovalStep(Step):
    """A step that pauses execution until manual approval is granted.

    This is useful for:
    - Reviewing LLM outputs before proceeding
    - Cost control (approving expensive operations)
    - Safety checks

    The step will poll for approval status or wait for a signal.
    """

    def __init__(
        self,
        name: str,
        approver: str | None = None,
        timeout_seconds: int = 3600,  # 1 hour default
        auto_approve_if: Callable | None = None,
    ):
        super().__init__(name)
        self.approver = approver
        self.timeout_seconds = timeout_seconds
        self.auto_approve_if = auto_approve_if

    def __call__(self, *args, **kwargs):
        """Execute the approval logic."""
        # Check auto-approval condition
        if self.auto_approve_if and self.auto_approve_if(*args, **kwargs):
            return args[0] if args else None

        # In a real implementation, this would:
        # 1. Create an 'Approval Request' in the DB
        # 2. Send a notification (Slack/Email)
        # 3. Poll DB for status change

        # For this local version, we'll simulate a simple CLI prompt if interactive,
        # or just fail if non-interactive (safety first).

        # Check if we are in an interactive terminal
        import sys

        if sys.stdin.isatty():
            response = input(" Approve execution? [y/N]: ")
            if response.lower() == "y":
                return args[0] if args else None
            else:
                raise RuntimeError(f"Step '{self.name}' was rejected by user.")
        else:
            # Non-interactive mode - check for a file or env var?
            # For now, we'll just raise an error saying manual intervention needed
            # In a real system, this would block/suspend the workflow state.
            raise RuntimeError(
                "Manual approval required but running in non-interactive mode. Implement persistent state storage to handle async approvals.",
            )


def approval(name: str = "approval", **kwargs):
    """Decorator/helper to create an approval step."""
    return ApprovalStep(name, **kwargs)
flowyml/core/cache.py
ADDED
@@ -0,0 +1,203 @@
"""Cache Module - Intelligent caching strategies for pipeline steps."""

import json
import pickle
import hashlib
from pathlib import Path
from typing import Any
from datetime import datetime
from dataclasses import dataclass, asdict


@dataclass
class CacheEntry:
    """A cache entry with metadata."""

    key: str
    value: Any
    created_at: datetime
    step_name: str
    code_hash: str
    input_hash: str | None = None
    size_bytes: int = 0

    def to_dict(self) -> dict[str, Any]:
        """Convert to dictionary."""
        data = asdict(self)
        data["created_at"] = self.created_at.isoformat()
        return data


class CacheStrategy:
    """Base class for caching strategies."""

    def get_key(self, step_name: str, inputs: dict[str, Any], code_hash: str) -> str:
        """Generate cache key."""
        raise NotImplementedError

    def should_cache(self, step_name: str) -> bool:
        """Determine if step should be cached."""
        return True


class CodeHashCache(CacheStrategy):
    """Cache based on function code hash."""

    def get_key(self, step_name: str, inputs: dict[str, Any], code_hash: str) -> str:
        return f"{step_name}:{code_hash}"


class InputHashCache(CacheStrategy):
    """Cache based on input hash."""

    def get_key(self, step_name: str, inputs: dict[str, Any], code_hash: str) -> str:
        input_str = json.dumps(inputs, sort_keys=True, default=str)
        input_hash = hashlib.sha256(input_str.encode()).hexdigest()[:16]
        return f"{step_name}:{code_hash}:{input_hash}"


class CacheStore:
    """Local cache storage for pipeline steps."""

    def __init__(self, cache_dir: str = ".flowyml/cache"):
        self.cache_dir = Path(cache_dir)
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.metadata_file = self.cache_dir / "metadata.json"
        self.metadata = self._load_metadata()

        # Statistics
        self.hits = 0
        self.misses = 0

    def _load_metadata(self) -> dict[str, dict[str, Any]]:
        """Load cache metadata."""
        if self.metadata_file.exists():
            with open(self.metadata_file) as f:
                return json.load(f)
        return {}

    def _save_metadata(self) -> None:
        """Save cache metadata."""
        with open(self.metadata_file, "w") as f:
            json.dump(self.metadata, f, indent=2)

    def _get_cache_path(self, key: str) -> Path:
        """Get path for cache file."""
        # Use hash to avoid filesystem issues with long keys
        key_hash = hashlib.sha256(key.encode()).hexdigest()
        return self.cache_dir / f"{key_hash}.pkl"

    def get(self, key: str) -> Any | None:
        """Retrieve value from cache.

        Args:
            key: Cache key

        Returns:
            Cached value or None if not found
        """
        cache_path = self._get_cache_path(key)

        if not cache_path.exists():
            self.misses += 1
            return None

        try:
            with open(cache_path, "rb") as f:
                value = pickle.load(f)
            self.hits += 1
            return value
        except Exception:
            self.misses += 1
            return None

    def set_value(self, key: str, value: Any, step_name: str, code_hash: str, input_hash: str | None = None) -> None:
        """Store value in cache.

        Args:
            key: Cache key
            value: Value to cache
            step_name: Name of the step
            code_hash: Hash of step code
            input_hash: Hash of inputs (optional)
        """
        cache_path = self._get_cache_path(key)

        try:
            with open(cache_path, "wb") as f:
                pickle.dump(value, f)

            # Update metadata
            size_bytes = cache_path.stat().st_size
            self.metadata[key] = {
                "step_name": step_name,
                "code_hash": code_hash,
                "input_hash": input_hash,
                "created_at": datetime.now().isoformat(),
                "size_bytes": size_bytes,
                "file": str(cache_path.name),
            }
            self._save_metadata()

        except Exception:
            pass

    def invalidate(self, key: str | None = None, step_name: str | None = None) -> None:
        """Invalidate cache entries.

        Args:
            key: Specific cache key to invalidate
            step_name: Invalidate all entries for a step
        """
        if key:
            cache_path = self._get_cache_path(key)
            if cache_path.exists():
                cache_path.unlink()
            if key in self.metadata:
                del self.metadata[key]

        elif step_name:
            keys_to_remove = [k for k, v in self.metadata.items() if v["step_name"] == step_name]
            for k in keys_to_remove:
                cache_path = self._get_cache_path(k)
                if cache_path.exists():
                    cache_path.unlink()
                del self.metadata[k]

        self._save_metadata()

    def clear(self) -> None:
        """Clear all cache entries."""
        for cache_file in self.cache_dir.glob("*.pkl"):
            cache_file.unlink()
        self.metadata = {}
        self._save_metadata()

    def stats(self) -> dict[str, Any]:
        """Get cache statistics.

        Returns:
            Dictionary with cache statistics
        """
        total_size = sum(v["size_bytes"] for v in self.metadata.values())
        total_entries = len(self.metadata)

        by_step = {}
        for entry in self.metadata.values():
            step = entry["step_name"]
            if step not in by_step:
                by_step[step] = {"count": 0, "size_bytes": 0}
            by_step[step]["count"] += 1
            by_step[step]["size_bytes"] += entry["size_bytes"]

        total_requests = self.hits + self.misses
        hit_rate = self.hits / total_requests if total_requests > 0 else 0

        return {
            "total_entries": total_entries,
            "total_size_mb": total_size / (1024 * 1024),
            "hits": self.hits,
            "misses": self.misses,
            "hit_rate": hit_rate,
            "by_step": by_step,
        }
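A minimal sketch wiring InputHashCache to CacheStore; the step name, inputs, and fixed code_hash placeholder are hypothetical (in the pipeline the code hash would presumably come from hashing the step's source):

# Hypothetical usage; InputHashCache and CacheStore come from the package.
from flowyml.core.cache import InputHashCache, CacheStore

strategy = InputHashCache()
store = CacheStore(cache_dir=".flowyml/cache")

inputs = {"path": "data.csv", "limit": 100}
key = strategy.get_key("load_data", inputs, code_hash="abc123")
# -> "load_data:abc123:<first 16 hex chars of the input hash>"

value = store.get(key)                    # recorded as a miss on the first run
if value is None:
    value = {"rows": 100}                 # stand-in for the real step output
    store.set_value(key, value, step_name="load_data", code_hash="abc123")

print(store.stats()["hit_rate"])          # 0.0 after the single miss above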
flowyml/core/checkpoint.py
ADDED
@@ -0,0 +1,148 @@
"""Pipeline checkpointing for resumable execution."""

import json
import pickle
from pathlib import Path
from typing import Any
from datetime import datetime


class PipelineCheckpoint:
    """Save and restore pipeline execution state.

    Allows resuming failed pipelines from the last successful step.

    Examples:
        >>> checkpoint = PipelineCheckpoint(run_id="run_123")
        >>> # Save state after each step
        >>> checkpoint.save_step_state("step1", outputs)
        >>> # Resume from checkpoint
        >>> state = checkpoint.load()
        >>> last_step = state["last_completed_step"]
    """

    def __init__(
        self,
        run_id: str,
        checkpoint_dir: str = ".flowyml/checkpoints",
    ):
        self.run_id = run_id
        self.checkpoint_dir = Path(checkpoint_dir)
        self.checkpoint_dir.mkdir(parents=True, exist_ok=True)

        self.checkpoint_file = self.checkpoint_dir / f"{run_id}.json"
        self.state_dir = self.checkpoint_dir / run_id
        self.state_dir.mkdir(exist_ok=True)

    def save_step_state(
        self,
        step_name: str,
        outputs: Any,
        metadata: dict[str, Any] | None = None,
    ) -> None:
        """Save state after completing a step."""
        # Save outputs
        output_file = self.state_dir / f"{step_name}.pkl"
        with open(output_file, "wb") as f:
            pickle.dump(outputs, f)

        # Update checkpoint metadata
        checkpoint_data = self.load() if self.checkpoint_file.exists() else {}

        checkpoint_data.update(
            {
                "run_id": self.run_id,
                "last_completed_step": step_name,
                "last_update": datetime.now().isoformat(),
                "completed_steps": checkpoint_data.get("completed_steps", []) + [step_name],
                "step_metadata": checkpoint_data.get("step_metadata", {}),
            },
        )

        if metadata:
            checkpoint_data["step_metadata"][step_name] = metadata

        # Save checkpoint
        with open(self.checkpoint_file, "w") as f:
            json.dump(checkpoint_data, f, indent=2)

    def load_step_state(self, step_name: str) -> Any:
        """Load state for a specific step."""
        output_file = self.state_dir / f"{step_name}.pkl"
        if not output_file.exists():
            raise FileNotFoundError(f"No checkpoint found for step: {step_name}")

        with open(output_file, "rb") as f:
            return pickle.load(f)

    def load(self) -> dict[str, Any]:
        """Load checkpoint metadata."""
        if not self.checkpoint_file.exists():
            return {}

        with open(self.checkpoint_file) as f:
            return json.load(f)

    def exists(self) -> bool:
        """Check if checkpoint exists."""
        return self.checkpoint_file.exists()

    def get_completed_steps(self) -> list:
        """Get list of completed steps."""
        data = self.load()
        return data.get("completed_steps", [])

    def clear(self) -> None:
        """Clear checkpoint data."""
        if self.checkpoint_file.exists():
            self.checkpoint_file.unlink()

        # Clear state files
        for file in self.state_dir.glob("*.pkl"):
            file.unlink()

    def resume_point(self) -> str | None:
        """Get the resume point (last completed step)."""
        data = self.load()
        return data.get("last_completed_step")


def checkpoint_enabled_pipeline(pipeline, run_id: str):
    """Wrap a pipeline to enable checkpointing.

    This is a decorator-style wrapper that adds checkpoint functionality.
    """
    checkpoint = PipelineCheckpoint(run_id)

    # Store original run method
    original_run = pipeline.run

    def run_with_checkpoints(*args, **kwargs):
        """Modified run method with checkpointing."""
        if checkpoint.exists():
            response = input("Resume from checkpoint? [y/N]: ")

            if response.lower() == "y":
                # Load completed steps
                checkpoint.get_completed_steps()

                # In a real implementation, we would modify the execution
                # to skip completed steps. For now, just notify.

        # Run the pipeline
        result = original_run(*args, **kwargs)

        # Save final checkpoint
        if result.success:
            checkpoint.save_step_state(
                "pipeline_complete",
                result.outputs,
                metadata={"duration": result.duration_seconds},
            )

        return result

    # Replace run method
    pipeline.run = run_with_checkpoints

    return pipeline
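A minimal sketch of manual checkpointing with PipelineCheckpoint; the run id, step names, and outputs are hypothetical, and clearing state after success is one possible policy, not something the package mandates:

# Hypothetical usage; PipelineCheckpoint comes from the package.
from flowyml.core.checkpoint import PipelineCheckpoint

ckpt = PipelineCheckpoint(run_id="run_123")

if ckpt.exists() and "load" in ckpt.get_completed_steps():
    data = ckpt.load_step_state("load")   # resume: reuse the saved outputs
else:
    data = {"rows": [1, 2, 3]}            # stand-in for the real step
    ckpt.save_step_state("load", data, metadata={"source": "local"})

ckpt.save_step_state("train", sum(data["rows"]))

print(ckpt.resume_point())                # "train"
ckpt.clear()                              # drop state once the run succeeds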