flowyml-1.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (159)
  1. flowyml/__init__.py +207 -0
  2. flowyml/assets/__init__.py +22 -0
  3. flowyml/assets/artifact.py +40 -0
  4. flowyml/assets/base.py +209 -0
  5. flowyml/assets/dataset.py +100 -0
  6. flowyml/assets/featureset.py +301 -0
  7. flowyml/assets/metrics.py +104 -0
  8. flowyml/assets/model.py +82 -0
  9. flowyml/assets/registry.py +157 -0
  10. flowyml/assets/report.py +315 -0
  11. flowyml/cli/__init__.py +5 -0
  12. flowyml/cli/experiment.py +232 -0
  13. flowyml/cli/init.py +256 -0
  14. flowyml/cli/main.py +327 -0
  15. flowyml/cli/run.py +75 -0
  16. flowyml/cli/stack_cli.py +532 -0
  17. flowyml/cli/ui.py +33 -0
  18. flowyml/core/__init__.py +68 -0
  19. flowyml/core/advanced_cache.py +274 -0
  20. flowyml/core/approval.py +64 -0
  21. flowyml/core/cache.py +203 -0
  22. flowyml/core/checkpoint.py +148 -0
  23. flowyml/core/conditional.py +373 -0
  24. flowyml/core/context.py +155 -0
  25. flowyml/core/error_handling.py +419 -0
  26. flowyml/core/executor.py +354 -0
  27. flowyml/core/graph.py +185 -0
  28. flowyml/core/parallel.py +452 -0
  29. flowyml/core/pipeline.py +764 -0
  30. flowyml/core/project.py +253 -0
  31. flowyml/core/resources.py +424 -0
  32. flowyml/core/scheduler.py +630 -0
  33. flowyml/core/scheduler_config.py +32 -0
  34. flowyml/core/step.py +201 -0
  35. flowyml/core/step_grouping.py +292 -0
  36. flowyml/core/templates.py +226 -0
  37. flowyml/core/versioning.py +217 -0
  38. flowyml/integrations/__init__.py +1 -0
  39. flowyml/integrations/keras.py +134 -0
  40. flowyml/monitoring/__init__.py +1 -0
  41. flowyml/monitoring/alerts.py +57 -0
  42. flowyml/monitoring/data.py +102 -0
  43. flowyml/monitoring/llm.py +160 -0
  44. flowyml/monitoring/monitor.py +57 -0
  45. flowyml/monitoring/notifications.py +246 -0
  46. flowyml/registry/__init__.py +5 -0
  47. flowyml/registry/model_registry.py +491 -0
  48. flowyml/registry/pipeline_registry.py +55 -0
  49. flowyml/stacks/__init__.py +27 -0
  50. flowyml/stacks/base.py +77 -0
  51. flowyml/stacks/bridge.py +288 -0
  52. flowyml/stacks/components.py +155 -0
  53. flowyml/stacks/gcp.py +499 -0
  54. flowyml/stacks/local.py +112 -0
  55. flowyml/stacks/migration.py +97 -0
  56. flowyml/stacks/plugin_config.py +78 -0
  57. flowyml/stacks/plugins.py +401 -0
  58. flowyml/stacks/registry.py +226 -0
  59. flowyml/storage/__init__.py +26 -0
  60. flowyml/storage/artifacts.py +246 -0
  61. flowyml/storage/materializers/__init__.py +20 -0
  62. flowyml/storage/materializers/base.py +133 -0
  63. flowyml/storage/materializers/keras.py +185 -0
  64. flowyml/storage/materializers/numpy.py +94 -0
  65. flowyml/storage/materializers/pandas.py +142 -0
  66. flowyml/storage/materializers/pytorch.py +135 -0
  67. flowyml/storage/materializers/sklearn.py +110 -0
  68. flowyml/storage/materializers/tensorflow.py +152 -0
  69. flowyml/storage/metadata.py +931 -0
  70. flowyml/tracking/__init__.py +1 -0
  71. flowyml/tracking/experiment.py +211 -0
  72. flowyml/tracking/leaderboard.py +191 -0
  73. flowyml/tracking/runs.py +145 -0
  74. flowyml/ui/__init__.py +15 -0
  75. flowyml/ui/backend/Dockerfile +31 -0
  76. flowyml/ui/backend/__init__.py +0 -0
  77. flowyml/ui/backend/auth.py +163 -0
  78. flowyml/ui/backend/main.py +187 -0
  79. flowyml/ui/backend/routers/__init__.py +0 -0
  80. flowyml/ui/backend/routers/assets.py +45 -0
  81. flowyml/ui/backend/routers/execution.py +179 -0
  82. flowyml/ui/backend/routers/experiments.py +49 -0
  83. flowyml/ui/backend/routers/leaderboard.py +118 -0
  84. flowyml/ui/backend/routers/notifications.py +72 -0
  85. flowyml/ui/backend/routers/pipelines.py +110 -0
  86. flowyml/ui/backend/routers/plugins.py +192 -0
  87. flowyml/ui/backend/routers/projects.py +85 -0
  88. flowyml/ui/backend/routers/runs.py +66 -0
  89. flowyml/ui/backend/routers/schedules.py +222 -0
  90. flowyml/ui/backend/routers/traces.py +84 -0
  91. flowyml/ui/frontend/Dockerfile +20 -0
  92. flowyml/ui/frontend/README.md +315 -0
  93. flowyml/ui/frontend/dist/assets/index-DFNQnrUj.js +448 -0
  94. flowyml/ui/frontend/dist/assets/index-pWI271rZ.css +1 -0
  95. flowyml/ui/frontend/dist/index.html +16 -0
  96. flowyml/ui/frontend/index.html +15 -0
  97. flowyml/ui/frontend/nginx.conf +26 -0
  98. flowyml/ui/frontend/package-lock.json +3545 -0
  99. flowyml/ui/frontend/package.json +33 -0
  100. flowyml/ui/frontend/postcss.config.js +6 -0
  101. flowyml/ui/frontend/src/App.jsx +21 -0
  102. flowyml/ui/frontend/src/app/assets/page.jsx +397 -0
  103. flowyml/ui/frontend/src/app/dashboard/page.jsx +295 -0
  104. flowyml/ui/frontend/src/app/experiments/[experimentId]/page.jsx +255 -0
  105. flowyml/ui/frontend/src/app/experiments/page.jsx +360 -0
  106. flowyml/ui/frontend/src/app/leaderboard/page.jsx +133 -0
  107. flowyml/ui/frontend/src/app/pipelines/page.jsx +454 -0
  108. flowyml/ui/frontend/src/app/plugins/page.jsx +48 -0
  109. flowyml/ui/frontend/src/app/projects/page.jsx +292 -0
  110. flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +682 -0
  111. flowyml/ui/frontend/src/app/runs/page.jsx +470 -0
  112. flowyml/ui/frontend/src/app/schedules/page.jsx +585 -0
  113. flowyml/ui/frontend/src/app/settings/page.jsx +314 -0
  114. flowyml/ui/frontend/src/app/tokens/page.jsx +456 -0
  115. flowyml/ui/frontend/src/app/traces/page.jsx +246 -0
  116. flowyml/ui/frontend/src/components/Layout.jsx +108 -0
  117. flowyml/ui/frontend/src/components/PipelineGraph.jsx +295 -0
  118. flowyml/ui/frontend/src/components/header/Header.jsx +72 -0
  119. flowyml/ui/frontend/src/components/plugins/AddPluginDialog.jsx +121 -0
  120. flowyml/ui/frontend/src/components/plugins/InstalledPlugins.jsx +124 -0
  121. flowyml/ui/frontend/src/components/plugins/PluginBrowser.jsx +167 -0
  122. flowyml/ui/frontend/src/components/plugins/PluginManager.jsx +60 -0
  123. flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +145 -0
  124. flowyml/ui/frontend/src/components/ui/Badge.jsx +26 -0
  125. flowyml/ui/frontend/src/components/ui/Button.jsx +34 -0
  126. flowyml/ui/frontend/src/components/ui/Card.jsx +44 -0
  127. flowyml/ui/frontend/src/components/ui/CodeSnippet.jsx +38 -0
  128. flowyml/ui/frontend/src/components/ui/CollapsibleCard.jsx +53 -0
  129. flowyml/ui/frontend/src/components/ui/DataView.jsx +175 -0
  130. flowyml/ui/frontend/src/components/ui/EmptyState.jsx +49 -0
  131. flowyml/ui/frontend/src/components/ui/ExecutionStatus.jsx +122 -0
  132. flowyml/ui/frontend/src/components/ui/KeyValue.jsx +25 -0
  133. flowyml/ui/frontend/src/components/ui/ProjectSelector.jsx +134 -0
  134. flowyml/ui/frontend/src/contexts/ProjectContext.jsx +79 -0
  135. flowyml/ui/frontend/src/contexts/ThemeContext.jsx +54 -0
  136. flowyml/ui/frontend/src/index.css +11 -0
  137. flowyml/ui/frontend/src/layouts/MainLayout.jsx +23 -0
  138. flowyml/ui/frontend/src/main.jsx +10 -0
  139. flowyml/ui/frontend/src/router/index.jsx +39 -0
  140. flowyml/ui/frontend/src/services/pluginService.js +90 -0
  141. flowyml/ui/frontend/src/utils/api.js +47 -0
  142. flowyml/ui/frontend/src/utils/cn.js +6 -0
  143. flowyml/ui/frontend/tailwind.config.js +31 -0
  144. flowyml/ui/frontend/vite.config.js +21 -0
  145. flowyml/ui/utils.py +77 -0
  146. flowyml/utils/__init__.py +67 -0
  147. flowyml/utils/config.py +308 -0
  148. flowyml/utils/debug.py +240 -0
  149. flowyml/utils/environment.py +346 -0
  150. flowyml/utils/git.py +319 -0
  151. flowyml/utils/logging.py +61 -0
  152. flowyml/utils/performance.py +314 -0
  153. flowyml/utils/stack_config.py +296 -0
  154. flowyml/utils/validation.py +270 -0
  155. flowyml-1.1.0.dist-info/METADATA +372 -0
  156. flowyml-1.1.0.dist-info/RECORD +159 -0
  157. flowyml-1.1.0.dist-info/WHEEL +4 -0
  158. flowyml-1.1.0.dist-info/entry_points.txt +3 -0
  159. flowyml-1.1.0.dist-info/licenses/LICENSE +17 -0
flowyml/core/templates.py
@@ -0,0 +1,226 @@
+ """Pre-built pipeline templates."""
+
+ from typing import Any
+ from flowyml.core.pipeline import Pipeline
+
+
+ class PipelineTemplate:
+     """Base class for pipeline templates."""
+
+     @staticmethod
+     def create(name: str, **kwargs) -> Pipeline:
+         """Create a pipeline from this template."""
+         raise NotImplementedError()
+
+
+ class MLTrainingTemplate(PipelineTemplate):
+     """Standard ML training pipeline template.
+
+     Steps:
+     1. Load data
+     2. Preprocess
+     3. Train model
+     4. Evaluate
+     5. Save model
+     """
+
+     @staticmethod
+     def create(
+         name: str = "ml_training",
+         data_loader: Any | None = None,
+         preprocessor: Any | None = None,
+         trainer: Any | None = None,
+         evaluator: Any | None = None,
+         model_saver: Any | None = None,
+         **ctx_params,
+     ) -> Pipeline:
+         """Create an ML training pipeline.
+
+         Args:
+             name: Pipeline name
+             data_loader: Function to load data
+             preprocessor: Function to preprocess data
+             trainer: Function to train model
+             evaluator: Function to evaluate model
+             model_saver: Function to save model
+             **ctx_params: Additional context parameters
+         """
+         from flowyml.core.pipeline import Pipeline
+         from flowyml.core.step import step
+         from flowyml.core.context import context
+
+         ctx = context(**ctx_params)
+         pipeline = Pipeline(name, context=ctx)
+
+         if data_loader:
+             load_step = step(name="load_data", outputs=["dataset"])(data_loader)
+             pipeline.add_step(load_step)
+
+         if preprocessor:
+             preprocess_step = step(
+                 name="preprocess",
+                 inputs=["dataset"],
+                 outputs=["processed_data"],
+             )(preprocessor)
+             pipeline.add_step(preprocess_step)
+
+         if trainer:
+             train_step = step(
+                 name="train",
+                 inputs=["processed_data"],
+                 outputs=["model"],
+             )(trainer)
+             pipeline.add_step(train_step)
+
+         if evaluator:
+             eval_step = step(
+                 name="evaluate",
+                 inputs=["model", "processed_data"],
+                 outputs=["metrics"],
+             )(evaluator)
+             pipeline.add_step(eval_step)
+
+         if model_saver:
+             save_step = step(
+                 name="save_model",
+                 inputs=["model"],
+             )(model_saver)
+             pipeline.add_step(save_step)
+
+         return pipeline
+
+
+ class DataPipelineTemplate(PipelineTemplate):
+     """Data processing pipeline template.
+
+     Steps:
+     1. Extract
+     2. Transform
+     3. Load (ETL)
+     """
+
+     @staticmethod
+     def create(
+         name: str = "etl_pipeline",
+         extractor: Any | None = None,
+         transformer: Any | None = None,
+         loader: Any | None = None,
+         **ctx_params,
+     ) -> Pipeline:
+         """Create an ETL pipeline."""
+         from flowyml.core.pipeline import Pipeline
+         from flowyml.core.step import step
+         from flowyml.core.context import context
+
+         ctx = context(**ctx_params)
+         pipeline = Pipeline(name, context=ctx)
+
+         if extractor:
+             extract_step = step(name="extract", outputs=["raw_data"])(extractor)
+             pipeline.add_step(extract_step)
+
+         if transformer:
+             transform_step = step(
+                 name="transform",
+                 inputs=["raw_data"],
+                 outputs=["transformed_data"],
+             )(transformer)
+             pipeline.add_step(transform_step)
+
+         if loader:
+             load_step = step(
+                 name="load",
+                 inputs=["transformed_data"],
+             )(loader)
+             pipeline.add_step(load_step)
+
+         return pipeline
+
+
+ class ABTestPipelineTemplate(PipelineTemplate):
+     """A/B testing pipeline template.
+
+     Runs multiple model variants and compares results.
+     """
+
+     @staticmethod
+     def create(
+         name: str = "ab_test",
+         data_loader: Any | None = None,
+         model_a_trainer: Any | None = None,
+         model_b_trainer: Any | None = None,
+         comparator: Any | None = None,
+         **ctx_params,
+     ) -> Pipeline:
+         """Create an A/B test pipeline."""
+         from flowyml.core.pipeline import Pipeline
+         from flowyml.core.step import step
+         from flowyml.core.context import context
+
+         ctx = context(**ctx_params)
+         pipeline = Pipeline(name, context=ctx)
+
+         if data_loader:
+             load_step = step(name="load_data", outputs=["dataset"])(data_loader)
+             pipeline.add_step(load_step)
+
+         if model_a_trainer:
+             train_a = step(
+                 name="train_model_a",
+                 inputs=["dataset"],
+                 outputs=["model_a", "metrics_a"],
+             )(model_a_trainer)
+             pipeline.add_step(train_a)
+
+         if model_b_trainer:
+             train_b = step(
+                 name="train_model_b",
+                 inputs=["dataset"],
+                 outputs=["model_b", "metrics_b"],
+             )(model_b_trainer)
+             pipeline.add_step(train_b)
+
+         if comparator:
+             compare = step(
+                 name="compare",
+                 inputs=["metrics_a", "metrics_b"],
+                 outputs=["winner"],
+             )(comparator)
+             pipeline.add_step(compare)
+
+         return pipeline
+
+
+ # Template registry
+ TEMPLATES = {
+     "ml_training": MLTrainingTemplate,
+     "etl": DataPipelineTemplate,
+     "data_pipeline": DataPipelineTemplate,
+     "ab_test": ABTestPipelineTemplate,
+ }
+
+
+ def create_from_template(template_name: str, **kwargs) -> Pipeline:
+     """Create a pipeline from a template.
+
+     Args:
+         template_name: Name of the template
+         **kwargs: Template-specific arguments
+
+     Returns:
+         Configured pipeline
+
+     Examples:
+         >>> pipeline = create_from_template("ml_training", data_loader=load_data, trainer=train_model)
+     """
+     if template_name not in TEMPLATES:
+         available = ", ".join(TEMPLATES.keys())
+         raise ValueError(f"Unknown template '{template_name}'. Available: {available}")
+
+     template_class = TEMPLATES[template_name]
+     return template_class.create(**kwargs)
+
+
+ def list_templates() -> list[str]:
+     """List available templates."""
+     return list(TEMPLATES.keys())
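For orientation, a minimal usage sketch of the template factory above (not part of the packaged sources). The loader and trainer functions are hypothetical placeholders, and how step inputs are bound at run time is not shown in this diff, so the run call is left commented out.

# Illustrative only: load_data/train_model and their signatures are assumptions.
from flowyml.core.templates import create_from_template, list_templates

def load_data():
    return [1, 2, 3]

def train_model(processed_data):
    return {"weights": sum(processed_data)}

print(list_templates())  # ['ml_training', 'etl', 'data_pipeline', 'ab_test']

pipeline = create_from_template(
    "ml_training",
    data_loader=load_data,
    trainer=train_model,
)
# pipeline.run()  # execution semantics depend on flowyml.core.pipeline.Pipeline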
flowyml/core/versioning.py
@@ -0,0 +1,217 @@
+ """Pipeline versioning system."""
+
+ import json
+ import hashlib
+ from pathlib import Path
+ from typing import Any, Never
+ from datetime import datetime
+ from dataclasses import dataclass, asdict
+
+
+ @dataclass
+ class PipelineVersion:
+     """Represents a pipeline version."""
+
+     version: str
+     pipeline_name: str
+     created_at: str
+     steps: list[str]
+     step_hashes: dict[str, str]
+     context_params: dict[str, Any]
+     metadata: dict[str, Any]
+
+
+ class VersionedPipeline:
+     """Pipeline with version control.
+
+     Tracks changes between versions and allows comparison.
+
+     Examples:
+         >>> from flowyml import VersionedPipeline, step
+         >>> pipeline = VersionedPipeline("training")
+         >>> pipeline.version = "v1.0.0"
+         >>> pipeline.add_step(load_data)
+         >>> pipeline.add_step(train_model)
+         >>> pipeline.save_version()
+         >>> # Make changes
+         >>> pipeline.add_step(evaluate)
+         >>> pipeline.version = "v1.1.0"
+         >>> pipeline.save_version()
+         >>> # Compare versions
+         >>> diff = pipeline.compare_with("v1.0.0")
+     """
+
+     def __init__(
+         self,
+         name: str,
+         version: str = "v0.1.0",
+         versions_dir: str = ".flowyml/versions",
+     ):
+         from flowyml.core.pipeline import Pipeline
+
+         self.name = name
+         self._version = version
+         self.pipeline = Pipeline(name)
+
+         # Version storage
+         self.versions_dir = Path(versions_dir) / name
+         self.versions_dir.mkdir(parents=True, exist_ok=True)
+
+         # Load version history
+         self.versions: dict[str, PipelineVersion] = {}
+         self._load_versions()
+
+     @property
+     def version(self) -> str:
+         """Get current version."""
+         return self._version
+
+     @version.setter
+     def version(self, value: str) -> None:
+         """Set version."""
+         self._version = value
+
+     def add_step(self, step):
+         """Add a step to the pipeline."""
+         self.pipeline.add_step(step)
+         return self
+
+     def _compute_step_hash(self, step) -> str:
+         """Compute hash of step definition."""
+         # Hash based on source code
+         if hasattr(step, "source_code") and step.source_code:
+             return hashlib.md5(step.source_code.encode()).hexdigest()
+         # Fallback to name
+         return hashlib.md5(step.name.encode()).hexdigest()
+
+     def save_version(self, metadata: dict[str, Any] | None = None):
+         """Save current version."""
+         # Compute step hashes
+         step_hashes = {}
+         step_names = []
+
+         for step in self.pipeline.steps:
+             step_names.append(step.name)
+             step_hashes[step.name] = self._compute_step_hash(step)
+
+         # Create version record
+         version_data = PipelineVersion(
+             version=self._version,
+             pipeline_name=self.name,
+             created_at=datetime.now().isoformat(),
+             steps=step_names,
+             step_hashes=step_hashes,
+             context_params=self.pipeline.context._params if hasattr(self.pipeline.context, "_params") else {},
+             metadata=metadata or {},
+         )
+
+         # Save to disk
+         version_file = self.versions_dir / f"{self._version}.json"
+         with open(version_file, "w") as f:
+             json.dump(asdict(version_data), f, indent=2)
+
+         self.versions[self._version] = version_data
+
+         return version_data
+
+     def _load_versions(self) -> None:
+         """Load version history."""
+         for version_file in self.versions_dir.glob("*.json"):
+             with open(version_file) as f:
+                 data = json.load(f)
+                 version = data["version"]
+                 self.versions[version] = PipelineVersion(**data)
+
+     def list_versions(self) -> list[str]:
+         """List all saved versions."""
+         return sorted(self.versions.keys())
+
+     def get_version(self, version: str) -> PipelineVersion | None:
+         """Get specific version details."""
+         return self.versions.get(version)
+
+     def compare_with(self, other_version: str) -> dict[str, Any]:
+         """Compare current pipeline with another version.
+
+         Returns:
+             Dictionary with differences
+         """
+         if other_version not in self.versions:
+             raise ValueError(f"Version {other_version} not found")
+
+         current_steps = {s.name: self._compute_step_hash(s) for s in self.pipeline.steps}
+         other = self.versions[other_version]
+
+         # Find differences
+         added_steps = set(current_steps.keys()) - set(other.steps)
+         removed_steps = set(other.steps) - set(current_steps.keys())
+
+         # Modified steps (same name, different hash)
+         modified_steps = []
+         for step_name in set(current_steps.keys()) & set(other.steps):
+             if current_steps[step_name] != other.step_hashes.get(step_name):
+                 modified_steps.append(step_name)
+
+         comparison = {
+             "current_version": self._version,
+             "compared_to": other_version,
+             "added_steps": list(added_steps),
+             "removed_steps": list(removed_steps),
+             "modified_steps": modified_steps,
+             "step_order_changed": list(current_steps.keys()) != other.steps,
+             "context_changes": self._compare_dicts(
+                 self.pipeline.context._params if hasattr(self.pipeline.context, "_params") else {},
+                 other.context_params,
+             ),
+         }
+
+         return comparison
+
+     def _compare_dicts(self, d1: dict, d2: dict) -> dict[str, Any]:
+         """Compare two dictionaries."""
+         added = set(d1.keys()) - set(d2.keys())
+         removed = set(d2.keys()) - set(d1.keys())
+         modified = {k for k in set(d1.keys()) & set(d2.keys()) if d1[k] != d2[k]}
+
+         return {
+             "added": {k: d1[k] for k in added},
+             "removed": {k: d2[k] for k in removed},
+             "modified": {k: {"old": d2[k], "new": d1[k]} for k in modified},
+         }
+
+     def display_comparison(self, other_version: str) -> None:
+         """Display comparison in readable format."""
+         diff = self.compare_with(other_version)
+
+         if diff["added_steps"]:
+             pass
+
+         if diff["removed_steps"]:
+             pass
+
+         if diff["modified_steps"]:
+             pass
+
+         if diff["step_order_changed"]:
+             pass
+
+         changes = diff["context_changes"]
+         if any([changes["added"], changes["removed"], changes["modified"]]):
+             if changes["added"]:
+                 pass
+             if changes["removed"]:
+                 pass
+             if changes["modified"]:
+                 pass
+
+     def rollback(self, version: str) -> Never:
+         """Rollback to a previous version (not implemented - would need to reconstruct pipeline)."""
+         raise NotImplementedError("Rollback requires pipeline reconstruction from saved state")
+
+     def run(self, *args, **kwargs):
+         """Run the pipeline."""
+         return self.pipeline.run(*args, **kwargs)
+
+     def __getattr__(self, name):
+         """Delegate to underlying pipeline."""
+         return getattr(self.pipeline, name)
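A short sketch of the versioning workflow described in the class docstring above (not part of the packaged sources). The example steps are hypothetical and assume the step decorator factory from flowyml.core.step, used elsewhere in this diff, produces objects that add_step accepts and that expose a name attribute.

# Hypothetical example steps; the @step usage mirrors templates.py above.
from flowyml.core.step import step
from flowyml.core.versioning import VersionedPipeline

@step(name="load_data", outputs=["dataset"])
def load_data():
    return [1, 2, 3]

@step(name="evaluate", inputs=["dataset"], outputs=["metrics"])
def evaluate(dataset):
    return {"n": len(dataset)}

vp = VersionedPipeline("training", version="v1.0.0")
vp.add_step(load_data)
vp.save_version(metadata={"author": "example"})

vp.add_step(evaluate)      # change the pipeline
vp.version = "v1.1.0"
vp.save_version()

print(vp.list_versions())            # ['v1.0.0', 'v1.1.0']
print(vp.compare_with("v1.0.0"))     # 'evaluate' appears under added_steps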
flowyml/integrations/__init__.py
@@ -0,0 +1 @@
+ """flowyml Integrations."""
flowyml/integrations/keras.py
@@ -0,0 +1,134 @@
+ """Keras integration for flowyml."""
+
+ from pathlib import Path
+ from datetime import datetime
+
+ try:
+     from tensorflow import keras
+ except ImportError:
+     try:
+         import keras
+     except ImportError:
+         keras = None
+
+ from flowyml.tracking.experiment import Experiment
+ from flowyml.storage.metadata import SQLiteMetadataStore
+
+
+ class FlowymlKerasCallback(keras.callbacks.Callback if keras else object):
+     """Keras callback for flowyml tracking.
+
+     Automatically logs:
+     - Training metrics (loss, accuracy, etc.)
+     - Model checkpoints (optional)
+     - Model architecture
+     - Training parameters
+     """
+
+     def __init__(
+         self,
+         experiment_name: str,
+         run_name: str | None = None,
+         log_model: bool = True,
+         log_every_epoch: bool = True,
+         metadata_store: SQLiteMetadataStore | None = None,
+     ):
+         """Args:
+             experiment_name: Name of the experiment
+             run_name: Optional run name (defaults to timestamp)
+             log_model: Whether to save the model as an artifact
+             log_every_epoch: Whether to log metrics every epoch
+             metadata_store: Optional metadata store override.
+         """
+         if keras is None:
+             raise ImportError("Keras is not installed. Please install tensorflow or keras.")
+
+         super().__init__()
+         self.experiment_name = experiment_name
+         self.run_name = run_name or datetime.now().strftime("run_%Y%m%d_%H%M%S")
+         self.log_model = log_model
+         self.log_every_epoch = log_every_epoch
+
+         self.metadata_store = metadata_store or SQLiteMetadataStore()
+
+         # Initialize experiment
+         self.experiment = Experiment(experiment_name)
+
+         # Track params
+         self.params_logged = False
+
+     def on_train_begin(self, logs=None) -> None:
+         """Log initial parameters."""
+         if not self.params_logged:
+             params = {
+                 "optimizer": str(self.model.optimizer.get_config()),
+                 "loss": str(self.model.loss),
+                 "metrics": [str(m) for m in self.model.metrics_names],
+                 "epochs": self.params.get("epochs"),
+                 "batch_size": self.params.get("batch_size"),
+                 "samples": self.params.get("samples"),
+             }
+
+             # Log architecture
+             model_json = self.model.to_json()
+
+             self.metadata_store.log_experiment_run(
+                 experiment_id=self.experiment_name,
+                 run_id=self.run_name,
+                 parameters=params,
+             )
+
+             # Save architecture as artifact
+             self.metadata_store.save_artifact(
+                 artifact_id=f"{self.run_name}_model_arch",
+                 metadata={
+                     "name": "model_architecture",
+                     "type": "json",
+                     "run_id": self.run_name,
+                     "value": model_json,
+                     "created_at": datetime.now().isoformat(),
+                 },
+             )
+
+             self.params_logged = True
+
+     def on_epoch_end(self, epoch, logs=None) -> None:
+         """Log metrics at the end of each epoch."""
+         if self.log_every_epoch and logs:
+             # Log metrics to DB
+             for k, v in logs.items():
+                 self.metadata_store.save_metric(
+                     run_id=self.run_name,
+                     name=k,
+                     value=float(v),
+                     step=epoch,
+                 )
+
+             # Update experiment run
+             self.metadata_store.log_experiment_run(
+                 experiment_id=self.experiment_name,
+                 run_id=self.run_name,
+                 metrics=logs,
+             )
+
+     def on_train_end(self, logs=None) -> None:
+         """Save model at the end of training."""
+         if self.log_model:
+             # Create artifacts directory
+             artifact_dir = Path(f".flowyml/artifacts/{self.run_name}")
+             artifact_dir.mkdir(parents=True, exist_ok=True)
+
+             model_path = artifact_dir / "model.keras"
+             self.model.save(model_path)
+
+             self.metadata_store.save_artifact(
+                 artifact_id=f"{self.run_name}_model",
+                 metadata={
+                     "name": "trained_model",
+                     "type": "keras_model",
+                     "run_id": self.run_name,
+                     "path": str(model_path.resolve()),
+                     "created_at": datetime.now().isoformat(),
+                 },
+             )
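A hedged end-to-end sketch of wiring the callback into model.fit (not part of the packaged sources). It assumes TensorFlow is installed; the toy model, random data, and experiment name are placeholders, while the callback constructor arguments match keras.py above.

# Assumes TensorFlow/Keras is installed; model and data are placeholders.
import numpy as np
from tensorflow import keras
from flowyml.integrations.keras import FlowymlKerasCallback

model = keras.Sequential([keras.Input(shape=(4,)), keras.layers.Dense(1)])
model.compile(optimizer="adam", loss="mse")

x, y = np.random.rand(32, 4), np.random.rand(32, 1)
model.fit(
    x,
    y,
    epochs=2,
    callbacks=[FlowymlKerasCallback(experiment_name="demo", log_model=True)],
)
# Per-epoch metrics go to the SQLite metadata store; with log_model=True the
# trained model is written to .flowyml/artifacts/<run_name>/model.keras.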
flowyml/monitoring/__init__.py
@@ -0,0 +1 @@
+ """flowyml Monitoring module."""
flowyml/monitoring/alerts.py
@@ -0,0 +1,57 @@
+ from dataclasses import dataclass, field
+ from enum import Enum
+ from typing import Any, Never
+ from datetime import datetime
+ import logging
+
+ logger = logging.getLogger(__name__)
+
+
+ class AlertLevel(Enum):
+     INFO = "info"
+     WARNING = "warning"
+     ERROR = "error"
+     CRITICAL = "critical"
+
+
+ @dataclass
+ class Alert:
+     title: str
+     message: str
+     level: AlertLevel
+     timestamp: datetime = field(default_factory=datetime.now)
+     metadata: dict[str, Any] | None = None
+
+
+ class AlertHandler:
+     def handle(self, alert: Alert) -> Never:
+         raise NotImplementedError
+
+
+ class ConsoleAlertHandler(AlertHandler):
+     def handle(self, alert: Alert) -> None:
+         # Simple ANSI colors if supported
+         if alert.level in (AlertLevel.WARNING, AlertLevel.ERROR, AlertLevel.CRITICAL):
+             pass
+
+
+ class AlertManager:
+     def __init__(self):
+         self.handlers: list[AlertHandler] = [ConsoleAlertHandler()]
+         self.history: list[Alert] = []
+
+     def add_handler(self, handler: AlertHandler) -> None:
+         self.handlers.append(handler)
+
+     def send_alert(self, title: str, message: str, level: AlertLevel = AlertLevel.INFO, metadata: dict[str, Any] | None = None) -> None:
+         alert = Alert(title=title, message=message, level=level, metadata=metadata)
+         self.history.append(alert)
+         for handler in self.handlers:
+             try:
+                 handler.handle(alert)
+             except Exception as e:
+                 logger.error(f"Failed to handle alert: {e}")
+
+
+ # Global instance
+ alert_manager = AlertManager()
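A brief sketch of registering a custom handler with the global alert_manager (not part of the packaged sources). The file-based handler, its log path, and the alert text are illustrative only; the AlertManager and Alert fields it relies on are those defined above.

# Minimal sketch: a hypothetical custom handler plugged into the global manager.
from flowyml.monitoring.alerts import AlertHandler, AlertLevel, alert_manager

class LogFileAlertHandler(AlertHandler):
    def handle(self, alert) -> None:
        # Append each alert to a local file; the path is an arbitrary choice here.
        with open("alerts.log", "a") as f:
            f.write(f"[{alert.level.value}] {alert.title}: {alert.message}\n")

alert_manager.add_handler(LogFileAlertHandler())
alert_manager.send_alert("Drift detected", "PSI above threshold", level=AlertLevel.WARNING)
print(len(alert_manager.history))  # 1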