flowyml 1.2.0-py3-none-any.whl → 1.4.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (104)
  1. flowyml/__init__.py +3 -0
  2. flowyml/assets/base.py +10 -0
  3. flowyml/assets/metrics.py +6 -0
  4. flowyml/cli/main.py +108 -2
  5. flowyml/cli/run.py +9 -2
  6. flowyml/core/execution_status.py +52 -0
  7. flowyml/core/hooks.py +106 -0
  8. flowyml/core/observability.py +210 -0
  9. flowyml/core/orchestrator.py +274 -0
  10. flowyml/core/pipeline.py +193 -231
  11. flowyml/core/project.py +34 -2
  12. flowyml/core/remote_orchestrator.py +109 -0
  13. flowyml/core/resources.py +34 -17
  14. flowyml/core/retry_policy.py +80 -0
  15. flowyml/core/scheduler.py +9 -9
  16. flowyml/core/scheduler_config.py +2 -3
  17. flowyml/core/step.py +18 -1
  18. flowyml/core/submission_result.py +53 -0
  19. flowyml/integrations/keras.py +95 -22
  20. flowyml/monitoring/alerts.py +2 -2
  21. flowyml/stacks/__init__.py +15 -0
  22. flowyml/stacks/aws.py +599 -0
  23. flowyml/stacks/azure.py +295 -0
  24. flowyml/stacks/bridge.py +9 -9
  25. flowyml/stacks/components.py +24 -2
  26. flowyml/stacks/gcp.py +158 -11
  27. flowyml/stacks/local.py +5 -0
  28. flowyml/stacks/plugins.py +2 -2
  29. flowyml/stacks/registry.py +21 -0
  30. flowyml/storage/artifacts.py +15 -5
  31. flowyml/storage/materializers/__init__.py +2 -0
  32. flowyml/storage/materializers/base.py +33 -0
  33. flowyml/storage/materializers/cloudpickle.py +74 -0
  34. flowyml/storage/metadata.py +3 -881
  35. flowyml/storage/remote.py +590 -0
  36. flowyml/storage/sql.py +911 -0
  37. flowyml/ui/backend/dependencies.py +28 -0
  38. flowyml/ui/backend/main.py +43 -80
  39. flowyml/ui/backend/routers/assets.py +483 -17
  40. flowyml/ui/backend/routers/client.py +46 -0
  41. flowyml/ui/backend/routers/execution.py +13 -2
  42. flowyml/ui/backend/routers/experiments.py +97 -14
  43. flowyml/ui/backend/routers/metrics.py +168 -0
  44. flowyml/ui/backend/routers/pipelines.py +77 -12
  45. flowyml/ui/backend/routers/projects.py +33 -7
  46. flowyml/ui/backend/routers/runs.py +221 -12
  47. flowyml/ui/backend/routers/schedules.py +5 -21
  48. flowyml/ui/backend/routers/stats.py +14 -0
  49. flowyml/ui/backend/routers/traces.py +37 -53
  50. flowyml/ui/frontend/dist/assets/index-DcYwrn2j.css +1 -0
  51. flowyml/ui/frontend/dist/assets/index-Dlz_ygOL.js +592 -0
  52. flowyml/ui/frontend/dist/index.html +2 -2
  53. flowyml/ui/frontend/src/App.jsx +4 -1
  54. flowyml/ui/frontend/src/app/assets/page.jsx +260 -230
  55. flowyml/ui/frontend/src/app/dashboard/page.jsx +38 -7
  56. flowyml/ui/frontend/src/app/experiments/page.jsx +61 -314
  57. flowyml/ui/frontend/src/app/observability/page.jsx +277 -0
  58. flowyml/ui/frontend/src/app/pipelines/page.jsx +79 -402
  59. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectArtifactsList.jsx +151 -0
  60. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectExperimentsList.jsx +145 -0
  61. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectHeader.jsx +45 -0
  62. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectHierarchy.jsx +467 -0
  63. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectMetricsPanel.jsx +253 -0
  64. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectPipelinesList.jsx +105 -0
  65. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectRelations.jsx +189 -0
  66. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectRunsList.jsx +136 -0
  67. flowyml/ui/frontend/src/app/projects/[projectId]/_components/ProjectTabs.jsx +95 -0
  68. flowyml/ui/frontend/src/app/projects/[projectId]/page.jsx +326 -0
  69. flowyml/ui/frontend/src/app/projects/page.jsx +13 -3
  70. flowyml/ui/frontend/src/app/runs/[runId]/page.jsx +79 -10
  71. flowyml/ui/frontend/src/app/runs/page.jsx +82 -424
  72. flowyml/ui/frontend/src/app/settings/page.jsx +1 -0
  73. flowyml/ui/frontend/src/app/tokens/page.jsx +62 -16
  74. flowyml/ui/frontend/src/components/AssetDetailsPanel.jsx +373 -0
  75. flowyml/ui/frontend/src/components/AssetLineageGraph.jsx +291 -0
  76. flowyml/ui/frontend/src/components/AssetStatsDashboard.jsx +302 -0
  77. flowyml/ui/frontend/src/components/AssetTreeHierarchy.jsx +477 -0
  78. flowyml/ui/frontend/src/components/ExperimentDetailsPanel.jsx +227 -0
  79. flowyml/ui/frontend/src/components/NavigationTree.jsx +401 -0
  80. flowyml/ui/frontend/src/components/PipelineDetailsPanel.jsx +239 -0
  81. flowyml/ui/frontend/src/components/PipelineGraph.jsx +67 -3
  82. flowyml/ui/frontend/src/components/ProjectSelector.jsx +115 -0
  83. flowyml/ui/frontend/src/components/RunDetailsPanel.jsx +298 -0
  84. flowyml/ui/frontend/src/components/header/Header.jsx +48 -1
  85. flowyml/ui/frontend/src/components/plugins/ZenMLIntegration.jsx +106 -0
  86. flowyml/ui/frontend/src/components/sidebar/Sidebar.jsx +52 -26
  87. flowyml/ui/frontend/src/components/ui/DataView.jsx +35 -17
  88. flowyml/ui/frontend/src/components/ui/ErrorBoundary.jsx +118 -0
  89. flowyml/ui/frontend/src/contexts/ProjectContext.jsx +2 -2
  90. flowyml/ui/frontend/src/contexts/ToastContext.jsx +116 -0
  91. flowyml/ui/frontend/src/layouts/MainLayout.jsx +5 -1
  92. flowyml/ui/frontend/src/router/index.jsx +4 -0
  93. flowyml/ui/frontend/src/utils/date.js +10 -0
  94. flowyml/ui/frontend/src/utils/downloads.js +11 -0
  95. flowyml/utils/config.py +6 -0
  96. flowyml/utils/stack_config.py +45 -3
  97. {flowyml-1.2.0.dist-info → flowyml-1.4.0.dist-info}/METADATA +44 -4
  98. flowyml-1.4.0.dist-info/RECORD +200 -0
  99. {flowyml-1.2.0.dist-info → flowyml-1.4.0.dist-info}/licenses/LICENSE +1 -1
  100. flowyml/ui/frontend/dist/assets/index-DFNQnrUj.js +0 -448
  101. flowyml/ui/frontend/dist/assets/index-pWI271rZ.css +0 -1
  102. flowyml-1.2.0.dist-info/RECORD +0 -159
  103. {flowyml-1.2.0.dist-info → flowyml-1.4.0.dist-info}/WHEEL +0 -0
  104. {flowyml-1.2.0.dist-info → flowyml-1.4.0.dist-info}/entry_points.txt +0 -0
flowyml/core/orchestrator.py (new file, +274 -0)
@@ -0,0 +1,274 @@
+"""Orchestrator Module - Manages the execution of pipelines."""
+
+from typing import Any, TYPE_CHECKING
+
+from flowyml.stacks.components import Orchestrator, ComponentType, ResourceConfig, DockerConfig
+
+# Import existing materializer system
+from flowyml.storage.materializers.base import get_materializer  # noqa
+
+# Import lifecycle hooks
+from flowyml.core.hooks import get_global_hooks
+
+# Import observability
+from flowyml.core.observability import get_metrics_collector
+
+# Import retry policy
+from flowyml.core.retry_policy import with_retry
+
+if TYPE_CHECKING:
+    from flowyml.core.pipeline import Pipeline
+
+
+class LocalOrchestrator(Orchestrator):
+    """Orchestrator that runs pipelines locally."""
+
+    def __init__(self, name: str = "local"):
+        super().__init__(name)
+
+    @property
+    def component_type(self) -> ComponentType:
+        return ComponentType.ORCHESTRATOR
+
+    def validate(self) -> bool:
+        return True
+
+    def to_dict(self) -> dict[str, Any]:
+        return {
+            "name": self.name,
+            "type": "local",
+        }
+
+    def get_run_status(self, run_id: str) -> str:
+        # Local runs are synchronous, so if we are asking for status, it's likely finished
+        # But we don't track status persistence here (MetadataStore does that).
+        # We can return "UNKNOWN" or query metadata store if we had access.
+        return "COMPLETED"
+
+    @with_retry
+    def run_pipeline(
+        self,
+        pipeline: "Pipeline",
+        run_id: str,
+        resources: ResourceConfig | None = None,
+        docker_config: DockerConfig | None = None,
+        inputs: dict[str, Any] | None = None,
+        context: dict[str, Any] | None = None,
+        **kwargs,
+    ) -> Any:
+        """Run the pipeline locally in the current process."""
+        from flowyml.core.pipeline import PipelineResult
+        from flowyml.core.step_grouping import get_execution_units, StepGroup
+        from flowyml.core.executor import ExecutionResult
+        import inspect
+
+        # Initialize result
+        result = PipelineResult(run_id, pipeline.name)
+        result.attach_configs(resources, docker_config)
+
+        # Run pipeline start hooks
+        hooks = get_global_hooks()
+        hooks.run_pipeline_start_hooks(pipeline)
+
+        # Record metrics if collector configured
+        metrics_collector = get_metrics_collector()
+        if metrics_collector:
+            metrics_collector.record_pipeline_start(pipeline, run_id)
+
+        # Check executor
+        if pipeline.executor is None:
+            raise RuntimeError(
+                "Pipeline has no executor configured. LocalOrchestrator requires a stack with an executor.",
+            )
+
+        step_outputs = inputs or {}
+
+        # Map step names to step objects for easier lookup
+        pipeline.steps_dict = {step.name: step for step in pipeline.steps}
+
+        # Get execution units (individual steps or groups)
+        execution_units = get_execution_units(pipeline.dag, pipeline.steps)
+
+        # Execute steps/groups in order
+        for unit in execution_units:
+            # Check if unit is a group or individual step
+            if isinstance(unit, StepGroup):
+                # Execute entire group
+
+                # Get context parameters (use first step's function as representative)
+                first_step = unit.steps[0]
+                context_params = pipeline.context.inject_params(first_step.func)
+
+                # Execute the group
+                group_results = pipeline.executor.execute_step_group(
+                    step_group=unit,
+                    inputs=step_outputs,
+                    context_params=context_params,
+                    cache_store=pipeline.cache_store,
+                    artifact_store=pipeline.stack.artifact_store if pipeline.stack else None,
+                    run_id=run_id,
+                    project_name=pipeline.name,
+                )
+
+                # Process each step result
+                for step_result in group_results:
+                    result.add_step_result(step_result)
+
+                    # Handle failure
+                    if not step_result.success and not step_result.skipped:
+                        result.finalize(success=False)
+                        pipeline._save_run(result)
+                        return result
+
+                    # Store outputs for next steps/groups
+                    if step_result.output is not None:
+                        self._process_step_output(pipeline, step_result, step_outputs, result)
+
+            else:
+                # Execute single ungrouped step
+                step = unit
+
+                # Prepare step inputs
+                step_inputs = {}
+
+                # Get function signature to map inputs to parameters
+                sig = inspect.signature(step.func)
+                params = list(sig.parameters.values())
+
+                # Filter out self/cls
+                params = [p for p in params if p.name not in ("self", "cls")]
+
+                # Track which parameters have been assigned
+                assigned_params = set()
+
+                if step.inputs:
+                    for i, input_name in enumerate(step.inputs):
+                        if input_name not in step_outputs:
+                            continue
+
+                        val = step_outputs[input_name]
+
+                        # Check if input name matches a parameter
+                        param_match = next((p for p in params if p.name == input_name), None)
+
+                        if param_match:
+                            step_inputs[param_match.name] = val
+                            assigned_params.add(param_match.name)
+                        elif i < len(params):
+                            # Positional fallback
+                            target_param = params[i]
+                            if target_param.name not in assigned_params:
+                                step_inputs[target_param.name] = val
+                                assigned_params.add(target_param.name)
+
+                # Auto-map parameters from available outputs
+                for param in params:
+                    if param.name in step_outputs and param.name not in step_inputs:
+                        step_inputs[param.name] = step_outputs[param.name]
+                        assigned_params.add(param.name)
+
+                # Validate context parameters
+                exclude_params = list(step.inputs) + list(step_inputs.keys())
+                missing_params = pipeline.context.validate_for_step(step.func, exclude=exclude_params)
+                if missing_params:
+                    error_msg = f"Missing required parameters: {missing_params}"
+                    step_result = ExecutionResult(
+                        step_name=step.name,
+                        success=False,
+                        error=error_msg,
+                    )
+                    result.add_step_result(step_result)
+                    result.finalize(success=False)
+                    pipeline._save_run(result)
+                    pipeline._save_pipeline_definition()
+                    return result
+
+                # Get context parameters for this step
+                context_params = pipeline.context.inject_params(step.func)
+
+                # Run step start hooks
+                hooks.run_step_start_hooks(step, step_inputs)
+
+                # Execute step
+                step_result = pipeline.executor.execute_step(
+                    step,
+                    step_inputs,
+                    context_params,
+                    pipeline.cache_store,
+                    artifact_store=pipeline.stack.artifact_store if pipeline.stack else None,
+                    run_id=run_id,
+                    project_name=pipeline.name,
+                )
+
+                # Run step end hooks
+                hooks.run_step_end_hooks(step, step_result)
+
+                result.add_step_result(step_result)
+
+                # Handle failure
+                if not step_result.success:
+                    result.finalize(success=False)
+                    pipeline._save_run(result)
+                    pipeline._save_pipeline_definition()
+                    return result
+
+                # Store outputs for next steps/groups
+                if step_result.output is not None:
+                    self._process_step_output(pipeline, step_result, step_outputs, result)
+
+        # Success! Finalize and return
+        result.finalize(success=True)
+
+        # Run pipeline end hooks
+        hooks.run_pipeline_end_hooks(pipeline, result)
+
+        # Record metrics
+        if metrics_collector:
+            metrics_collector.record_pipeline_end(pipeline, result)
+
+        pipeline._save_run(result)
+        pipeline._save_pipeline_definition()
+        return result
+
+    def _process_step_output(self, pipeline, step_result, step_outputs, result):
+        """Helper to process step outputs and update state."""
+        from pathlib import Path
+
+        step_def = next((s for s in pipeline.steps if s.name == step_result.step_name), None)
+        if not step_def:
+            return
+
+        outputs_to_process = {}
+
+        # Normalize outputs
+        if len(step_def.outputs) == 1:
+            outputs_to_process[step_def.outputs[0]] = step_result.output
+        elif isinstance(step_result.output, (list, tuple)) and len(step_result.output) == len(step_def.outputs):
+            for name, val in zip(step_def.outputs, step_result.output, strict=False):
+                outputs_to_process[name] = val
+        elif isinstance(step_result.output, dict):
+            for name in step_def.outputs:
+                if name in step_result.output:
+                    outputs_to_process[name] = step_result.output[name]
+        else:
+            if step_def.outputs:
+                outputs_to_process[step_def.outputs[0]] = step_result.output
+
+        # Save and update state
+        for name, value in outputs_to_process.items():
+            # Update in-memory outputs for immediate next steps (optimization)
+            step_outputs[name] = value
+            result.outputs[name] = value
+
+            # Materialize to artifact store using existing materializer system
+            if pipeline.stack and pipeline.stack.artifact_store:
+                try:
+                    materializer = get_materializer(value)
+                    if materializer:
+                        # Use artifact store's base path
+                        artifact_path = (
+                            Path(pipeline.stack.artifact_store.base_path) / result.run_id / step_result.step_name / name
+                        )
+                        materializer.save(value, artifact_path)
+                except Exception as e:
+                    print(f"Warning: Failed to materialize output '{name}': {e}")