FlowerPower 0.20.0__py3-none-any.whl → 0.30.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. flowerpower/__init__.py +2 -6
  2. flowerpower/cfg/__init__.py +4 -11
  3. flowerpower/cfg/base.py +29 -25
  4. flowerpower/cfg/pipeline/__init__.py +3 -3
  5. flowerpower/cfg/pipeline/_schedule.py +32 -0
  6. flowerpower/cfg/pipeline/adapter.py +0 -5
  7. flowerpower/cfg/pipeline/builder.py +377 -0
  8. flowerpower/cfg/pipeline/run.py +89 -0
  9. flowerpower/cfg/project/__init__.py +8 -21
  10. flowerpower/cfg/project/adapter.py +0 -12
  11. flowerpower/cli/__init__.py +2 -28
  12. flowerpower/cli/pipeline.py +10 -4
  13. flowerpower/flowerpower.py +275 -585
  14. flowerpower/pipeline/base.py +19 -10
  15. flowerpower/pipeline/io.py +52 -46
  16. flowerpower/pipeline/manager.py +149 -91
  17. flowerpower/pipeline/pipeline.py +159 -87
  18. flowerpower/pipeline/registry.py +68 -33
  19. flowerpower/pipeline/visualizer.py +4 -4
  20. flowerpower/plugins/{_io → io}/__init__.py +1 -1
  21. flowerpower/settings/__init__.py +0 -2
  22. flowerpower/settings/{backend.py → _backend.py} +0 -19
  23. flowerpower/settings/logging.py +1 -1
  24. flowerpower/utils/logging.py +24 -12
  25. flowerpower/utils/misc.py +17 -0
  26. flowerpower-0.30.0.dist-info/METADATA +451 -0
  27. flowerpower-0.30.0.dist-info/RECORD +42 -0
  28. flowerpower/cfg/pipeline/schedule.py +0 -74
  29. flowerpower/cfg/project/job_queue.py +0 -111
  30. flowerpower/cli/job_queue.py +0 -1329
  31. flowerpower/cli/mqtt.py +0 -174
  32. flowerpower/job_queue/__init__.py +0 -205
  33. flowerpower/job_queue/base.py +0 -611
  34. flowerpower/job_queue/rq/__init__.py +0 -10
  35. flowerpower/job_queue/rq/_trigger.py +0 -37
  36. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +0 -226
  37. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +0 -228
  38. flowerpower/job_queue/rq/manager.py +0 -1893
  39. flowerpower/job_queue/rq/setup.py +0 -154
  40. flowerpower/job_queue/rq/utils.py +0 -69
  41. flowerpower/mqtt.py +0 -12
  42. flowerpower/plugins/mqtt/__init__.py +0 -12
  43. flowerpower/plugins/mqtt/cfg.py +0 -17
  44. flowerpower/plugins/mqtt/manager.py +0 -962
  45. flowerpower/settings/job_queue.py +0 -31
  46. flowerpower-0.20.0.dist-info/METADATA +0 -693
  47. flowerpower-0.20.0.dist-info/RECORD +0 -58
  48. {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/WHEEL +0 -0
  49. {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/entry_points.txt +0 -0
  50. {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/licenses/LICENSE +0 -0
  51. {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/top_level.txt +0 -0
@@ -7,6 +7,7 @@ from fsspec_utils import AbstractFileSystem, BaseStorageOptions, filesystem
 from loguru import logger
 from munch import Munch
 
+from ..settings import CONFIG_DIR, PIPELINES_DIR
 from ..cfg import PipelineConfig, ProjectConfig
 from ..utils.logging import setup_logging
 
@@ -40,26 +41,35 @@ class BasePipeline:
         base_dir: str | None = None,
         storage_options: dict | Munch | BaseStorageOptions = {},
         fs: AbstractFileSystem | None = None,
-        cfg_dir: str = "conf",
-        pipelines_dir: str = "pipelines",
-        job_queue_type: str | None = None, # New parameter for worker backend
+
     ):
         self._base_dir = base_dir
         self._storage_options = storage_options
         if fs is None:
             fs = filesystem(self._base_dir, **self._storage_options)
         self._fs = fs
-        self._cfg_dir = cfg_dir
-        self._pipelines_dir = pipelines_dir
-        self._job_queue_type = job_queue_type
+
 
+        self._setup_paths()
+        self._setup_directories()
+        self._add_modules_path()
+
+    def _setup_paths(self) -> None:
+        """Set up configuration and pipeline directory paths."""
+        self._cfg_dir = CONFIG_DIR
+        self._pipelines_dir = PIPELINES_DIR
+
+    def _setup_directories(self) -> None:
+        """Set up required directories with proper error handling."""
         try:
             self._fs.makedirs(f"{self._cfg_dir}/pipelines", exist_ok=True)
             self._fs.makedirs(self._pipelines_dir, exist_ok=True)
-        except Exception as e:
+        except (OSError, PermissionError) as e:
             logger.error(f"Error creating directories: {e}")
-
-        self._add_modules_path()
+            raise
+        except Exception as e:
+            logger.error(f"Unexpected error creating directories: {e}")
+            raise
 
     def __enter__(self) -> "BasePipeline":
         return self
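With this hunk, `cfg_dir`, `pipelines_dir`, and `job_queue_type` are no longer constructor parameters; the directory names are taken from `flowerpower.settings.CONFIG_DIR` and `PIPELINES_DIR`. A minimal sketch of the narrowed constructor surface (the subclass name is hypothetical, not part of the package):

```python
# Sketch only: MyPipeline is a hypothetical subclass of the BasePipeline shown above.
from flowerpower.pipeline.base import BasePipeline


class MyPipeline(BasePipeline):
    pass


# cfg_dir and pipelines_dir are now fixed by settings.CONFIG_DIR / settings.PIPELINES_DIR,
# so only base_dir, storage_options, and fs remain as constructor arguments.
pipeline = MyPipeline(base_dir="/path/to/project")
```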
@@ -98,7 +108,6 @@ class BasePipeline:
         """
         return ProjectConfig.load(
             base_dir=self._base_dir,
-            job_queue_type=self._job_queue_type,
             fs=self._fs,
             storage_options=self._storage_options,
         )
@@ -43,6 +43,41 @@ class PipelineIOManager:
         self._cfg_dir = registry._cfg_dir
         self._pipelines_dir = registry._pipelines_dir
 
+    def _get_pipeline_files(self, name: str) -> list[str]:
+        """Get the list of files for a single pipeline."""
+        return [
+            "conf/project.yml",
+            f"conf/pipelines/{name}.yml",
+            f"pipelines/{name}.py",
+        ]
+
+    def _get_many_pipeline_files(self, names: list[str]) -> list[str]:
+        """Get the list of files for multiple pipelines."""
+        files = ["conf/project.yml"]
+        for name in names:
+            files.extend([
+                f"conf/pipelines/{name}.yml",
+                f"pipelines/{name}.py",
+            ])
+        return files
+
+    def _get_all_pipeline_files(self) -> list[str] | None:
+        """Get all pipeline files (returns None to let _sync_filesystem auto-discover)."""
+        return None
+
+    def _print_import_success(self, names: list[str], src_base_dir: str) -> None:
+        """Print success message for import operations."""
+        console.print(
+            f"✅ Imported pipelines [bold blue]{', '.join(names)}[/bold blue] from [green]{src_base_dir}[/green] to [bold blue]{self.project_cfg.name}[/bold blue]"
+        )
+
+    def _print_export_success(self, names: list[str] | None, dest_base_dir: str) -> None:
+        """Print success message for export operations."""
+        if names:
+            console.print(f"✅ Exported pipelines [bold blue]{', '.join(names)}[/bold blue] to [green]{dest_base_dir}[/green]")
+        else:
+            console.print(f"✅ Exported all pipelines to [green]{dest_base_dir}[/green]")
+
 
     def _sync_filesystem(
         self,
         src_base_dir: str,
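For reference, the file lists built by the new helpers are the same ones the import/export methods previously assembled inline. A standalone sketch of `_get_many_pipeline_files()` semantics, using two hypothetical pipeline names:

```python
# Standalone sketch: "etl" and "report" are illustrative pipeline names.
names = ["etl", "report"]

files = ["conf/project.yml"]
for name in names:
    files.extend([f"conf/pipelines/{name}.yml", f"pipelines/{name}.py"])

print(files)
# ['conf/project.yml',
#  'conf/pipelines/etl.yml', 'pipelines/etl.py',
#  'conf/pipelines/report.yml', 'pipelines/report.py']
```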
@@ -156,12 +191,7 @@ class PipelineIOManager:
        pm.import_pipeline("my_pipeline", "/path/to/pipeline")
        ```
        """
-        files = [
-            "conf/project.yml",
-            f"conf/pipelines/{name}.yml",
-            f"pipelines/{name}.py",
-        ]
-
+        files = self._get_pipeline_files(name)
         self._sync_filesystem(
             src_base_dir=src_base_dir,
             src_fs=src_fs,
@@ -173,10 +203,7 @@ class PipelineIOManager:
             overwrite=overwrite,
         )
 
-        # Use project_cfg.name directly
-        console.print(
-            f"✅ Imported pipeline [bold blue]{name}[/bold blue] from [green]{src_base_dir}[/green] to [bold blue]{self.project_cfg.name}[/bold blue]"
-        )
+        self._print_import_success([name], src_base_dir)
 
     def import_many(
         self,
@@ -210,15 +237,8 @@ class PipelineIOManager:
        ```
        """
 
-        files = ["conf/project.yml"]
+        files = self._get_many_pipeline_files(names)
 
-        for name in names:
-            files.extend([
-                f"conf/pipelines/{name}.yml",
-                f"pipelines/{name}.py",
-            ])
-
-        # Sync the filesystem
         self._sync_filesystem(
             src_base_dir=src_base_dir,
             src_fs=src_fs,
@@ -229,9 +249,7 @@
             files=files,
             overwrite=overwrite,
         )
-        console.print(
-            f"✅ Imported pipelines [bold blue]{', '.join(names)}[/bold blue] from [green]{src_base_dir}[/green] to [bold blue]{self.project_cfg.name}[/bold blue]"
-        )
+        self._print_import_success(names, src_base_dir)
 
     def import_all(
         self,
@@ -260,6 +278,8 @@ class PipelineIOManager:
        # pm.import_all("s3://my-bucket/pipelines_backup", storage_options={"key": "...", "secret": "..."}, overwrite=False)
        ```
        """
+        files = self._get_all_pipeline_files()
+
         self._sync_filesystem(
             src_base_dir=src_base_dir,
             src_fs=src_fs,
@@ -267,12 +287,10 @@
             dest_base_dir=".",
             dest_fs=self._fs,
             dest_storage_options=None,
-            files=None,
+            files=files,
             overwrite=overwrite,
         )
-        console.print(
-            f"✅ Imported all pipelines from [green]{src_base_dir}[/green] to [bold blue]{self.project_cfg.name}[/bold blue]"
-        )
+        self._print_import_success([], src_base_dir)
 
     def export_pipeline(
         self,
@@ -306,13 +324,8 @@ class PipelineIOManager:
        # pm.export("my_pipeline", "s3://my-bucket/exports", storage_options={"key": "...", "secret": "..."})
        ```
        """
-        files = [
-            "conf/project.yml",
-            f"conf/pipelines/{name}.yml",
-            f"pipelines/{name}.py",
-        ]
+        files = self._get_pipeline_files(name)
 
-        # Sync the filesystem
         self._sync_filesystem(
             src_base_dir=".",
             src_fs=self._fs,
@@ -324,9 +337,7 @@ class PipelineIOManager:
             overwrite=overwrite,
         )
 
-        console.print(
-            f"✅ Exported pipeline [bold blue]{self.project_cfg.name}.{name}[/bold blue] to [green]{dest_base_dir}[/green]"
-        )
+        self._print_export_success([name], dest_base_dir)
 
     def export_many(
         self,
@@ -356,21 +367,15 @@ class PipelineIOManager:
        pm.export_many(pipelines_to_export, "/path/to/export_dir", overwrite=True)
        ```
        """
-        files = [
-            "conf/project.yml",
-        ]
+        # Check if pipelines exist in the registry
         for name in names:
-            # Check if the pipeline exists in the registry
             if not self.registry.has_pipeline(name):
                 raise ValueError(
                     f"Pipeline {name} does not exist in the registry. Please check the name."
                 )
-            # Add pipeline files to the list
-            files.extend([
-                f"conf/pipelines/{name}.yml",
-                f"pipelines/{name}.py",
-            ])
-        # Sync the filesystem
+
+        files = self._get_many_pipeline_files(names)
+
         self._sync_filesystem(
             src_base_dir=".",
             src_fs=self._fs,
@@ -412,7 +417,8 @@ class PipelineIOManager:
        # pm.export_all("s3://my-bucket/pipelines_backup", storage_options={"key": "...", "secret": "..."}, overwrite=False)
        ```
        """
-        # sync the filesystem
+        files = self._get_all_pipeline_files()
+
         self._sync_filesystem(
             src_base_dir=".",
             src_fs=self._fs,
@@ -420,7 +426,7 @@
             dest_base_dir=dest_base_dir,
             dest_fs=dest_fs,
             dest_storage_options=dest_storage_options,
-            files=None,
+            files=files,
             overwrite=overwrite,
         )
         console.print(
@@ -18,17 +18,17 @@ except ImportError:
 
 from fsspec_utils import AbstractFileSystem, BaseStorageOptions, filesystem
 
-from .. import settings
+from ..settings import CONFIG_DIR, PIPELINES_DIR, CACHE_DIR
 from ..cfg import PipelineConfig, ProjectConfig
 from ..cfg.pipeline.adapter import AdapterConfig as PipelineAdapterConfig
-from ..cfg.pipeline.run import ExecutorConfig, WithAdapterConfig
+from ..cfg.pipeline.run import ExecutorConfig, RunConfig, WithAdapterConfig
 from ..cfg.project.adapter import AdapterConfig as ProjectAdapterConfig
 from ..utils.logging import setup_logging
 from .io import PipelineIOManager
 from .registry import HookType, PipelineRegistry
 from .visualizer import PipelineVisualizer
 
-setup_logging(level=settings.LOG_LEVEL)
+setup_logging()
 
 GraphType = TypeVar("GraphType") # Type variable for graphviz.Digraph
 
@@ -40,13 +40,11 @@ class PipelineManager:
    - Configuration management and loading
    - Pipeline creation, deletion, and discovery
    - Pipeline execution via PipelineRunner
-    - Job scheduling via PipelineScheduler
    - Visualization via PipelineVisualizer
    - Import/export operations via PipelineIOManager
 
    Attributes:
        registry (PipelineRegistry): Handles pipeline registration and discovery
-        scheduler (PipelineScheduler): Manages job scheduling and execution
        visualizer (PipelineVisualizer): Handles pipeline visualization
        io (PipelineIOManager): Manages pipeline import/export operations
        project_cfg (ProjectConfig): Current project configuration
@@ -64,7 +62,6 @@
        >>> # Create manager with custom settings
        >>> manager = PipelineManager(
        ...     base_dir="/path/to/project",
-        ...     job_queue_type="rq",
        ...     log_level="DEBUG"
        ... )
    """
@@ -74,9 +71,9 @@
        base_dir: str | None = None,
        storage_options: dict | Munch | BaseStorageOptions | None = None,
        fs: AbstractFileSystem | None = None,
-        cfg_dir: str | None = settings.CONFIG_DIR,
-        pipelines_dir: str | None = settings.PIPELINES_DIR,
-        job_queue_type: str | None = None,
+        cfg_dir: str | None = CONFIG_DIR,
+        pipelines_dir: str | None = PIPELINES_DIR,
+
        log_level: str | None = None,
    ) -> None:
        """Initialize the PipelineManager.
@@ -95,8 +92,7 @@
                Example: "config" or "settings".
            pipelines_dir: Override default pipelines directory name ('pipelines').
                Example: "flows" or "dags".
-            job_queue_type: Override worker type from project config/settings.
-                Valid values: "rq".
+
            log_level: Set logging level for the manager.
                Valid values: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"
 
@@ -116,7 +112,7 @@
            ...     "key": "ACCESS_KEY",
            ...     "secret": "SECRET_KEY"
            ... },
-            ...     job_queue_type="rq",
+
            ...     log_level="DEBUG"
            ... )
        """
@@ -128,7 +124,7 @@
        if storage_options is not None:
            cached = True
            cache_storage = posixpath.join(
-                posixpath.expanduser(settings.CACHE_DIR),
+                posixpath.expanduser(CACHE_DIR),
                self._base_dir.split("://")[-1],
            )
            os.makedirs(cache_storage, exist_ok=True)
@@ -154,17 +150,20 @@
        self._pipelines_dir = pipelines_dir
 
        self._load_project_cfg(
-            reload=True, job_queue_type=job_queue_type
+            reload=True
        ) # Load project config
-        self._job_queue_type = job_queue_type or self.project_cfg.job_queue.type
+
 
        # Ensure essential directories exist (using paths from loaded project_cfg)
        try:
            self._fs.makedirs(self._cfg_dir, exist_ok=True)
            self._fs.makedirs(self._pipelines_dir, exist_ok=True)
-        except Exception as e:
+        except (OSError, PermissionError) as e:
            logger.error(f"Error creating essential directories: {e}")
-            # Consider raising an error here depending on desired behavior
+            raise RuntimeError(f"Failed to create essential directories: {e}") from e
+        except Exception as e:
+            logger.error(f"Unexpected error creating essential directories: {e}")
+            raise RuntimeError(f"Unexpected filesystem error: {e}") from e
 
        # Ensure pipeline modules can be imported
        self._add_modules_path()
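Directory-creation failures during `PipelineManager` construction are no longer logged and swallowed; they now surface as `RuntimeError`. A minimal sketch of how a caller might handle this, assuming a base directory that cannot be written (the path is illustrative):

```python
# Sketch only: "/read-only/project" is a hypothetical, non-writable location.
from flowerpower.pipeline.manager import PipelineManager

try:
    manager = PipelineManager(base_dir="/read-only/project")
except RuntimeError as exc:
    # Raised when the conf/ and pipelines/ directories cannot be created.
    print(f"Could not prepare project directories: {exc}")
```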
@@ -263,7 +262,7 @@
            sys.path.insert(0, modules_path)
 
    def _load_project_cfg(
-        self, reload: bool = False, job_queue_type: str | None = None
+        self, reload: bool = False
    ) -> ProjectConfig:
        """Load or reload the project configuration.
 
@@ -287,8 +286,8 @@
            >>> # Internal usage
            >>> manager = PipelineManager()
            >>> project_cfg = manager._load_project_cfg(reload=True)
-            >>> print(project_cfg.worker.type)
-            'rq'
+            >>> print(project_cfg.name)
+            'my_project'
        """
        if hasattr(self, "_project_cfg") and not reload:
            return self._project_cfg
@@ -296,7 +295,6 @@
        # Pass overrides to ProjectConfig.load
        self._project_cfg = ProjectConfig.load(
            base_dir=self._base_dir,
-            job_queue_type=job_queue_type,
            fs=self._fs, # Pass pre-configured fs if provided
            storage_options=self._storage_options,
        )
@@ -371,8 +369,8 @@
        Example:
            >>> manager = PipelineManager()
            >>> cfg = manager.project_cfg
-            >>> print(cfg.worker.type)
-            'rq'
+            >>> print(cfg.name)
+            'my_project'
        """
        if not hasattr(self, "_project_cfg"):
            self._load_project_cfg()
@@ -402,26 +400,82 @@ class PipelineManager:
 
    # --- Core Execution Method ---
 
+    def _merge_run_config_with_kwargs(self, run_config: RunConfig, kwargs: dict) -> RunConfig:
+        """Merge kwargs into a RunConfig object.
+
+        This helper method updates the RunConfig object with values from kwargs,
+        handling different types of attributes appropriately.
+
+        Args:
+            run_config: The RunConfig object to update
+            kwargs: Dictionary of additional parameters to merge
+
+        Returns:
+            RunConfig: Updated RunConfig object
+        """
+        # Handle dictionary-like attributes with update or deep merge
+        if 'inputs' in kwargs and kwargs['inputs'] is not None:
+            if run_config.inputs is None:
+                run_config.inputs = kwargs['inputs']
+            else:
+                run_config.inputs.update(kwargs['inputs'])
+
+        if 'config' in kwargs and kwargs['config'] is not None:
+            if run_config.config is None:
+                run_config.config = kwargs['config']
+            else:
+                run_config.config.update(kwargs['config'])
+
+        if 'cache' in kwargs and kwargs['cache'] is not None:
+            run_config.cache = kwargs['cache']
+
+        if 'adapter' in kwargs and kwargs['adapter'] is not None:
+            if run_config.adapter is None:
+                run_config.adapter = kwargs['adapter']
+            else:
+                run_config.adapter.update(kwargs['adapter'])
+
+        # Handle executor_cfg - convert string/dict to ExecutorConfig if needed
+        if 'executor_cfg' in kwargs and kwargs['executor_cfg'] is not None:
+            executor_cfg = kwargs['executor_cfg']
+            if isinstance(executor_cfg, str):
+                run_config.executor = ExecutorConfig(type=executor_cfg)
+            elif isinstance(executor_cfg, dict):
+                run_config.executor = ExecutorConfig.from_dict(executor_cfg)
+            elif isinstance(executor_cfg, ExecutorConfig):
+                run_config.executor = executor_cfg
+
+        # Handle adapter configurations
+        if 'with_adapter_cfg' in kwargs and kwargs['with_adapter_cfg'] is not None:
+            with_adapter_cfg = kwargs['with_adapter_cfg']
+            if isinstance(with_adapter_cfg, dict):
+                run_config.with_adapter = WithAdapterConfig.from_dict(with_adapter_cfg)
+            elif isinstance(with_adapter_cfg, WithAdapterConfig):
+                run_config.with_adapter = with_adapter_cfg
+
+        if 'pipeline_adapter_cfg' in kwargs and kwargs['pipeline_adapter_cfg'] is not None:
+            run_config.pipeline_adapter_cfg = kwargs['pipeline_adapter_cfg']
+
+        if 'project_adapter_cfg' in kwargs and kwargs['project_adapter_cfg'] is not None:
+            run_config.project_adapter_cfg = kwargs['project_adapter_cfg']
+
+        # Handle simple attributes
+        simple_attrs = [
+            'final_vars', 'reload', 'log_level', 'max_retries', 'retry_delay',
+            'jitter_factor', 'retry_exceptions', 'on_success', 'on_failure'
+        ]
+
+        for attr in simple_attrs:
+            if attr in kwargs and kwargs[attr] is not None:
+                setattr(run_config, attr, kwargs[attr])
+
+        return run_config
+
    def run(
        self,
        name: str,
-        inputs: dict | None = None,
-        final_vars: list[str] | None = None,
-        config: dict | None = None,
-        cache: dict | None = None,
-        executor_cfg: str | dict | ExecutorConfig | None = None,
-        with_adapter_cfg: dict | WithAdapterConfig | None = None,
-        pipeline_adapter_cfg: dict | PipelineAdapterConfig | None = None,
-        project_adapter_cfg: dict | ProjectAdapterConfig | None = None,
-        adapter: dict[str, Any] | None = None,
-        reload: bool = False,
-        log_level: str | None = None,
-        max_retries: int | None = None,
-        retry_delay: float | None = None,
-        jitter_factor: float | None = None,
-        retry_exceptions: tuple | list | None = None,
-        on_success: Callable | tuple[Callable, tuple | None, dict | None] | None = None,
-        on_failure: Callable | tuple[Callable, tuple | None, dict | None] | None = None,
+        run_config: RunConfig | None = None,
+        **kwargs
    ) -> dict[str, Any]:
        """Execute a pipeline synchronously and return its results.
 
@@ -430,33 +484,36 @@ class PipelineManager:
 
        Args:
            name (str): Name of the pipeline to run. Must be a valid identifier.
-            inputs (dict | None): Override pipeline input values. Example: {"data_date": "2025-04-28"}
-            final_vars (list[str] | None): Specify which output variables to return.
-                Example: ["model", "metrics"]
-            config (dict | None): Configuration for Hamilton pipeline executor.
-                Example: {"model": "LogisticRegression"}
-            cache (dict | None): Cache configuration for results. Example: {"recompute": ["node1", "final_node"]}
-            executor_cfg (str | dict | ExecutorConfig | None): Execution configuration, can be:
-                - str: Executor name, e.g. "threadpool", "local"
-                - dict: Raw config, e.g. {"type": "threadpool", "max_workers": 4}
-                - ExecutorConfig: Structured config object
-            with_adapter_cfg (dict | WithAdapterConfig | None): Adapter settings for pipeline execution.
-                Example: {"opentelemetry": True, "tracker": False}
-            pipeline_adapter_cfg (dict | PipelineAdapterConfig | None): Pipeline-specific adapter settings.
-                Example: {"tracker": {"project_id": "123", "tags": {"env": "prod"}}}
-            project_adapter_cfg (dict | ProjectAdapterConfig | None): Project-level adapter settings.
-                Example: {"opentelemetry": {"host": "http://localhost:4317"}}
-            adapter (dict[str, Any] | None): Custom adapter instance for pipeline
-                Example: {"ray_graph_adapter": RayGraphAdapter()}
-            reload (bool): Force reload of pipeline configuration.
-            log_level (str | None): Logging level for the execution. Default None uses project config.
-                Valid values: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"
-            max_retries (int): Maximum number of retries for execution.
-            retry_delay (float): Delay between retries in seconds.
-            jitter_factor (float): Random jitter factor to add to retry delay
-            retry_exceptions (tuple): Exceptions that trigger a retry.
-            on_success (Callable | tuple[Callable, tuple | None, dict | None] | None): Callback to run on successful pipeline execution.
-            on_failure (Callable | tuple[Callable, tuple | None, dict | None] | None): Callback to run on pipeline execution failure.
+            run_config (RunConfig | None): Run configuration object containing all execution parameters.
+                If None, the default configuration from the pipeline will be used.
+            **kwargs: Additional parameters to override the run_config. Supported parameters include:
+                inputs (dict | None): Override pipeline input values. Example: {"data_date": "2025-04-28"}
+                final_vars (list[str] | None): Specify which output variables to return.
+                    Example: ["model", "metrics"]
+                config (dict | None): Configuration for Hamilton pipeline executor.
+                    Example: {"model": "LogisticRegression"}
+                cache (dict | None): Cache configuration for results. Example: {"recompute": ["node1", "final_node"]}
+                executor_cfg (str | dict | ExecutorConfig | None): Execution configuration, can be:
+                    - str: Executor name, e.g. "threadpool", "local"
+                    - dict: Raw config, e.g. {"type": "threadpool", "max_workers": 4}
+                    - ExecutorConfig: Structured config object
+                with_adapter_cfg (dict | WithAdapterConfig | None): Adapter settings for pipeline execution.
+                    Example: {"opentelemetry": True, "tracker": False}
+                pipeline_adapter_cfg (dict | PipelineAdapterConfig | None): Pipeline-specific adapter settings.
+                    Example: {"tracker": {"project_id": "123", "tags": {"env": "prod"}}}
+                project_adapter_cfg (dict | ProjectAdapterConfig | None): Project-level adapter settings.
+                    Example: {"opentelemetry": {"host": "http://localhost:4317"}}
+                adapter (dict[str, Any] | None): Custom adapter instance for pipeline
+                    Example: {"ray_graph_adapter": RayGraphAdapter()}
+                reload (bool): Force reload of pipeline configuration.
+                log_level (str | None): Logging level for the execution. Default None uses project config.
+                    Valid values: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"
+                max_retries (int): Maximum number of retries for execution.
+                retry_delay (float): Delay between retries in seconds.
+                jitter_factor (float): Random jitter factor to add to retry delay
+                retry_exceptions (tuple): Exceptions that trigger a retry.
+                on_success (Callable | tuple[Callable, tuple | None, dict | None] | None): Callback to run on successful pipeline execution.
+                on_failure (Callable | tuple[Callable, tuple | None, dict | None] | None): Callback to run on pipeline execution failure.
 
        Returns:
            dict[str, Any]: Pipeline execution results, mapping output variable names
@@ -475,46 +532,47 @@ class PipelineManager:
            >>> # Basic pipeline run
            >>> results = manager.run("data_pipeline")
            >>>
-            >>> # Complex run with overrides
+            >>> # Run with custom RunConfig
+            >>> from flowerpower.cfg.pipeline.run import RunConfig
+            >>> config = RunConfig(inputs={"date": "2025-04-28"}, final_vars=["result"])
+            >>> results = manager.run("ml_pipeline", run_config=config)
+            >>>
+            >>> # Complex run with kwargs overrides
            >>> results = manager.run(
-            ...     name="ml_pipeline",
-            ...     inputs={
-            ...         "training_date": "2025-04-28",
-            ...         "model_params": {"n_estimators": 100}
-            ...     },
+            ...     "ml_pipeline",
+            ...     inputs={"training_date": "2025-04-28"},
            ...     final_vars=["model", "metrics"],
            ...     executor_cfg={"type": "threadpool", "max_workers": 4},
            ...     with_adapter_cfg={"tracker": True},
            ...     reload=True
            ... )
        """
+        # Initialize run_config - use provided config or load pipeline default
+        if run_config is None:
+            run_config = self.load_pipeline(name=name).run
+
+        # Merge kwargs into run_config
+        if kwargs:
+            run_config = self._merge_run_config_with_kwargs(run_config, kwargs)
+
+        # Set up logging for this specific run if log_level is provided
+        if run_config.log_level is not None:
+            setup_logging(level=run_config.log_level)
+        else:
+            # Ensure logging is reset to default if no specific level is provided for this run
+            setup_logging()
+
        # Use injected project context, fallback to self for backward compatibility
        project_context = getattr(self, "_project_context", self)
 
        # Get Pipeline instance from registry
        pipeline = self.registry.get_pipeline(
-            name=name, project_context=project_context, reload=reload
+            name=name, project_context=project_context, reload=run_config.reload
        )
 
        # Execute pipeline using its own run method
        return pipeline.run(
-            inputs=inputs,
-            final_vars=final_vars,
-            config=config,
-            cache=cache,
-            executor_cfg=executor_cfg,
-            with_adapter_cfg=with_adapter_cfg,
-            pipeline_adapter_cfg=pipeline_adapter_cfg,
-            project_adapter_cfg=project_adapter_cfg,
-            adapter=adapter,
-            reload=reload,
-            log_level=log_level,
-            max_retries=max_retries,
-            retry_delay=retry_delay,
-            jitter_factor=jitter_factor,
-            retry_exceptions=retry_exceptions,
-            on_success=on_success,
-            on_failure=on_failure,
+            run_config=run_config,
        )
 
    # --- Delegated Methods ---
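The net effect of this hunk is that `PipelineManager.run()` now takes a single `RunConfig` plus keyword overrides instead of roughly eighteen discrete parameters. A minimal sketch of the two calling styles, assuming a project containing a pipeline named "ml_pipeline" (names and values are illustrative):

```python
# Sketch only: base_dir, pipeline name, and input values are hypothetical.
from flowerpower.cfg.pipeline.run import RunConfig
from flowerpower.pipeline.manager import PipelineManager

manager = PipelineManager(base_dir="/path/to/project")

# Style 1: pass an explicit RunConfig object.
cfg = RunConfig(inputs={"training_date": "2025-04-28"}, final_vars=["model"])
results = manager.run("ml_pipeline", run_config=cfg)

# Style 2: keyword overrides; run() loads the pipeline's default RunConfig and
# merges these values via _merge_run_config_with_kwargs().
results = manager.run(
    "ml_pipeline",
    inputs={"training_date": "2025-04-28"},
    final_vars=["model"],
    executor_cfg={"type": "threadpool", "max_workers": 4},
)
```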