FlowerPower 0.21.0__py3-none-any.whl → 0.31.0__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in a supported public registry, and is provided for informational purposes only.
- flowerpower/cfg/__init__.py +143 -25
- flowerpower/cfg/base.py +132 -11
- flowerpower/cfg/exceptions.py +53 -0
- flowerpower/cfg/pipeline/__init__.py +151 -35
- flowerpower/cfg/pipeline/adapter.py +1 -0
- flowerpower/cfg/pipeline/builder.py +24 -25
- flowerpower/cfg/pipeline/builder_adapter.py +142 -0
- flowerpower/cfg/pipeline/builder_executor.py +101 -0
- flowerpower/cfg/pipeline/run.py +134 -22
- flowerpower/cfg/project/__init__.py +59 -14
- flowerpower/cfg/project/adapter.py +6 -0
- flowerpower/cli/__init__.py +8 -9
- flowerpower/cli/cfg.py +0 -38
- flowerpower/cli/pipeline.py +121 -83
- flowerpower/cli/utils.py +120 -71
- flowerpower/flowerpower.py +94 -120
- flowerpower/pipeline/config_manager.py +180 -0
- flowerpower/pipeline/executor.py +126 -0
- flowerpower/pipeline/lifecycle_manager.py +231 -0
- flowerpower/pipeline/manager.py +121 -276
- flowerpower/pipeline/pipeline.py +66 -278
- flowerpower/pipeline/registry.py +45 -4
- flowerpower/utils/__init__.py +19 -0
- flowerpower/utils/adapter.py +286 -0
- flowerpower/utils/callback.py +73 -67
- flowerpower/utils/config.py +306 -0
- flowerpower/utils/executor.py +178 -0
- flowerpower/utils/filesystem.py +194 -0
- flowerpower/utils/misc.py +249 -76
- flowerpower/utils/security.py +221 -0
- {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/METADATA +1 -13
- flowerpower-0.31.0.dist-info/RECORD +53 -0
- flowerpower/cfg/pipeline/_schedule.py +0 -32
- flowerpower/cli/mqtt.py +0 -168
- flowerpower/plugins/mqtt/__init__.py +0 -8
- flowerpower-0.21.0.dist-info/RECORD +0 -44
- {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/WHEEL +0 -0
- {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/entry_points.txt +0 -0
- {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/licenses/LICENSE +0 -0
- {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/top_level.txt +0 -0
flowerpower/pipeline/pipeline.py
CHANGED
@@ -9,6 +9,7 @@ import importlib.util
 import random
 import time
 from typing import TYPE_CHECKING, Any, Callable
+from requests.exceptions import HTTPError, ConnectionError, Timeout  # Example exception

 import humanize
 import msgspec
@@ -20,6 +21,8 @@ from hamilton_sdk.api.clients import UnauthorizedException
 from requests.exceptions import ConnectionError, HTTPError

 from .. import settings
+from ..utils.adapter import create_adapter_manager
+from ..utils.executor import create_executor_factory

 if importlib.util.find_spec("opentelemetry"):
     from hamilton.plugins import h_opentelemetry
@@ -57,8 +60,9 @@ else:

 from ..cfg import PipelineConfig, ProjectConfig
 from ..cfg.pipeline.adapter import AdapterConfig as PipelineAdapterConfig
-from ..cfg.pipeline.run import ExecutorConfig, RunConfig
+from ..cfg.pipeline.run import ExecutorConfig, RunConfig
 from ..cfg.project.adapter import AdapterConfig as ProjectAdapterConfig
+from ..utils.config import merge_run_config_with_kwargs

 if TYPE_CHECKING:
     from ..flowerpower import FlowerPowerProject
@@ -82,83 +86,20 @@ class Pipeline(msgspec.Struct):
     config: PipelineConfig
     module: Any
     project_context: FlowerPowerProject
+    _adapter_manager: Any = None
+    _executor_factory: Any = None

     def __post_init__(self):
-        """Initialize Hamilton settings."""
+        """Initialize Hamilton settings and utility managers."""
         if not settings.HAMILTON_TELEMETRY_ENABLED:
             disable_telemetry()
         if not settings.HAMILTON_AUTOLOAD_EXTENSIONS:
             disable_autoload()

-[… 4 lines not captured in the extracted diff …]
-            run_config: The base RunConfig object to merge into
-            kwargs: Additional parameters to merge into the run_config
-
-        Returns:
-            Updated RunConfig object with merged kwargs
-        """
-        from copy import deepcopy
-
-        # Create a deep copy of the run_config to avoid modifying the original
-        merged_config = deepcopy(run_config)
-
-        # Handle each possible kwarg
-        for key, value in kwargs.items():
-            if key == 'inputs' and value is not None:
-                if merged_config.inputs is None:
-                    merged_config.inputs = {}
-                merged_config.inputs.update(value)
-            elif key == 'final_vars' and value is not None:
-                if merged_config.final_vars is None:
-                    merged_config.final_vars = []
-                merged_config.final_vars = value
-            elif key == 'config' and value is not None:
-                if merged_config.config is None:
-                    merged_config.config = {}
-                merged_config.config.update(value)
-            elif key == 'cache' and value is not None:
-                merged_config.cache = value
-            elif key == 'executor_cfg' and value is not None:
-                if isinstance(value, str):
-                    merged_config.executor = ExecutorConfig(type=value)
-                elif isinstance(value, dict):
-                    merged_config.executor = ExecutorConfig.from_dict(value)
-                elif isinstance(value, ExecutorConfig):
-                    merged_config.executor = value
-            elif key == 'with_adapter_cfg' and value is not None:
-                if isinstance(value, dict):
-                    merged_config.with_adapter = WithAdapterConfig.from_dict(value)
-                elif isinstance(value, WithAdapterConfig):
-                    merged_config.with_adapter = value
-            elif key == 'pipeline_adapter_cfg' and value is not None:
-                merged_config.pipeline_adapter_cfg = value
-            elif key == 'project_adapter_cfg' and value is not None:
-                merged_config.project_adapter_cfg = value
-            elif key == 'adapter' and value is not None:
-                if merged_config.adapter is None:
-                    merged_config.adapter = {}
-                merged_config.adapter.update(value)
-            elif key == 'reload' and value is not None:
-                merged_config.reload = value
-            elif key == 'log_level' and value is not None:
-                merged_config.log_level = value
-            elif key == 'max_retries' and value is not None:
-                merged_config.max_retries = value
-            elif key == 'retry_delay' and value is not None:
-                merged_config.retry_delay = value
-            elif key == 'jitter_factor' and value is not None:
-                merged_config.jitter_factor = value
-            elif key == 'retry_exceptions' and value is not None:
-                merged_config.retry_exceptions = value
-            elif key == 'on_success' and value is not None:
-                merged_config.on_success = value
-            elif key == 'on_failure' and value is not None:
-                merged_config.on_failure = value
-
-        return merged_config
+        # Initialize utility managers
+        self._adapter_manager = create_adapter_manager()
+        self._executor_factory = create_executor_factory()
+

     def run(
         self,
@@ -182,7 +123,7 @@ class Pipeline(msgspec.Struct):

         # Merge kwargs into the run_config
         if kwargs:
-            run_config = […]
+            run_config = merge_run_config_with_kwargs(run_config, kwargs)

         # Reload module if requested
         if run_config.reload:
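The kwargs-merging helper deleted in the hunk above now lives in the new flowerpower/utils/config.py (+306 lines) and is called as merge_run_config_with_kwargs(run_config, kwargs). The sketch below is a condensed reconstruction inferred from the deleted inline code, not the packaged implementation; it shows the pattern (deep copy, then per-key merge) and omits several keys the shipped version handles, such as with_adapter_cfg and adapter.

```python
# Condensed sketch of merge_run_config_with_kwargs, inferred from the inline code
# deleted above; the shipped flowerpower/utils/config.py handles more keys
# (with_adapter_cfg, pipeline_adapter_cfg, project_adapter_cfg, adapter, ...).
from copy import deepcopy

from flowerpower.cfg.pipeline.run import ExecutorConfig, RunConfig  # same classes imported in pipeline.py


def merge_run_config_with_kwargs(run_config: RunConfig, kwargs: dict) -> RunConfig:
    merged = deepcopy(run_config)  # never mutate the caller's config

    if kwargs.get("inputs") is not None:  # dict-valued field: update, don't replace
        merged.inputs = {**(merged.inputs or {}), **kwargs["inputs"]}
    if kwargs.get("final_vars") is not None:
        merged.final_vars = kwargs["final_vars"]
    if kwargs.get("executor_cfg") is not None:  # accepts str, dict, or ExecutorConfig
        value = kwargs["executor_cfg"]
        if isinstance(value, str):
            merged.executor = ExecutorConfig(type=value)
        elif isinstance(value, dict):
            merged.executor = ExecutorConfig.from_dict(value)
        else:
            merged.executor = value

    # Scalar fields are assigned directly when provided.
    for key in ("cache", "reload", "log_level", "max_retries", "retry_delay",
                "jitter_factor", "retry_exceptions", "on_success", "on_failure"):
        if kwargs.get(key) is not None:
            setattr(merged, key, kwargs[key])

    return merged
```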
@@ -222,23 +163,33 @@
         # Convert string exceptions to actual exception classes
         if retry_exceptions and isinstance(retry_exceptions, (list, tuple)):
             converted_exceptions = []
+            # Safe mapping of exception names to classes
+            exception_mapping = {
+                'Exception': Exception,
+                'ValueError': ValueError,
+                'TypeError': TypeError,
+                'RuntimeError': RuntimeError,
+                'FileNotFoundError': FileNotFoundError,
+                'PermissionError': PermissionError,
+                'ConnectionError': ConnectionError,
+                'TimeoutError': TimeoutError,
+                'KeyError': KeyError,
+                'AttributeError': AttributeError,
+                'ImportError': ImportError,
+                'OSError': OSError,
+                'IOError': IOError,
+                'HTTPError': HTTPError,
+                'ConnectionError': ConnectionError,
+                'Timeout': Timeout,
+            }
             for exc in retry_exceptions:
                 if isinstance(exc, str):
-[… 4 lines not captured in the extracted diff …]
-                            exc_class, BaseException
-                        ):
-                            converted_exceptions.append(exc_class)
-                        else:
-                            logger.warning(
-                                f"'{exc}' is not an exception class, using Exception"
-                            )
-                            converted_exceptions.append(Exception)
-                    except (NameError, AttributeError):
+                    exc_class = exception_mapping.get(exc)
+                    if exc_class is not None:
+                        converted_exceptions.append(exc_class)
+                    else:
                         logger.warning(
-                            f"Unknown exception […]
+                            f"Unknown exception '{exc}', using Exception as fallback"
                         )
                         converted_exceptions.append(Exception)
                 elif isinstance(exc, type) and issubclass(exc, BaseException):
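The deleted branch's `except (NameError, AttributeError)` suggests the old code resolved exception names dynamically at runtime; the new version only accepts names from an explicit whitelist. Note that 'ConnectionError' appears twice in the shipped mapping: in a dict literal the later entry wins, so it resolves to requests.exceptions.ConnectionError (imported at the top of pipeline.py) rather than the builtin. A small illustration of the whitelist pattern with hypothetical input values; the mapping is only a subset of the one in the diff:

```python
# Hypothetical illustration of the whitelist-based conversion added above.
from requests.exceptions import ConnectionError, Timeout  # mirrors the pipeline.py import

exception_mapping = {
    "ValueError": ValueError,
    "TimeoutError": TimeoutError,
    "ConnectionError": ConnectionError,  # requests' class; in the shipped mapping the duplicate key shadows the builtin
    "Timeout": Timeout,
}

retry_exceptions = ["ValueError", "Timeout", "SomeCustomError"]  # example input
converted = [exception_mapping.get(name, Exception) for name in retry_exceptions]
print(converted)  # unknown names fall back to Exception, as in the new code
```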
@@ -379,6 +330,7 @@ class Pipeline(msgspec.Struct):
         """Get the executor based on the provided configuration."""
         logger.debug("Setting up executor...")

+        # Merge with default configuration
         if executor_cfg:
             if isinstance(executor_cfg, str):
                 executor_cfg = ExecutorConfig(type=executor_cfg)
@@ -393,60 +345,25 @@ class Pipeline(msgspec.Struct):
         else:
             executor_cfg = self.config.run.executor

-[… 2 lines not captured in the extracted diff …]
-            return executors.SynchronousLocalTaskExecutor(), None
+        # Create executor using factory
+        executor = self._executor_factory.create_executor(executor_cfg)

-[… 6 lines not captured in the extracted diff …]
-            ), None
-        elif executor_cfg.type == "processpool":
-            logger.debug(
-                f"Using MultiProcessingExecutor with max_workers={executor_cfg.max_workers}"
-            )
-            return executors.MultiProcessingExecutor(
-                max_tasks=executor_cfg.max_workers
-            ), None
-        elif executor_cfg.type == "ray":
-            if h_ray:
-                logger.debug(
-                    f"Using RayTaskExecutor with num_cpus={executor_cfg.num_cpus}"
-                )
-
-                # Handle temporary case where project_context is PipelineManager
-                project_cfg = getattr(
-                    self.project_context, "project_cfg", None
-                ) or getattr(self.project_context, "_project_cfg", None)
+        # Handle special cleanup for certain executor types
+        cleanup_fn = None
+        if executor_cfg.type == "ray" and h_ray:
+            # Handle temporary case where project_context is PipelineManager
+            project_cfg = getattr(
+                self.project_context, "project_cfg", None
+            ) or getattr(self.project_context, "_project_cfg", None)

-[… 2 lines not captured in the extracted diff …]
-                        num_cpus=executor_cfg.num_cpus,
-                        ray_init_config=project_cfg.adapter.ray.ray_init_config,
-                    ),
+            if project_cfg and hasattr(project_cfg.adapter, 'ray'):
+                cleanup_fn = (
                     ray.shutdown
                     if project_cfg.adapter.ray.shutdown_ray_on_completion
-                    else None
+                    else None
                 )
-[… 2 lines not captured in the extracted diff …]
-            return executors.SynchronousLocalTaskExecutor(), None
-        elif executor_cfg.type == "dask":
-            if distributed:
-                cluster = distributed.LocalCluster()
-                client = distributed.Client(cluster)
-                return h_dask.DaskExecutor(client=client), cluster.close
-            else:
-                logger.warning("Dask is not installed. Using local executor.")
-                return executors.SynchronousLocalTaskExecutor(), None
-        else:
-            logger.warning(
-                f"Unknown executor type: {executor_cfg.type}. Using local executor."
-            )
-            return executors.SynchronousLocalTaskExecutor(), None
+
+        return executor, cleanup_fn

     def _get_adapters(
         self,
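Executor construction is now delegated to the ExecutorFactory exported by the new flowerpower/utils/executor.py (+178 lines); only the Ray shutdown hook stays inline and is returned as cleanup_fn. The sketch below shows roughly what such a factory does, based on the branches deleted above; it is an assumption about its shape, not the packaged code, and it omits the ray/dask handling.

```python
# Minimal sketch of an executor factory, based on the branches deleted above.
# The shipped flowerpower/utils/executor.py also covers "ray" and "dask" and logs
# its choices; this is an assumption about its shape, not the real implementation.
from hamilton.execution import executors


class ExecutorFactory:
    def create_executor(self, executor_cfg):
        """Map an ExecutorConfig to a Hamilton task executor."""
        if executor_cfg.type == "threadpool":
            return executors.MultiThreadingExecutor(max_tasks=executor_cfg.max_workers)
        if executor_cfg.type == "processpool":
            return executors.MultiProcessingExecutor(max_tasks=executor_cfg.max_workers)
        # Unknown types and missing optional extras fall back to the local executor.
        return executors.SynchronousLocalTaskExecutor()


def create_executor_factory() -> ExecutorFactory:
    return ExecutorFactory()
```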
@@ -458,152 +375,23 @@ class Pipeline(msgspec.Struct):
         """Set up the adapters for the pipeline."""
         logger.debug("Setting up adapters...")

-        # Resolve adapter configurations
-[… 3 lines not captured in the extracted diff …]
-            elif not isinstance(with_adapter_cfg, WithAdapterConfig):
-                raise TypeError(
-                    "with_adapter must be a dictionary or WithAdapterConfig instance."
-                )
-
-            with_adapter_cfg = self.config.run.with_adapter.merge(with_adapter_cfg)
-        else:
-            with_adapter_cfg = self.config.run.with_adapter
-
-        if pipeline_adapter_cfg:
-            if isinstance(pipeline_adapter_cfg, dict):
-                pipeline_adapter_cfg = PipelineAdapterConfig.from_dict(
-                    pipeline_adapter_cfg
-                )
-            elif not isinstance(pipeline_adapter_cfg, PipelineAdapterConfig):
-                raise TypeError(
-                    "pipeline_adapter_cfg must be a dictionary or PipelineAdapterConfig instance."
-                )
-
-            pipeline_adapter_cfg = self.config.adapter.merge(pipeline_adapter_cfg)
-        else:
-            pipeline_adapter_cfg = self.config.adapter
-
-        if project_adapter_cfg:
-            if isinstance(project_adapter_cfg, dict):
-                project_adapter_cfg = ProjectAdapterConfig.from_dict(
-                    project_adapter_cfg
-                )
-            elif not isinstance(project_adapter_cfg, ProjectAdapterConfig):
-                raise TypeError(
-                    "project_adapter_cfg must be a dictionary or ProjectAdapterConfig instance."
-                )
-
-            # Handle temporary case where project_context is PipelineManager
-            manager_project_cfg = getattr(
-                self.project_context, "project_cfg", None
-            ) or getattr(self.project_context, "_project_cfg", None)
-            if manager_project_cfg and hasattr(manager_project_cfg, "adapter"):
-                project_adapter_cfg = manager_project_cfg.adapter.merge(
-                    project_adapter_cfg
-                )
-            else:
-                # Use project context directly if it's FlowerPowerProject
-                if hasattr(self.project_context, "pipeline_manager"):
-                    pm_cfg = getattr(
-                        self.project_context.pipeline_manager, "project_cfg", None
-                    ) or getattr(
-                        self.project_context.pipeline_manager, "_project_cfg", None
-                    )
-                    base_cfg = pm_cfg.adapter if pm_cfg else None
-                    if base_cfg:
-                        project_adapter_cfg = base_cfg.merge(project_adapter_cfg)
-                    else:
-                        from ..cfg.project.adapter import \
-                            AdapterConfig as ProjectAdapterConfig
-
-                        project_adapter_cfg = ProjectAdapterConfig()
-                else:
-                    from ..cfg.project.adapter import \
-                        AdapterConfig as ProjectAdapterConfig
-
-                    project_adapter_cfg = ProjectAdapterConfig()
-        else:
-            # Handle temporary case where project_context is PipelineManager
-            manager_project_cfg = getattr(
-                self.project_context, "project_cfg", None
-            ) or getattr(self.project_context, "_project_cfg", None)
-            if manager_project_cfg and hasattr(manager_project_cfg, "adapter"):
-                project_adapter_cfg = manager_project_cfg.adapter
-            else:
-                # Use project context directly if it's FlowerPowerProject
-                if hasattr(self.project_context, "pipeline_manager"):
-                    pm_cfg = getattr(
-                        self.project_context.pipeline_manager, "project_cfg", None
-                    ) or getattr(
-                        self.project_context.pipeline_manager, "_project_cfg", None
-                    )
-                    project_adapter_cfg = pm_cfg.adapter if pm_cfg else None
-                else:
-                    project_adapter_cfg = None
-
-            # Create default adapter config if none found
-            if project_adapter_cfg is None:
-                from ..cfg.project.adapter import \
-                    AdapterConfig as ProjectAdapterConfig
-
-                project_adapter_cfg = ProjectAdapterConfig()
-
-        adapters = []
-
-        # Hamilton Tracker adapter
-        if with_adapter_cfg.hamilton_tracker:
-            tracker_kwargs = project_adapter_cfg.hamilton_tracker.to_dict()
-            tracker_kwargs.update(pipeline_adapter_cfg.hamilton_tracker.to_dict())
-            tracker_kwargs["hamilton_api_url"] = tracker_kwargs.pop("api_url", None)
-            tracker_kwargs["hamilton_ui_url"] = tracker_kwargs.pop("ui_url", None)
+        # Resolve adapter configurations using the adapter manager
+        with_adapter_cfg = self._adapter_manager.resolve_with_adapter_config(
+            with_adapter_cfg, self.config.run.with_adapter
+        )

-[… 3 lines not captured in the extracted diff …]
-            )
-            constants.MAX_LIST_LENGTH_CAPTURE = (
-                tracker_kwargs.pop("max_list_length_capture", None)
-                or settings.HAMILTON_MAX_LIST_LENGTH_CAPTURE
-            )
-            constants.CAPTURE_DATA_STATISTICS = (
-                tracker_kwargs.pop("capture_data_statistics", None)
-                or settings.HAMILTON_CAPTURE_DATA_STATISTICS
-            )
+        pipeline_adapter_cfg = self._adapter_manager.resolve_pipeline_adapter_config(
+            pipeline_adapter_cfg, self.config.adapter
+        )

-[… 2 lines not captured in the extracted diff …]
+        project_adapter_cfg = self._adapter_manager.resolve_project_adapter_config(
+            project_adapter_cfg, self.project_context
+        )

-        #
-[… 3 lines not captured in the extracted diff …]
-            else:
-                mlflow_kwargs = project_adapter_cfg.mlflow.to_dict()
-                mlflow_kwargs.update(pipeline_adapter_cfg.mlflow.to_dict())
-                mlflow_adapter = h_mlflow.MLFlowTracker(**mlflow_kwargs)
-                adapters.append(mlflow_adapter)
-
-        # OpenTelemetry adapter
-        if with_adapter_cfg.opentelemetry:
-            if h_opentelemetry is None:
-                logger.warning(
-                    "OpenTelemetry is not installed. Skipping OpenTelemetry adapter."
-                )
-            else:
-                otel_kwargs = project_adapter_cfg.opentelemetry.to_dict()
-                otel_kwargs.update(pipeline_adapter_cfg.opentelemetry.to_dict())
-                init_tracer()
-                otel_adapter = h_opentelemetry.OpenTelemetryTracker(**otel_kwargs)
-                adapters.append(otel_adapter)
-
-        # Progress bar adapter
-        if with_adapter_cfg.progressbar:
-            progressbar_kwargs = project_adapter_cfg.progressbar.to_dict()
-            progressbar_kwargs.update(pipeline_adapter_cfg.progressbar.to_dict())
-            progressbar_adapter = h_rich.ProgressBar(**progressbar_kwargs)
-            adapters.append(progressbar_adapter)
+        # Create adapters
+        adapters = self._adapter_manager.create_adapters(
+            with_adapter_cfg, pipeline_adapter_cfg, project_adapter_cfg
+        )

         # Add any additional adapters
         if adapter:
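Adapter handling moves to the AdapterManager exported by the new flowerpower/utils/adapter.py (+286 lines): the three resolve_*_config calls replace the type-checking and merging blocks deleted above, and create_adapters replaces the per-adapter construction. The deleted code shows the per-adapter pattern the manager presumably reproduces: project-level settings merged with pipeline-level overrides, then passed to the Hamilton adapter. A sketch for the progress-bar case, with a hypothetical helper name:

```python
# Sketch of the per-adapter merge pattern visible in the deleted code, shown for the
# progress-bar adapter. build_progressbar_adapter is a hypothetical helper name;
# in the shipped AdapterManager this presumably happens inside create_adapters.
from hamilton.plugins import h_rich  # requires the rich-based Hamilton plugin


def build_progressbar_adapter(project_adapter_cfg, pipeline_adapter_cfg):
    kwargs = project_adapter_cfg.progressbar.to_dict()          # project-level defaults
    kwargs.update(pipeline_adapter_cfg.progressbar.to_dict())   # pipeline-level overrides win
    return h_rich.ProgressBar(**kwargs)
```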
flowerpower/pipeline/registry.py
CHANGED
@@ -8,6 +8,8 @@ import sys
 from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any, Dict

+import msgspec
+
 import rich
 from fsspec_utils import AbstractFileSystem, filesystem
 from loguru import logger
@@ -48,8 +50,7 @@ class HookType(str, Enum):
         return self.value


-[… 1 line not captured in the extracted diff …]
-class CachedPipelineData:
+class CachedPipelineData(msgspec.Struct):
     """Container for cached pipeline data."""
     pipeline: "Pipeline"
     config: PipelineConfig
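CachedPipelineData now derives from msgspec.Struct instead of being a plain class (the removed line directly above it is not visible in this extract, and `from dataclasses import dataclass` stays imported). A msgspec.Struct declares typed fields much like a dataclass, with a generated __init__, __eq__, and __slots__, plus optional fast (de)serialization. A tiny illustration with made-up fields:

```python
# Illustration of msgspec.Struct semantics with made-up fields; the real
# CachedPipelineData holds pipeline, config, and module objects instead.
import msgspec


class Point(msgspec.Struct):
    x: int
    y: int = 0


p = Point(x=1)
print(p)                       # Point(x=1, y=0)
print(msgspec.json.encode(p))  # b'{"x":1,"y":0}'
```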
@@ -832,5 +833,45 @@
             f.write(template.format(function_name=function_name))

         rich.print(
-            f"🔧 Added hook [bold blue]{type.value}[/bold blue] to {to} as {function_name} for {name}"
-[… 1 line not captured in the extracted diff …]
+            f"🔧 Added hook [bold blue]{type.value}[/bold blue] to {to} as {function_name} for {name}")
+
+
+    def create_pipeline(
+        self,
+        name: str,
+        overwrite: bool = False,
+        template: str | None = None,
+        tags: list[str] | None = None,
+        description: str | None = None
+    ) -> None:
+        """Create a new pipeline.
+
+        This method provides compatibility with the lifecycle manager interface.
+        Additional parameters (template, tags, description) are currently not used.
+
+        Args:
+            name: Name of the pipeline to create
+            overwrite: Whether to overwrite existing pipeline
+            template: Template to use (not currently implemented)
+            tags: Tags for the pipeline (not currently implemented)
+            description: Description of the pipeline (not currently implemented)
+        """
+        self.new(name=name, overwrite=overwrite)
+
+    def delete_pipeline(
+        self,
+        name: str,
+        cfg: bool = True,
+        module: bool = False
+    ) -> None:
+        """Delete a pipeline.
+
+        This method provides compatibility with the lifecycle manager interface.
+
+        Args:
+            name: Name of the pipeline to delete
+            cfg: Whether to delete configuration files
+            module: Whether to delete module files
+        """
+        self.delete(name=name, cfg=cfg, module=module)
+
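Both wrappers simply delegate to the registry's existing new() and delete() methods, keeping the interface the new pipeline/lifecycle_manager.py expects. A hedged usage sketch follows; how the PipelineRegistry instance is obtained depends on your project wiring and is assumed here:

```python
# Hedged usage sketch: the registry instance is assumed to come from your project
# wiring (e.g. a PipelineManager); only the two wrapper calls are taken from the diff.
from flowerpower.pipeline.registry import PipelineRegistry


def recreate_pipeline(registry: PipelineRegistry, name: str) -> None:
    registry.delete_pipeline(name, cfg=True, module=True)  # drop config and module files
    registry.create_pipeline(name, overwrite=False)        # then scaffold the pipeline again
```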
flowerpower/utils/__init__.py
ADDED
@@ -0,0 +1,19 @@
+"""
+Utility modules for FlowerPower.
+
+This package contains utility classes and functions that help simplify
+the main codebase by centralizing common operations.
+"""
+
+from .adapter import AdapterManager, create_adapter_manager
+from .executor import ExecutorFactory, create_executor_factory
+from .filesystem import FilesystemHelper, create_filesystem_helper
+
+__all__ = [
+    "AdapterManager",
+    "create_adapter_manager",
+    "ExecutorFactory",
+    "create_executor_factory",
+    "FilesystemHelper",
+    "create_filesystem_helper",
+]
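The new flowerpower.utils package re-exports the factory helpers that Pipeline.__post_init__ now calls. A short hedged sketch of using them directly; only the no-argument adapter and executor factory calls appear in this diff, and create_filesystem_helper's signature is not shown, so the last call is an assumption:

```python
# Hedged sketch: imports mirror the __all__ list above. The first two no-argument
# factory calls match Pipeline.__post_init__ in this diff; create_filesystem_helper()
# is assumed to also take no arguments, which this diff does not show.
from flowerpower.utils import (
    create_adapter_manager,
    create_executor_factory,
    create_filesystem_helper,
)

adapter_manager = create_adapter_manager()
executor_factory = create_executor_factory()
fs_helper = create_filesystem_helper()
```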