FlowerPower 0.11.6.20__py3-none-any.whl → 0.20.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. flowerpower/cfg/__init__.py +3 -3
  2. flowerpower/cfg/pipeline/__init__.py +5 -3
  3. flowerpower/cfg/project/__init__.py +3 -3
  4. flowerpower/cfg/project/job_queue.py +1 -128
  5. flowerpower/cli/__init__.py +5 -5
  6. flowerpower/cli/cfg.py +0 -3
  7. flowerpower/cli/job_queue.py +400 -132
  8. flowerpower/cli/pipeline.py +14 -413
  9. flowerpower/cli/utils.py +0 -1
  10. flowerpower/flowerpower.py +537 -28
  11. flowerpower/job_queue/__init__.py +5 -94
  12. flowerpower/job_queue/base.py +201 -3
  13. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +0 -3
  14. flowerpower/job_queue/rq/manager.py +388 -77
  15. flowerpower/pipeline/__init__.py +2 -0
  16. flowerpower/pipeline/base.py +2 -2
  17. flowerpower/pipeline/io.py +14 -16
  18. flowerpower/pipeline/manager.py +21 -642
  19. flowerpower/pipeline/pipeline.py +571 -0
  20. flowerpower/pipeline/registry.py +242 -10
  21. flowerpower/pipeline/visualizer.py +1 -2
  22. flowerpower/plugins/_io/__init__.py +8 -0
  23. flowerpower/plugins/mqtt/manager.py +6 -6
  24. flowerpower/settings/backend.py +0 -2
  25. flowerpower/settings/job_queue.py +1 -57
  26. flowerpower/utils/misc.py +0 -256
  27. flowerpower/utils/monkey.py +1 -83
  28. {flowerpower-0.11.6.20.dist-info → flowerpower-0.20.0.dist-info}/METADATA +308 -152
  29. flowerpower-0.20.0.dist-info/RECORD +58 -0
  30. flowerpower/fs/__init__.py +0 -29
  31. flowerpower/fs/base.py +0 -662
  32. flowerpower/fs/ext.py +0 -2143
  33. flowerpower/fs/storage_options.py +0 -1420
  34. flowerpower/job_queue/apscheduler/__init__.py +0 -11
  35. flowerpower/job_queue/apscheduler/_setup/datastore.py +0 -110
  36. flowerpower/job_queue/apscheduler/_setup/eventbroker.py +0 -93
  37. flowerpower/job_queue/apscheduler/manager.py +0 -1051
  38. flowerpower/job_queue/apscheduler/setup.py +0 -554
  39. flowerpower/job_queue/apscheduler/trigger.py +0 -169
  40. flowerpower/job_queue/apscheduler/utils.py +0 -311
  41. flowerpower/pipeline/job_queue.py +0 -583
  42. flowerpower/pipeline/runner.py +0 -603
  43. flowerpower/plugins/io/base.py +0 -2520
  44. flowerpower/plugins/io/helpers/datetime.py +0 -298
  45. flowerpower/plugins/io/helpers/polars.py +0 -875
  46. flowerpower/plugins/io/helpers/pyarrow.py +0 -570
  47. flowerpower/plugins/io/helpers/sql.py +0 -202
  48. flowerpower/plugins/io/loader/__init__.py +0 -28
  49. flowerpower/plugins/io/loader/csv.py +0 -37
  50. flowerpower/plugins/io/loader/deltatable.py +0 -190
  51. flowerpower/plugins/io/loader/duckdb.py +0 -19
  52. flowerpower/plugins/io/loader/json.py +0 -37
  53. flowerpower/plugins/io/loader/mqtt.py +0 -159
  54. flowerpower/plugins/io/loader/mssql.py +0 -26
  55. flowerpower/plugins/io/loader/mysql.py +0 -26
  56. flowerpower/plugins/io/loader/oracle.py +0 -26
  57. flowerpower/plugins/io/loader/parquet.py +0 -35
  58. flowerpower/plugins/io/loader/postgres.py +0 -26
  59. flowerpower/plugins/io/loader/pydala.py +0 -19
  60. flowerpower/plugins/io/loader/sqlite.py +0 -23
  61. flowerpower/plugins/io/metadata.py +0 -244
  62. flowerpower/plugins/io/saver/__init__.py +0 -28
  63. flowerpower/plugins/io/saver/csv.py +0 -36
  64. flowerpower/plugins/io/saver/deltatable.py +0 -186
  65. flowerpower/plugins/io/saver/duckdb.py +0 -19
  66. flowerpower/plugins/io/saver/json.py +0 -36
  67. flowerpower/plugins/io/saver/mqtt.py +0 -28
  68. flowerpower/plugins/io/saver/mssql.py +0 -26
  69. flowerpower/plugins/io/saver/mysql.py +0 -26
  70. flowerpower/plugins/io/saver/oracle.py +0 -26
  71. flowerpower/plugins/io/saver/parquet.py +0 -36
  72. flowerpower/plugins/io/saver/postgres.py +0 -26
  73. flowerpower/plugins/io/saver/pydala.py +0 -20
  74. flowerpower/plugins/io/saver/sqlite.py +0 -24
  75. flowerpower/utils/scheduler.py +0 -311
  76. flowerpower-0.11.6.20.dist-info/RECORD +0 -102
  77. {flowerpower-0.11.6.20.dist-info → flowerpower-0.20.0.dist-info}/WHEEL +0 -0
  78. {flowerpower-0.11.6.20.dist-info → flowerpower-0.20.0.dist-info}/entry_points.txt +0 -0
  79. {flowerpower-0.11.6.20.dist-info → flowerpower-0.20.0.dist-info}/licenses/LICENSE +0 -0
  80. {flowerpower-0.11.6.20.dist-info → flowerpower-0.20.0.dist-info}/top_level.txt +0 -0
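The manifest's headline: the bundled filesystem layer (flowerpower/fs/), the I/O plugin tree (flowerpower/plugins/io/), and the APScheduler backend (flowerpower/job_queue/apscheduler/) are removed in 0.20.0, while execution logic consolidates into flowerpower/flowerpower.py and the new flowerpower/pipeline/pipeline.py. A file-level comparison like the table above can be reproduced from the two wheels themselves; the sketch below is illustrative and assumes both wheels have been downloaded into the working directory (filenames are examples).

# A .whl is a zip archive, so a file-level diff can be rebuilt with the
# standard library alone.
import zipfile

def wheel_files(path: str) -> set[str]:
    """Return the set of archive member names in a wheel."""
    with zipfile.ZipFile(path) as wheel:
        return set(wheel.namelist())

old = wheel_files("flowerpower-0.11.6.20-py3-none-any.whl")
new = wheel_files("flowerpower-0.20.0-py3-none-any.whl")

# e.g. the flowerpower/fs/ and flowerpower/plugins/io/ trees show up as removed,
# flowerpower/pipeline/pipeline.py as added.
print("removed:", *sorted(old - new), sep="\n  ")
print("added:", *sorted(new - old), sep="\n  ")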
flowerpower/cli/pipeline.py CHANGED
@@ -1,14 +1,12 @@
  # Import necessary libraries
- import datetime as dt
-
- import duration_parser
  import typer
  from loguru import logger
  from typing_extensions import Annotated

+ from ..flowerpower import FlowerPowerProject
  from ..pipeline.manager import HookType, PipelineManager
  from ..utils.logging import setup_logging
- from .utils import parse_dict_or_list_param # , parse_param_dict
+ from .utils import parse_dict_or_list_param

  setup_logging()

@@ -104,357 +102,19 @@ def run(
      parsed_storage_options = parse_dict_or_list_param(storage_options, "dict")
      parsed_with_adapter = parse_dict_or_list_param(with_adapter, "dict")

-     with PipelineManager(
-         base_dir=base_dir,
-         storage_options=parsed_storage_options or {},
-         log_level=log_level,
-     ) as manager:
-         _ = manager.run(
-             name=name,
-             inputs=parsed_inputs,
-             final_vars=parsed_final_vars,
-             config=parsed_config,
-             cache=parsed_cache,
-             executor_cfg=executor,
-             with_adapter_cfg=parsed_with_adapter,
-             max_retries=max_retries,
-             retry_delay=retry_delay,
-             jitter_factor=jitter_factor,
-         )
-         logger.info(f"Pipeline '{name}' finished running.")
-
-
- @app.command()
- def run_job(
-     name: str = typer.Argument(..., help="Name or ID of the pipeline job to run"),
-     executor: str | None = typer.Option(
-         None, help="Executor to use for running the job"
-     ),
-     base_dir: str | None = typer.Option(None, help="Base directory for the pipeline"),
-     inputs: str | None = typer.Option(
-         None, help="Input parameters as JSON, dict string, or key=value pairs"
-     ),
-     final_vars: str | None = typer.Option(None, help="Final variables as JSON or list"),
-     config: str | None = typer.Option(
-         None, help="Config for the hamilton pipeline executor"
-     ),
-     cache: str | None = typer.Option(
-         None, help="Cache configuration as JSON or dict string"
-     ),
-     storage_options: str | None = typer.Option(
-         None, help="Storage options as JSON, dict string, or key=value pairs"
-     ),
-     log_level: str | None = typer.Option(
-         None, help="Logging level (debug, info, warning, error, critical)"
-     ),
-     with_adapter: str | None = typer.Option(
-         None, help="Adapter configuration as JSON or dict string"
-     ),
-     max_retries: int = typer.Option(
-         0, help="Maximum number of retry attempts on failure"
-     ),
-     retry_delay: float = typer.Option(
-         1.0, help="Base delay between retries in seconds"
-     ),
-     jitter_factor: float = typer.Option(
-         0.1, help="Random factor applied to delay for jitter (0-1)"
-     ),
- ):
-     """
-     Run a specific pipeline job.
-
-     This command runs an existing job by its ID. The job should have been previously
-     added to the system via the add-job command or through scheduling.
-
-     Args:
-         name: Job ID to run
-         executor: Type of executor to use (maps to executor_cfg in manager)
-         base_dir: Base directory containing pipelines and configurations
-         inputs: Input parameters for the pipeline
-         final_vars: Final variables to request from the pipeline
-         config: Configuration for the Hamilton executor
-         cache: Cache configuration
-         storage_options: Options for storage backends
-         log_level: Set the logging level
-         with_adapter: Configuration for adapters like trackers or monitors
-         max_retries: Maximum number of retry attempts on failure
-         retry_delay: Base delay between retries in seconds
-         jitter_factor: Random factor applied to delay for jitter (0-1)
-
-     Examples:
-         # Run a job with a specific ID
-         $ pipeline run-job job-123456
-
-         # Run a job with custom inputs
-         $ pipeline run-job job-123456 --inputs '{"data_path": "data/myfile.csv"}'
-
-         # Specify a different executor
-         $ pipeline run-job job-123456 --executor local
-
-         # Use caching for better performance
-         $ pipeline run-job job-123456 --cache '{"type": "memory"}'
-
-         # Configure adapters for monitoring
-         $ pipeline run-job job-123456 --with-adapter '{"tracker": true, "opentelemetry": false}'
-
-         # Set up automatic retries for resilience
-         $ pipeline run-job job-123456 --max-retries 3 --retry-delay 2.0
-     """
-     parsed_inputs = parse_dict_or_list_param(inputs, "dict")
-     parsed_config = parse_dict_or_list_param(config, "dict")
-     parsed_cache = parse_dict_or_list_param(cache, "dict")
-     parsed_final_vars = parse_dict_or_list_param(final_vars, "list")
-     parsed_storage_options = parse_dict_or_list_param(storage_options, "dict")
-     parsed_with_adapter = parse_dict_or_list_param(with_adapter, "dict")
-
-     with PipelineManager(
-         base_dir=base_dir,
-         storage_options=parsed_storage_options or {},
-         log_level=log_level,
-     ) as manager:
-         _ = manager.run_job(
-             name=name,
-             inputs=parsed_inputs,
-             final_vars=parsed_final_vars,
-             config=parsed_config,
-             cache=parsed_cache,
-             executor_cfg=executor,
-             with_adapter_cfg=parsed_with_adapter,
-             max_retries=max_retries,
-             retry_delay=retry_delay,
-             jitter_factor=jitter_factor,
-         )
-         logger.info(f"Job '{name}' finished running.")
-
-
- @app.command()
- def add_job(
-     name: str = typer.Argument(..., help="Name of the pipeline to add as a job"),
-     executor: str | None = typer.Option(
-         None, help="Executor to use for running the job"
-     ),
-     base_dir: str | None = typer.Option(None, help="Base directory for the pipeline"),
-     inputs: str | None = typer.Option(
-         None, help="Input parameters as JSON, dict string, or key=value pairs"
-     ),
-     final_vars: str | None = typer.Option(None, help="Final variables as JSON or list"),
-     config: str | None = typer.Option(
-         None, help="Config for the hamilton pipeline executor"
-     ),
-     cache: str | None = typer.Option(
-         None, help="Cache configuration as JSON or dict string"
-     ),
-     storage_options: str | None = typer.Option(
-         None, help="Storage options as JSON, dict string, or key=value pairs"
-     ),
-     log_level: str | None = typer.Option(
-         None, help="Logging level (debug, info, warning, error, critical)"
-     ),
-     with_adapter: str | None = typer.Option(
-         None, help="Adapter configuration as JSON or dict string"
-     ),
-     run_at: str | None = typer.Option(None, help="Run at a specific time (ISO format)"),
-     run_in: str | None = typer.Option(
-         None, help="Run in a specific interval (e.g., '5m', '1h', '12m34s')"
-     ),
-     max_retries: int = typer.Option(
-         3, help="Maximum number of retry attempts on failure"
-     ),
-     retry_delay: float = typer.Option(
-         1.0, help="Base delay between retries in seconds"
-     ),
-     jitter_factor: float = typer.Option(
-         0.1, help="Random factor applied to delay for jitter (0-1)"
-     ),
- ):
-     """
-     Add a pipeline job to the queue.
-
-     This command adds a job to the queue for later execution. The job is based on
-     an existing pipeline with customized inputs and configuration.
-
-     Args:
-         name: Pipeline name to add as a job
-         executor: Type of executor to use
-         base_dir: Base directory containing pipelines and configurations
-         inputs: Input parameters for the pipeline
-         final_vars: Final variables to request from the pipeline
-         config: Configuration for the Hamilton executor
-         cache: Cache configuration
-         storage_options: Options for storage backends
-         log_level: Set the logging level
-         with_adapter: Configuration for adapters like trackers or monitors
-         run_at: Run the job at a specific time (ISO format)
-         run_in: Run the job in a specific interval (e.g., '5m', '1h')
-         max_retries: Maximum number of retry attempts on failure
-         retry_delay: Base delay between retries in seconds
-         jitter_factor: Random factor applied to delay for jitter (0-1)
-
-     Examples:
-         # Add a basic job
-         $ pipeline add-job my_pipeline
-
-         # Add a job with custom inputs
-         $ pipeline add-job my_pipeline --inputs '{"data_path": "data/myfile.csv"}'
-
-         # Specify final variables to calculate
-         $ pipeline add-job my_pipeline --final-vars '["output_table", "metrics"]'
-
-         # Configure caching
-         $ pipeline add-job my_pipeline --cache '{"type": "memory", "ttl": 3600}'
-
-         # Use a specific log level
-         $ pipeline add-job my_pipeline --log-level debug
-
-         # Configure automatic retries for resilience
-         $ pipeline add-job my_pipeline --max-retries 5 --retry-delay 2.0 --jitter-factor 0.2
-     """
-     parsed_inputs = parse_dict_or_list_param(inputs, "dict")
-     parsed_config = parse_dict_or_list_param(config, "dict")
-     parsed_cache = parse_dict_or_list_param(cache, "dict")
-     parsed_final_vars = parse_dict_or_list_param(final_vars, "list")
-     parsed_storage_options = parse_dict_or_list_param(storage_options, "dict")
-     parsed_with_adapter = parse_dict_or_list_param(with_adapter, "dict")
-     run_at = dt.datetime.fromisoformat(run_at) if run_at else None
-     run_in = duration_parser.parse(run_in) if run_in else None
-
-     with PipelineManager(
+     # Use FlowerPowerProject for better consistency with the new architecture
+     project = FlowerPowerProject.load(
          base_dir=base_dir,
          storage_options=parsed_storage_options or {},
          log_level=log_level,
-     ) as manager:
-         job_id = manager.add_job(
-             name=name,
-             inputs=parsed_inputs,
-             final_vars=parsed_final_vars,
-             config=parsed_config,
-             cache=parsed_cache,
-             executor_cfg=executor,
-             with_adapter_cfg=parsed_with_adapter,
-             run_at=run_at,
-             run_in=run_in,
-             max_retries=max_retries,
-             retry_delay=retry_delay,
-             jitter_factor=jitter_factor,
-         )
-         logger.info(f"Job {job_id} added for pipeline '{name}'.")
-
-
- @app.command()
- def schedule(
-     name: str = typer.Argument(..., help="Name of the pipeline to schedule"),
-     executor: str | None = typer.Option(
-         None, help="Executor to use for running the job"
-     ),
-     base_dir: str | None = typer.Option(None, help="Base directory for the pipeline"),
-     inputs: str | None = typer.Option(
-         None, help="Input parameters as JSON, dict string, or key=value pairs"
-     ),
-     final_vars: str | None = typer.Option(None, help="Final variables as JSON or list"),
-     config: str | None = typer.Option(
-         None, help="Config for the hamilton pipeline executor"
-     ),
-     cache: str | None = typer.Option(
-         None, help="Cache configuration as JSON or dict string"
-     ),
-     cron: str | None = typer.Option(None, help="Cron expression for scheduling"),
-     interval: str | None = typer.Option(
-         None, help="Interval for scheduling (e.g., '5m', '1h')"
-     ),
-     date: str | None = typer.Option(
-         None, help="Specific date and time for scheduling (ISO format)"
-     ),
-     storage_options: str | None = typer.Option(
-         None, help="Storage options as JSON, dict string, or key=value pairs"
-     ),
-     log_level: str | None = typer.Option(
-         None, help="Logging level (debug, info, warning, error, critical)"
-     ),
-     with_adapter: str | None = typer.Option(
-         None, help="Adapter configuration as JSON or dict string"
-     ),
-     overwrite: bool = typer.Option(
-         False, help="Overwrite existing schedule if it exists"
-     ),
-     schedule_id: str | None = typer.Option(
-         None, help="Custom ID for the schedule (autogenerated if not provided)"
-     ),
-     max_retries: int = typer.Option(
-         3, help="Maximum number of retry attempts on failure"
-     ),
-     retry_delay: float = typer.Option(
-         1.0, help="Base delay between retries in seconds"
-     ),
-     jitter_factor: float = typer.Option(
-         0.1, help="Random factor applied to delay for jitter (0-1)"
-     ),
- ):
-     """
-     Schedule a pipeline to run at specified times.
-
-     This command schedules a pipeline to run automatically based on various
-     scheduling triggers like cron expressions, time intervals, or specific dates.
-
-     Args:
-         name: Pipeline name to schedule
-         executor: Type of executor to use
-         base_dir: Base directory containing pipelines and configurations
-         inputs: Input parameters for the pipeline
-         final_vars: Final variables to request from the pipeline
-         config: Configuration for the Hamilton executor
-         cache: Cache configuration
-         cron: Cron expression for scheduling (e.g., "0 * * * *")
-         interval: Interval for scheduling (e.g., "5m", "1h")
-         date: Specific date and time for scheduling (ISO format)
-         storage_options: Options for storage backends
-         log_level: Set the logging level
-         with_adapter: Configuration for adapters like trackers or monitors
-         overwrite: Overwrite existing schedule with same ID
-         schedule_id: Custom identifier for the schedule
-         max_retries: Maximum number of retry attempts on failure
-         retry_delay: Base delay between retries in seconds
-         jitter_factor: Random factor applied to delay for jitter (0-1)
-
-     Examples:
-         # Schedule with cron expression (every hour)
-         $ pipeline schedule my_pipeline --trigger-type cron --crontab "0 * * * *"
-
-         # Schedule to run every 15 minutes
-         $ pipeline schedule my_pipeline --trigger-type interval --interval_params minutes=15
-
-         # Schedule to run at a specific date and time
-         $ pipeline schedule my_pipeline --trigger-type date --date_params run_date="2025-12-31 23:59:59"
-
-         # Schedule with custom inputs and cache settings
-         $ pipeline schedule my_pipeline --inputs '{"source": "database"}' --cache '{"type": "redis"}'
-
-         # Create a schedule in paused state
-         $ pipeline schedule my_pipeline --crontab "0 9 * * 1-5" --paused
-
-         # Set a custom schedule ID
-         $ pipeline schedule my_pipeline --crontab "0 12 * * *" --schedule_id "daily-noon-run"
-
-         # Configure automatic retries for resilience
-         $ pipeline schedule my_pipeline --max-retries 5 --retry-delay 2.0 --jitter-factor 0.2
-     """
-     parsed_inputs = parse_dict_or_list_param(inputs, "dict")
-     parsed_config = parse_dict_or_list_param(config, "dict")
-     parsed_cache = parse_dict_or_list_param(cache, "dict")
-     parsed_final_vars = parse_dict_or_list_param(final_vars, "list")
-     parsed_storage_options = parse_dict_or_list_param(storage_options, "dict")
-     parsed_with_adapter = parse_dict_or_list_param(with_adapter, "dict")
-     interval = duration_parser.parse(interval) if interval else None
-     cron = cron if cron else None
-     date = dt.datetime.fromisoformat(date) if date else None
+     )

-     with PipelineManager(
-         base_dir=base_dir,
-         storage_options=parsed_storage_options or {},
-         log_level=log_level,
-     ) as manager:
-         # Combine common schedule kwargs
+     if project is None:
+         logger.error(f"Failed to load FlowerPower project from {base_dir or '.'}")
+         raise typer.Exit(1)

-         id_ = manager.schedule(
+     try:
+         _ = project.run(
              name=name,
              inputs=parsed_inputs,
              final_vars=parsed_final_vars,
@@ -462,73 +122,14 @@ def schedule(
              cache=parsed_cache,
              executor_cfg=executor,
              with_adapter_cfg=parsed_with_adapter,
-             cron=cron,
-             interval=interval,
-             date=date,
-             overwrite=overwrite,
-             schedule_id=schedule_id,
              max_retries=max_retries,
              retry_delay=retry_delay,
              jitter_factor=jitter_factor,
          )
-
-         logger.info(f"Pipeline '{name}' scheduled with ID {id_}.")
-
-
- @app.command()
- def schedule_all(
-     executor: str | None = typer.Option(
-         None, help="Override executor specified in pipeline configs"
-     ),
-     base_dir: str | None = typer.Option(
-         None, help="Base directory containing pipelines and configurations"
-     ),
-     storage_options: str | None = typer.Option(
-         None, help="Storage options as JSON, dict string, or key=value pairs"
-     ),
-     log_level: str | None = typer.Option(
-         None, help="Logging level (debug, info, warning, error, critical)"
-     ),
-     overwrite: bool = typer.Option(
-         False, help="Overwrite existing schedules if they exist"
-     ),
- ):
-     """
-     Schedule all pipelines based on their individual configurations.
-
-     This command reads the configuration files for all pipelines in the project
-     and schedules them based on their individual scheduling settings. This is useful
-     for setting up all scheduled pipelines at once after deployment or system restart.
-
-     Args:
-         executor: Override executor specified in pipeline configs
-         base_dir: Base directory containing pipelines and configurations
-         storage_options: Options for storage backends
-         log_level: Set the logging level
-         overwrite: Whether to overwrite existing schedules
-
-     Examples:
-         # Schedule all pipelines using their configurations
-         $ pipeline schedule-all
-
-         # Force overwrite of existing schedules
-         $ pipeline schedule-all --overwrite
-
-         # Override executor for all pipelines
-         $ pipeline schedule-all --executor distributed
-
-         # Set custom base directory
-         $ pipeline schedule-all --base-dir /path/to/project
-     """
-     parsed_storage_options = parse_dict_or_list_param(storage_options, "dict")
-
-     with PipelineManager(
-         base_dir=base_dir,
-         storage_options=parsed_storage_options or {},
-         log_level=log_level,
-     ) as manager:
-         manager.schedule_all(overwrite=overwrite, executor_cfg=executor)
-         logger.info("Scheduled all pipelines based on their configurations.")
+         logger.info(f"Pipeline '{name}' finished running.")
+     except Exception as e:
+         logger.error(f"Pipeline execution failed: {e}")
+         raise typer.Exit(1)


  @app.command()
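Read as a whole, the pipeline.py hunks swap the `with PipelineManager(...)` context-manager pattern for a load-then-run flow on FlowerPowerProject, and drop the run-job/add-job/schedule/schedule-all commands from this module (the +400/-132 churn in flowerpower/cli/job_queue.py above suggests the queue-facing commands now live there, though this diff does not show it). A minimal sketch of the new call path, reconstructed only from the added lines; the pipeline name, inputs, and final_vars are hypothetical:

# Sketch of the 0.20.0 execution path per the added lines above; only
# names visible in this diff are used, and argument values are made up.
import typer
from loguru import logger

from flowerpower.flowerpower import FlowerPowerProject


def run_pipeline(name: str, base_dir: str | None = None) -> None:
    project = FlowerPowerProject.load(
        base_dir=base_dir,      # None falls back to the current directory
        storage_options={},     # parsed from --storage-options in the real CLI
        log_level="info",
    )
    if project is None:
        logger.error(f"Failed to load FlowerPower project from {base_dir or '.'}")
        raise typer.Exit(1)

    try:
        project.run(
            name=name,
            inputs={"data_path": "data/myfile.csv"},  # hypothetical input
            final_vars=["output_table"],              # hypothetical output var
            max_retries=0,       # defaults mirror the deleted CLI options
            retry_delay=1.0,
            jitter_factor=0.1,
        )
        logger.info(f"Pipeline '{name}' finished running.")
    except Exception as e:
        logger.error(f"Pipeline execution failed: {e}")
        raise typer.Exit(1)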
flowerpower/cli/utils.py CHANGED
@@ -4,7 +4,6 @@ import json
  import posixpath
  import re
  import sys
- from pathlib import Path
  from typing import Callable

  from loguru import logger