FlowerPower 0.20.0__py3-none-any.whl → 0.30.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51)
  1. flowerpower/__init__.py +2 -6
  2. flowerpower/cfg/__init__.py +4 -11
  3. flowerpower/cfg/base.py +29 -25
  4. flowerpower/cfg/pipeline/__init__.py +3 -3
  5. flowerpower/cfg/pipeline/_schedule.py +32 -0
  6. flowerpower/cfg/pipeline/adapter.py +0 -5
  7. flowerpower/cfg/pipeline/builder.py +377 -0
  8. flowerpower/cfg/pipeline/run.py +89 -0
  9. flowerpower/cfg/project/__init__.py +8 -21
  10. flowerpower/cfg/project/adapter.py +0 -12
  11. flowerpower/cli/__init__.py +2 -28
  12. flowerpower/cli/pipeline.py +10 -4
  13. flowerpower/flowerpower.py +275 -585
  14. flowerpower/pipeline/base.py +19 -10
  15. flowerpower/pipeline/io.py +52 -46
  16. flowerpower/pipeline/manager.py +149 -91
  17. flowerpower/pipeline/pipeline.py +159 -87
  18. flowerpower/pipeline/registry.py +68 -33
  19. flowerpower/pipeline/visualizer.py +4 -4
  20. flowerpower/plugins/{_io → io}/__init__.py +1 -1
  21. flowerpower/settings/__init__.py +0 -2
  22. flowerpower/settings/{backend.py → _backend.py} +0 -19
  23. flowerpower/settings/logging.py +1 -1
  24. flowerpower/utils/logging.py +24 -12
  25. flowerpower/utils/misc.py +17 -0
  26. flowerpower-0.30.0.dist-info/METADATA +451 -0
  27. flowerpower-0.30.0.dist-info/RECORD +42 -0
  28. flowerpower/cfg/pipeline/schedule.py +0 -74
  29. flowerpower/cfg/project/job_queue.py +0 -111
  30. flowerpower/cli/job_queue.py +0 -1329
  31. flowerpower/cli/mqtt.py +0 -174
  32. flowerpower/job_queue/__init__.py +0 -205
  33. flowerpower/job_queue/base.py +0 -611
  34. flowerpower/job_queue/rq/__init__.py +0 -10
  35. flowerpower/job_queue/rq/_trigger.py +0 -37
  36. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +0 -226
  37. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +0 -228
  38. flowerpower/job_queue/rq/manager.py +0 -1893
  39. flowerpower/job_queue/rq/setup.py +0 -154
  40. flowerpower/job_queue/rq/utils.py +0 -69
  41. flowerpower/mqtt.py +0 -12
  42. flowerpower/plugins/mqtt/__init__.py +0 -12
  43. flowerpower/plugins/mqtt/cfg.py +0 -17
  44. flowerpower/plugins/mqtt/manager.py +0 -962
  45. flowerpower/settings/job_queue.py +0 -31
  46. flowerpower-0.20.0.dist-info/METADATA +0 -693
  47. flowerpower-0.20.0.dist-info/RECORD +0 -58
  48. {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/WHEEL +0 -0
  49. {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/entry_points.txt +0 -0
  50. {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/licenses/LICENSE +0 -0
  51. {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,6 @@
1
1
  import msgspec
2
2
  from munch import munchify
3
+ from typing import Any, Callable
3
4
 
4
5
  from ... import settings
5
6
  from ..base import BaseConfig
@@ -33,6 +34,39 @@ class RunConfig(BaseConfig):
33
34
  retry_delay: int | float = msgspec.field(default=1)
34
35
  jitter_factor: float | None = msgspec.field(default=0.1)
35
36
  retry_exceptions: list[str] = msgspec.field(default_factory=lambda: ["Exception"])
37
+ # New fields for comprehensive configuration
38
+ pipeline_adapter_cfg: dict | None = msgspec.field(default=None)
39
+ project_adapter_cfg: dict | None = msgspec.field(default=None)
40
+ adapter: dict[str, Any] | None = msgspec.field(default=None)
41
+ reload: bool = msgspec.field(default=False)
42
+
43
+
44
+ class CallbackSpec(msgspec.Struct):
45
+ """Specification for a callback function with optional arguments."""
46
+ func: Callable
47
+ args: tuple | None = None
48
+ kwargs: dict | None = None
49
+
50
+
51
+ class RunConfig(BaseConfig):
52
+ inputs: dict | None = msgspec.field(default_factory=dict)
53
+ final_vars: list[str] | None = msgspec.field(default_factory=list)
54
+ config: dict | None = msgspec.field(default_factory=dict)
55
+ cache: dict | bool | None = msgspec.field(default=False)
56
+ with_adapter: WithAdapterConfig = msgspec.field(default_factory=WithAdapterConfig)
57
+ executor: ExecutorConfig = msgspec.field(default_factory=ExecutorConfig)
58
+ log_level: str | None = msgspec.field(default="INFO")
59
+ max_retries: int = msgspec.field(default=3)
60
+ retry_delay: int | float = msgspec.field(default=1)
61
+ jitter_factor: float | None = msgspec.field(default=0.1)
62
+ retry_exceptions: list[str] = msgspec.field(default_factory=lambda: ["Exception"])
63
+ # New fields for comprehensive configuration
64
+ pipeline_adapter_cfg: dict | None = msgspec.field(default=None)
65
+ project_adapter_cfg: dict | None = msgspec.field(default=None)
66
+ adapter: dict[str, Any] | None = msgspec.field(default=None)
67
+ reload: bool = msgspec.field(default=False)
68
+ on_success: CallbackSpec | None = msgspec.field(default=None)
69
+ on_failure: CallbackSpec | None = msgspec.field(default=None)
36
70
 
37
71
  def __post_init__(self):
38
72
  if isinstance(self.inputs, dict):
@@ -45,3 +79,58 @@ class RunConfig(BaseConfig):
45
79
  self.with_adapter = WithAdapterConfig.from_dict(self.with_adapter)
46
80
  if isinstance(self.executor, dict):
47
81
  self.executor = ExecutorConfig.from_dict(self.executor)
82
+ if isinstance(self.pipeline_adapter_cfg, dict):
83
+ from ..pipeline.adapter import AdapterConfig as PipelineAdapterConfig
84
+ self.pipeline_adapter_cfg = PipelineAdapterConfig.from_dict(self.pipeline_adapter_cfg)
85
+ if isinstance(self.project_adapter_cfg, dict):
86
+ from ..project.adapter import AdapterConfig as ProjectAdapterConfig
87
+ self.project_adapter_cfg = ProjectAdapterConfig.from_dict(self.project_adapter_cfg)
88
+ if isinstance(self.adapter, dict):
89
+ # Convert adapter instances if needed
90
+ pass
91
+ if isinstance(self.retry_exceptions, list):
92
+ # Convert string exceptions to actual exception classes
93
+ converted_exceptions = []
94
+ for exc in self.retry_exceptions:
95
+ if isinstance(exc, str):
96
+ try:
97
+ exc_class = eval(exc)
98
+ # Ensure it's actually an exception class
99
+ if isinstance(exc_class, type) and issubclass(exc_class, BaseException):
100
+ converted_exceptions.append(exc_class)
101
+ else:
102
+ converted_exceptions.append(Exception)
103
+ except (NameError, AttributeError):
104
+ converted_exceptions.append(Exception)
105
+ elif isinstance(exc, type) and issubclass(exc, BaseException):
106
+ converted_exceptions.append(exc)
107
+ else:
108
+ converted_exceptions.append(Exception)
109
+ self.retry_exceptions = converted_exceptions
110
+
111
+ # Handle callback conversions
112
+ if self.on_success is not None and not isinstance(self.on_success, CallbackSpec):
113
+ if callable(self.on_success):
114
+ self.on_success = CallbackSpec(func=self.on_success)
115
+ elif isinstance(self.on_success, tuple) and len(self.on_success) == 3:
116
+ func, args, kwargs = self.on_success
117
+ self.on_success = CallbackSpec(func=func, args=args, kwargs=kwargs)
118
+ else:
119
+ self.on_success = None
120
+ warnings.warn(
121
+ "Invalid on_success format, must be Callable or (Callable, args, kwargs)",
122
+ RuntimeWarning
123
+ )
124
+
125
+ if self.on_failure is not None and not isinstance(self.on_failure, CallbackSpec):
126
+ if callable(self.on_failure):
127
+ self.on_failure = CallbackSpec(func=self.on_failure)
128
+ elif isinstance(self.on_failure, tuple) and len(self.on_failure) == 3:
129
+ func, args, kwargs = self.on_failure
130
+ self.on_failure = CallbackSpec(func=func, args=args, kwargs=kwargs)
131
+ else:
132
+ self.on_failure = None
133
+ warnings.warn(
134
+ "Invalid on_failure format, must be Callable or (Callable, args, kwargs)",
135
+ RuntimeWarning
136
+ )
@@ -1,20 +1,20 @@
1
1
  import msgspec
2
2
  from fsspec_utils import AbstractFileSystem, BaseStorageOptions, filesystem
3
+ import posixpath
3
4
 
5
+ from ...settings import CONFIG_DIR
4
6
  from ..base import BaseConfig
5
7
  from .adapter import AdapterConfig
6
- from .job_queue import JobQueueConfig
7
8
 
8
9
 
9
10
  class ProjectConfig(BaseConfig):
10
11
  """A configuration class for managing project-level settings in FlowerPower.
11
12
 
12
- This class handles project-wide configuration including job queue and adapter settings.
13
+ This class handles project-wide configuration including adapter settings.
13
14
  It supports loading from and saving to YAML files, with filesystem abstraction.
14
15
 
15
16
  Attributes:
16
17
  name (str | None): The name of the project.
17
- job_queue (JobQueueConfig): Configuration for the job queue component.
18
18
  adapter (AdapterConfig): Configuration for the adapter component.
19
19
 
20
20
  Example:
@@ -31,12 +31,9 @@ class ProjectConfig(BaseConfig):
31
31
  """
32
32
 
33
33
  name: str | None = msgspec.field(default=None)
34
- job_queue: JobQueueConfig = msgspec.field(default_factory=JobQueueConfig)
35
34
  adapter: AdapterConfig = msgspec.field(default_factory=AdapterConfig)
36
35
 
37
36
  def __post_init__(self):
38
- if isinstance(self.job_queue, dict):
39
- self.job_queue = JobQueueConfig.from_dict(self.job_queue)
40
37
  if isinstance(self.adapter, dict):
41
38
  self.adapter = AdapterConfig.from_dict(self.adapter)
42
39
 
@@ -45,7 +42,6 @@ class ProjectConfig(BaseConfig):
45
42
  cls,
46
43
  base_dir: str = ".",
47
44
  name: str | None = None,
48
- job_queue_type: str | None = None,
49
45
  fs: AbstractFileSystem | None = None,
50
46
  storage_options: dict | BaseStorageOptions | None = {},
51
47
  ):
@@ -54,7 +50,6 @@ class ProjectConfig(BaseConfig):
54
50
  Args:
55
51
  base_dir (str, optional): Base directory for the project. Defaults to ".".
56
52
  name (str | None, optional): Project name. Defaults to None.
57
- job_queue_type (str | None, optional): Type of job queue to use. Defaults to None.
58
53
  fs (AbstractFileSystem | None, optional): Filesystem to use. Defaults to None.
59
54
  storage_options (dict | Munch, optional): Options for filesystem. Defaults to empty Munch.
60
55
 
@@ -65,8 +60,7 @@ class ProjectConfig(BaseConfig):
65
60
  ```python
66
61
  project = ProjectConfig.load(
67
62
  base_dir="my_project",
68
- name="pipeline1",
69
- job_queue_type="rq"
63
+ name="pipeline1"
70
64
  )
71
65
  ```
72
66
  """
@@ -78,9 +72,6 @@ class ProjectConfig(BaseConfig):
78
72
  project = ProjectConfig.from_yaml(path="conf/project.yml", fs=fs)
79
73
  else:
80
74
  project = ProjectConfig(name=name)
81
- if job_queue_type is not None:
82
- if job_queue_type != project.job_queue.type:
83
- project.job_queue.update_type(job_queue_type)
84
75
 
85
76
  return project
86
77
 
@@ -107,14 +98,13 @@ class ProjectConfig(BaseConfig):
107
98
  base_dir, cached=True, dirfs=True, storage_options=storage_options
108
99
  )
109
100
 
110
- fs.makedirs("conf", exist_ok=True)
111
- self.to_yaml(path="conf/project.yml", fs=fs)
101
+ fs.makedirs(CONFIG_DIR, exist_ok=True)
102
+ self.to_yaml(path=posixpath.join(CONFIG_DIR, "project.yml"), fs=fs)
112
103
 
113
104
 
114
105
  def init_project_config(
115
106
  base_dir: str = ".",
116
107
  name: str | None = None,
117
- job_queue_type: str | None = None,
118
108
  fs: AbstractFileSystem | None = None,
119
109
  storage_options: dict | BaseStorageOptions | None = {},
120
110
  ):
@@ -125,7 +115,6 @@ def init_project_config(
125
115
  Args:
126
116
  base_dir (str, optional): Base directory for the project. Defaults to ".".
127
117
  name (str | None, optional): Project name. Defaults to None.
128
- job_queue_type (str | None, optional): Type of job queue to use. Defaults to None.
129
118
  fs (AbstractFileSystem | None, optional): Filesystem to use. Defaults to None.
130
119
  storage_options (dict | Munch, optional): Options for filesystem. Defaults to empty Munch.
131
120
 
@@ -136,17 +125,15 @@ def init_project_config(
136
125
  ```python
137
126
  project = init_project_config(
138
127
  base_dir="my_project",
139
- name="test_project",
140
- job_queue_type="rq"
128
+ name="test_project"
141
129
  )
142
130
  ```
143
131
  """
144
132
  project = ProjectConfig.load(
145
133
  base_dir=base_dir,
146
134
  name=name,
147
- job_queue_type=job_queue_type,
148
135
  fs=fs,
149
136
  storage_options=storage_options,
150
137
  )
151
138
  project.save(base_dir=base_dir, fs=fs, storage_options=storage_options)
152
- return project
139
+ return project
@@ -24,18 +24,6 @@ class OpenTelemetryConfig(BaseConfig):
24
24
  port: int = msgspec.field(default=6831)
25
25
 
26
26
 
27
- # class OpenLineageConfig(BaseConfig):
28
- # from openlineage.client import OpenLineageClientOptions
29
- # from openlineage.client.transport import Transport
30
- # from openlineage.client.transport import TransportFactory
31
- # url: str | None = msgspec.field(default=None)
32
- # options: OpenLineageClientOptions | None = msgspec.field(
33
- # default=None)
34
- # transport: Transport | None = msgspec.field(default=None)
35
- # factory: TransportFactory | None = msgspec.field(
36
- # default=None)
37
- # config: dict | None = msgspec.field(default=None)
38
-
39
27
 
40
28
  class RayConfig(BaseConfig):
41
29
  ray_init_config: dict | None = msgspec.field(default=None)
@@ -4,7 +4,7 @@ import os
4
4
  import typer
5
5
  from loguru import logger
6
6
 
7
- from ..flowerpower import init as init_
7
+ from ..flowerpower import FlowerPowerProject
8
8
  from .pipeline import app as pipeline_app
9
9
  from .utils import parse_dict_or_list_param
10
10
 
@@ -18,21 +18,6 @@ app.add_typer(
18
18
  pipeline_app, name="pipeline", help="Manage and execute FlowerPower pipelines"
19
19
  )
20
20
 
21
- if importlib.util.find_spec("rq"):
22
- from .job_queue import app as job_queue_app
23
-
24
- app.add_typer(
25
- job_queue_app,
26
- name="job-queue",
27
- help="Manage job queue workers and scheduled tasks",
28
- )
29
-
30
- if importlib.util.find_spec("paho"):
31
- from .mqtt import app as mqtt_app
32
-
33
- app.add_typer(
34
- mqtt_app, name="mqtt", help="Connect pipelines to MQTT message brokers"
35
- )
36
21
 
37
22
 
38
23
  @app.command()
@@ -49,12 +34,6 @@ def init(
49
34
  storage_options: str = typer.Option(
50
35
  None, "--storage-options", "-s", help="Storage options as a JSON or dict string"
51
36
  ),
52
- job_queue_type: str = typer.Option(
53
- "rq",
54
- "--job-queue-type",
55
- "-q",
56
- help="Job queue backend type to use (rq)",
57
- ),
58
37
  ):
59
38
  """
60
39
  Initialize a new FlowerPower project.
@@ -69,7 +48,6 @@ def init(
69
48
  base_dir: Base directory where the project will be created. If not provided,
70
49
  the current directory's parent will be used
71
50
  storage_options: Storage options for filesystem access, as a JSON or dict string
72
- job_queue_type: Type of job queue backend to use (rq)
73
51
 
74
52
  Examples:
75
53
  # Create a project in the current directory using its name
@@ -80,9 +58,6 @@ def init(
80
58
 
81
59
  # Create a project in a specific location
82
60
  $ flowerpower init --name my-project --base-dir /path/to/projects
83
-
84
- # Create a project with RQ as the job queue backend (default)
85
- $ flowerpower init --job-queue-type rq
86
61
  """
87
62
  parsed_storage_options = {}
88
63
  if storage_options:
@@ -95,11 +70,10 @@ def init(
95
70
  raise typer.Exit(code=1)
96
71
 
97
72
  try:
98
- init_(
73
+ FlowerPowerProject.new(
99
74
  name=project_name,
100
75
  base_dir=base_dir,
101
76
  storage_options=parsed_storage_options,
102
- job_queue_type=job_queue_type,
103
77
  )
104
78
  except Exception as e:
105
79
  logger.error(f"Error initializing project: {e}")
@@ -5,6 +5,7 @@ from typing_extensions import Annotated
5
5
 
6
6
  from ..flowerpower import FlowerPowerProject
7
7
  from ..pipeline.manager import HookType, PipelineManager
8
+ from ..cfg.pipeline.run import RunConfig
8
9
  from ..utils.logging import setup_logging
9
10
  from .utils import parse_dict_or_list_param
10
11
 
@@ -114,18 +115,23 @@ def run(
114
115
  raise typer.Exit(1)
115
116
 
116
117
  try:
117
- _ = project.run(
118
- name=name,
118
+ # Construct RunConfig object from parsed CLI arguments
119
+ run_config = RunConfig(
119
120
  inputs=parsed_inputs,
120
121
  final_vars=parsed_final_vars,
121
122
  config=parsed_config,
122
123
  cache=parsed_cache,
123
- executor_cfg=executor,
124
- with_adapter_cfg=parsed_with_adapter,
124
+ with_adapter=parsed_with_adapter,
125
125
  max_retries=max_retries,
126
126
  retry_delay=retry_delay,
127
127
  jitter_factor=jitter_factor,
128
128
  )
129
+
130
+ # Handle executor configuration
131
+ if executor is not None:
132
+ run_config.executor.type = executor
133
+
134
+ _ = project.run(name=name, run_config=run_config)
129
135
  logger.info(f"Pipeline '{name}' finished running.")
130
136
  except Exception as e:
131
137
  logger.error(f"Pipeline execution failed: {e}")