FlowerPower 0.11.6.20__py3-none-any.whl → 0.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. flowerpower/__init__.py +2 -6
  2. flowerpower/cfg/__init__.py +7 -14
  3. flowerpower/cfg/base.py +29 -25
  4. flowerpower/cfg/pipeline/__init__.py +8 -6
  5. flowerpower/cfg/pipeline/_schedule.py +32 -0
  6. flowerpower/cfg/pipeline/adapter.py +0 -5
  7. flowerpower/cfg/pipeline/builder.py +377 -0
  8. flowerpower/cfg/pipeline/run.py +36 -0
  9. flowerpower/cfg/project/__init__.py +11 -24
  10. flowerpower/cfg/project/adapter.py +0 -12
  11. flowerpower/cli/__init__.py +2 -21
  12. flowerpower/cli/cfg.py +0 -3
  13. flowerpower/cli/mqtt.py +0 -6
  14. flowerpower/cli/pipeline.py +22 -415
  15. flowerpower/cli/utils.py +0 -1
  16. flowerpower/flowerpower.py +345 -146
  17. flowerpower/pipeline/__init__.py +2 -0
  18. flowerpower/pipeline/base.py +21 -12
  19. flowerpower/pipeline/io.py +58 -54
  20. flowerpower/pipeline/manager.py +165 -726
  21. flowerpower/pipeline/pipeline.py +643 -0
  22. flowerpower/pipeline/registry.py +285 -18
  23. flowerpower/pipeline/visualizer.py +5 -6
  24. flowerpower/plugins/io/__init__.py +8 -0
  25. flowerpower/plugins/mqtt/__init__.py +7 -11
  26. flowerpower/settings/__init__.py +0 -2
  27. flowerpower/settings/{backend.py → _backend.py} +0 -21
  28. flowerpower/settings/logging.py +1 -1
  29. flowerpower/utils/logging.py +24 -12
  30. flowerpower/utils/misc.py +17 -256
  31. flowerpower/utils/monkey.py +1 -83
  32. flowerpower-0.21.0.dist-info/METADATA +463 -0
  33. flowerpower-0.21.0.dist-info/RECORD +44 -0
  34. flowerpower/cfg/pipeline/schedule.py +0 -74
  35. flowerpower/cfg/project/job_queue.py +0 -238
  36. flowerpower/cli/job_queue.py +0 -1061
  37. flowerpower/fs/__init__.py +0 -29
  38. flowerpower/fs/base.py +0 -662
  39. flowerpower/fs/ext.py +0 -2143
  40. flowerpower/fs/storage_options.py +0 -1420
  41. flowerpower/job_queue/__init__.py +0 -294
  42. flowerpower/job_queue/apscheduler/__init__.py +0 -11
  43. flowerpower/job_queue/apscheduler/_setup/datastore.py +0 -110
  44. flowerpower/job_queue/apscheduler/_setup/eventbroker.py +0 -93
  45. flowerpower/job_queue/apscheduler/manager.py +0 -1051
  46. flowerpower/job_queue/apscheduler/setup.py +0 -554
  47. flowerpower/job_queue/apscheduler/trigger.py +0 -169
  48. flowerpower/job_queue/apscheduler/utils.py +0 -311
  49. flowerpower/job_queue/base.py +0 -413
  50. flowerpower/job_queue/rq/__init__.py +0 -10
  51. flowerpower/job_queue/rq/_trigger.py +0 -37
  52. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +0 -226
  53. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +0 -231
  54. flowerpower/job_queue/rq/manager.py +0 -1582
  55. flowerpower/job_queue/rq/setup.py +0 -154
  56. flowerpower/job_queue/rq/utils.py +0 -69
  57. flowerpower/mqtt.py +0 -12
  58. flowerpower/pipeline/job_queue.py +0 -583
  59. flowerpower/pipeline/runner.py +0 -603
  60. flowerpower/plugins/io/base.py +0 -2520
  61. flowerpower/plugins/io/helpers/datetime.py +0 -298
  62. flowerpower/plugins/io/helpers/polars.py +0 -875
  63. flowerpower/plugins/io/helpers/pyarrow.py +0 -570
  64. flowerpower/plugins/io/helpers/sql.py +0 -202
  65. flowerpower/plugins/io/loader/__init__.py +0 -28
  66. flowerpower/plugins/io/loader/csv.py +0 -37
  67. flowerpower/plugins/io/loader/deltatable.py +0 -190
  68. flowerpower/plugins/io/loader/duckdb.py +0 -19
  69. flowerpower/plugins/io/loader/json.py +0 -37
  70. flowerpower/plugins/io/loader/mqtt.py +0 -159
  71. flowerpower/plugins/io/loader/mssql.py +0 -26
  72. flowerpower/plugins/io/loader/mysql.py +0 -26
  73. flowerpower/plugins/io/loader/oracle.py +0 -26
  74. flowerpower/plugins/io/loader/parquet.py +0 -35
  75. flowerpower/plugins/io/loader/postgres.py +0 -26
  76. flowerpower/plugins/io/loader/pydala.py +0 -19
  77. flowerpower/plugins/io/loader/sqlite.py +0 -23
  78. flowerpower/plugins/io/metadata.py +0 -244
  79. flowerpower/plugins/io/saver/__init__.py +0 -28
  80. flowerpower/plugins/io/saver/csv.py +0 -36
  81. flowerpower/plugins/io/saver/deltatable.py +0 -186
  82. flowerpower/plugins/io/saver/duckdb.py +0 -19
  83. flowerpower/plugins/io/saver/json.py +0 -36
  84. flowerpower/plugins/io/saver/mqtt.py +0 -28
  85. flowerpower/plugins/io/saver/mssql.py +0 -26
  86. flowerpower/plugins/io/saver/mysql.py +0 -26
  87. flowerpower/plugins/io/saver/oracle.py +0 -26
  88. flowerpower/plugins/io/saver/parquet.py +0 -36
  89. flowerpower/plugins/io/saver/postgres.py +0 -26
  90. flowerpower/plugins/io/saver/pydala.py +0 -20
  91. flowerpower/plugins/io/saver/sqlite.py +0 -24
  92. flowerpower/plugins/mqtt/cfg.py +0 -17
  93. flowerpower/plugins/mqtt/manager.py +0 -962
  94. flowerpower/settings/job_queue.py +0 -87
  95. flowerpower/utils/scheduler.py +0 -311
  96. flowerpower-0.11.6.20.dist-info/METADATA +0 -537
  97. flowerpower-0.11.6.20.dist-info/RECORD +0 -102
  98. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/WHEEL +0 -0
  99. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/entry_points.txt +0 -0
  100. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/licenses/LICENSE +0 -0
  101. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/top_level.txt +0 -0
flowerpower/cfg/pipeline/builder.py ADDED
@@ -0,0 +1,377 @@
+import copy
+from typing import Any, Callable, Optional, Union
+
+from fsspec_utils import AbstractFileSystem, BaseStorageOptions, filesystem
+
+from ... import settings
+from ..base import BaseConfig
+from .adapter import AdapterConfig as PipelineAdapterConfig
+from .run import ExecutorConfig, RunConfig, WithAdapterConfig
+from ..project.adapter import AdapterConfig as ProjectAdapterConfig
+
+
+class RunConfigBuilder:
+    """A fluent builder for creating RunConfig objects.
+
+    This builder provides a clean interface for constructing RunConfig objects
+    with proper configuration merging from project and pipeline defaults.
+    """
+
+    def __init__(
+        self,
+        pipeline_name: str,
+        base_dir: str | None = None,
+        fs: AbstractFileSystem | None = None,
+        storage_options: dict | BaseStorageOptions | None = {}
+    ):
+        """Initialize the RunConfigBuilder.
+
+        Args:
+            pipeline_name: Name of the pipeline to build config for
+            base_dir: Base directory for the project (defaults to current directory)
+            fs: Optional filesystem instance
+            storage_options: Options for filesystem access
+        """
+        self.pipeline_name = pipeline_name
+        self.base_dir = base_dir or "."
+        self._fs = fs
+        self._storage_options = storage_options
+
+        # Initialize with empty config
+        self._config = RunConfig()
+
+        # Load defaults from pipeline and project configs
+        self._load_defaults()
+
+    def _load_defaults(self):
+        """Load default configuration from pipeline and project YAML files."""
+        if self._fs is None:
+            self._fs = filesystem(
+                self.base_dir,
+                cached=False,
+                dirfs=True,
+                storage_options=self._storage_options
+            )
+
+        # Load pipeline configuration
+        try:
+            from .. import PipelineConfig
+            pipeline_cfg = PipelineConfig.load(
+                base_dir=self.base_dir,
+                name=self.pipeline_name,
+                fs=self._fs,
+                storage_options=self._storage_options
+            )
+            if pipeline_cfg and pipeline_cfg.run:
+                self._config = copy.deepcopy(pipeline_cfg.run)
+        except Exception:
+            # If pipeline config doesn't exist, use defaults
+            pass
+
+        # Load project configuration for adapter defaults
+        try:
+            from .. import ProjectConfig
+            project_cfg = ProjectConfig.load(
+                base_dir=self.base_dir,
+                fs=self._fs,
+                storage_options=self._storage_options
+            )
+            if project_cfg and project_cfg.adapter:
+                # Store project adapter config for merging
+                self._project_adapter_cfg = project_cfg.adapter
+            else:
+                self._project_adapter_cfg = ProjectAdapterConfig()
+        except Exception:
+            self._project_adapter_cfg = ProjectAdapterConfig()
+
+    def with_inputs(self, inputs: dict) -> "RunConfigBuilder":
+        """Set pipeline input values.
+
+        Args:
+            inputs: Dictionary of input values to override defaults
+
+        Returns:
+            Self for method chaining
+        """
+        if inputs:
+            if self._config.inputs is None:
+                self._config.inputs = {}
+            self._config.inputs.update(inputs)
+        return self
+
+    def with_final_vars(self, final_vars: list[str]) -> "RunConfigBuilder":
+        """Set the final output variables.
+
+        Args:
+            final_vars: List of variable names to return from execution
+
+        Returns:
+            Self for method chaining
+        """
+        self._config.final_vars = final_vars
+        return self
+
+    def with_config(self, config: dict) -> "RunConfigBuilder":
+        """Set Hamilton driver configuration.
+
+        Args:
+            config: Dictionary of configuration values for Hamilton
+
+        Returns:
+            Self for method chaining
+        """
+        if config:
+            if self._config.config is None:
+                self._config.config = {}
+            self._config.config.update(config)
+        return self
+
+    def with_cache(self, cache: Union[dict, bool]) -> "RunConfigBuilder":
+        """Set cache configuration.
+
+        Args:
+            cache: Cache configuration (dict) or enable/disable flag (bool)
+
+        Returns:
+            Self for method chaining
+        """
+        self._config.cache = cache
+        return self
+
+    def with_executor(self, executor_type: str, **kwargs) -> "RunConfigBuilder":
+        """Set executor configuration.
+
+        Args:
+            executor_type: Type of executor ('synchronous', 'threadpool', 'processpool', 'ray', 'dask')
+            **kwargs: Additional executor configuration options
+
+        Returns:
+            Self for method chaining
+        """
+        if not self._config.executor:
+            self._config.executor = ExecutorConfig()
+
+        self._config.executor.type = executor_type
+
+        # Apply additional executor options
+        for key, value in kwargs.items():
+            if hasattr(self._config.executor, key):
+                setattr(self._config.executor, key, value)
+
+        return self
+
+    def with_adapter(self, adapter_name: str, **kwargs) -> "RunConfigBuilder":
+        """Enable and configure a specific adapter.
+
+        Args:
+            adapter_name: Name of the adapter ('hamilton_tracker', 'mlflow', 'opentelemetry', etc.)
+            **kwargs: Adapter-specific configuration options
+
+        Returns:
+            Self for method chaining
+        """
+        if not self._config.with_adapter:
+            self._config.with_adapter = WithAdapterConfig()
+
+        # Enable the adapter
+        if hasattr(self._config.with_adapter, adapter_name):
+            setattr(self._config.with_adapter, adapter_name, True)
+
+        # Store adapter configuration for merging
+        if not hasattr(self, '_adapter_configs'):
+            self._adapter_configs = {}
+        self._adapter_configs[adapter_name] = kwargs
+
+        return self
+
+    def with_retries(
+        self,
+        max_attempts: int = 3,
+        delay: float = 1.0,
+        jitter: float = 0.1,
+        exceptions: Optional[list] = None
+    ) -> "RunConfigBuilder":
+        """Configure retry behavior.
+
+        Args:
+            max_attempts: Maximum number of retry attempts
+            delay: Base delay between retries in seconds
+            jitter: Random jitter factor to add to retry delay
+            exceptions: List of exception types that should trigger retries
+
+        Returns:
+            Self for method chaining
+        """
+        self._config.max_retries = max_attempts
+        self._config.retry_delay = delay
+        self._config.jitter_factor = jitter
+
+        if exceptions:
+            self._config.retry_exceptions = exceptions
+
+        return self
+
+    def with_callbacks(
+        self,
+        on_success: Optional[Callable] = None,
+        on_failure: Optional[Callable] = None
+    ) -> "RunConfigBuilder":
+        """Set success and failure callbacks.
+
+        Args:
+            on_success: Callback function to execute on successful completion
+            on_failure: Callback function to execute on failure
+
+        Returns:
+            Self for method chaining
+        """
+        if on_success:
+            self._config.on_success = on_success
+        if on_failure:
+            self._config.on_failure = on_failure
+
+        return self
+
+    def with_log_level(self, log_level: str) -> "RunConfigBuilder":
+        """Set the log level for execution.
+
+        Args:
+            log_level: Log level ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL')
+
+        Returns:
+            Self for method chaining
+        """
+        self._config.log_level = log_level
+        return self
+
+    def with_reload(self, reload: bool = True) -> "RunConfigBuilder":
+        """Set whether to reload the pipeline module.
+
+        Args:
+            reload: Whether to force reload of the pipeline module
+
+        Returns:
+            Self for method chaining
+        """
+        self._config.reload = reload
+        return self
+
+    def with_pipeline_adapter_config(self, config: dict) -> "RunConfigBuilder":
+        """Set pipeline-specific adapter configuration.
+
+        Args:
+            config: Pipeline adapter configuration dictionary
+
+        Returns:
+            Self for method chaining
+        """
+        if config:
+            if self._config.pipeline_adapter_cfg is None:
+                self._config.pipeline_adapter_cfg = {}
+            self._config.pipeline_adapter_cfg.update(config)
+        return self
+
+    def with_project_adapter_config(self, config: dict) -> "RunConfigBuilder":
+        """Set project-level adapter configuration.
+
+        Args:
+            config: Project adapter configuration dictionary
+
+        Returns:
+            Self for method chaining
+        """
+        if config:
+            if self._config.project_adapter_cfg is None:
+                self._config.project_adapter_cfg = {}
+            self._config.project_adapter_cfg.update(config)
+        return self
+
+    def with_custom_adapter(self, name: str, adapter: Any) -> "RunConfigBuilder":
+        """Add a custom adapter instance.
+
+        Args:
+            name: Name/identifier for the adapter
+            adapter: Adapter instance
+
+        Returns:
+            Self for method chaining
+        """
+        if self._config.adapter is None:
+            self._config.adapter = {}
+        self._config.adapter[name] = adapter
+        return self
+
+    def build(self) -> RunConfig:
+        """Build the final RunConfig object.
+
+        This method merges all configurations and validates the final result.
+
+        Returns:
+            Fully configured RunConfig object
+
+        Raises:
+            ValueError: If configuration is invalid
+        """
+        # Create a deep copy to avoid modifying the internal state
+        final_config = copy.deepcopy(self._config)
+
+        # Merge adapter configurations
+        if hasattr(self, '_adapter_configs') and self._adapter_configs:
+            self._merge_adapter_configs(final_config)
+
+        # Validate configuration
+        self._validate_config(final_config)
+
+        return final_config
+
+    def _merge_adapter_configs(self, config: RunConfig):
+        """Merge adapter configurations from builder with project/pipeline configs."""
+        if not config.pipeline_adapter_cfg:
+            config.pipeline_adapter_cfg = {}
+
+        if not config.project_adapter_cfg:
+            config.project_adapter_cfg = {}
+
+        # Merge project adapter defaults
+        for adapter_name, adapter_config in self._adapter_configs.items():
+            if adapter_name in ['hamilton_tracker', 'mlflow', 'opentelemetry']:
+                # Merge with project config
+                if hasattr(self._project_adapter_cfg, adapter_name):
+                    project_config = getattr(self._project_adapter_cfg, adapter_name).to_dict()
+                    adapter_config = {**project_config, **adapter_config}
+
+            # Store in pipeline adapter config
+            if adapter_name not in config.pipeline_adapter_cfg:
+                config.pipeline_adapter_cfg[adapter_name] = {}
+            config.pipeline_adapter_cfg[adapter_name].update(adapter_config)
+
+    def _validate_config(self, config: RunConfig):
+        """Validate the final configuration.
+
+        Args:
+            config: RunConfig object to validate
+
+        Raises:
+            ValueError: If configuration is invalid
+        """
+        # Validate retry configuration
+        if config.max_retries < 0:
+            raise ValueError("max_retries must be non-negative")
+
+        if config.retry_delay < 0:
+            raise ValueError("retry_delay must be non-negative")
+
+        if config.jitter_factor is not None and config.jitter_factor < 0:
+            raise ValueError("jitter_factor must be non-negative")
+
+        # Validate executor configuration
+        if config.executor and config.executor.type:
+            valid_executors = ['synchronous', 'threadpool', 'processpool', 'ray', 'dask']
+            if config.executor.type not in valid_executors:
+                raise ValueError(f"Invalid executor type: {config.executor.type}")
+
+        # Validate log level
+        if config.log_level:
+            valid_levels = ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']
+            if config.log_level.upper() not in valid_levels:
+                raise ValueError(f"Invalid log level: {config.log_level}")
flowerpower/cfg/pipeline/run.py CHANGED
@@ -1,5 +1,6 @@
 import msgspec
 from munch import munchify
+from typing import Any, Callable
 
 from ... import settings
 from ..base import BaseConfig
@@ -33,6 +34,13 @@ class RunConfig(BaseConfig):
     retry_delay: int | float = msgspec.field(default=1)
     jitter_factor: float | None = msgspec.field(default=0.1)
     retry_exceptions: list[str] = msgspec.field(default_factory=lambda: ["Exception"])
+    # New fields for comprehensive configuration
+    pipeline_adapter_cfg: dict | None = msgspec.field(default=None)
+    project_adapter_cfg: dict | None = msgspec.field(default=None)
+    adapter: dict[str, Any] | None = msgspec.field(default=None)
+    reload: bool = msgspec.field(default=False)
+    on_success: Callable | tuple[Callable, tuple | None, dict | None] | None = msgspec.field(default=None)
+    on_failure: Callable | tuple[Callable, tuple | None, dict | None] | None = msgspec.field(default=None)
 
     def __post_init__(self):
         if isinstance(self.inputs, dict):
@@ -45,3 +53,31 @@ class RunConfig(BaseConfig):
             self.with_adapter = WithAdapterConfig.from_dict(self.with_adapter)
         if isinstance(self.executor, dict):
             self.executor = ExecutorConfig.from_dict(self.executor)
+        if isinstance(self.pipeline_adapter_cfg, dict):
+            from ..pipeline.adapter import AdapterConfig as PipelineAdapterConfig
+            self.pipeline_adapter_cfg = PipelineAdapterConfig.from_dict(self.pipeline_adapter_cfg)
+        if isinstance(self.project_adapter_cfg, dict):
+            from ..project.adapter import AdapterConfig as ProjectAdapterConfig
+            self.project_adapter_cfg = ProjectAdapterConfig.from_dict(self.project_adapter_cfg)
+        if isinstance(self.adapter, dict):
+            # Convert adapter instances if needed
+            pass
+        if isinstance(self.retry_exceptions, list):
+            # Convert string exceptions to actual exception classes
+            converted_exceptions = []
+            for exc in self.retry_exceptions:
+                if isinstance(exc, str):
+                    try:
+                        exc_class = eval(exc)
+                        # Ensure it's actually an exception class
+                        if isinstance(exc_class, type) and issubclass(exc_class, BaseException):
+                            converted_exceptions.append(exc_class)
+                        else:
+                            converted_exceptions.append(Exception)
+                    except (NameError, AttributeError):
+                        converted_exceptions.append(Exception)
+                elif isinstance(exc, type) and issubclass(exc, BaseException):
+                    converted_exceptions.append(exc)
+                else:
+                    converted_exceptions.append(Exception)
+            self.retry_exceptions = converted_exceptions
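As the `__post_init__` above shows, `retry_exceptions` may be given as strings and is normalized to exception classes, with unresolvable names falling back to `Exception`. A small sketch of that behavior (values illustrative; assumes keyword construction as with other msgspec structs):

```python
from flowerpower.cfg.pipeline.run import RunConfig

cfg = RunConfig(retry_exceptions=["ValueError", "TimeoutError", "NotARealError"])
# "ValueError" and "TimeoutError" resolve to builtin classes via eval();
# "NotARealError" raises NameError and falls back to Exception.
print(cfg.retry_exceptions)  # [<class 'ValueError'>, <class 'TimeoutError'>, <class 'Exception'>]
```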
flowerpower/cfg/project/__init__.py CHANGED
@@ -1,20 +1,20 @@
 import msgspec
+from fsspec_utils import AbstractFileSystem, BaseStorageOptions, filesystem
+import posixpath
 
-from ...fs import AbstractFileSystem, BaseStorageOptions, get_filesystem
+from ...settings import CONFIG_DIR
 from ..base import BaseConfig
 from .adapter import AdapterConfig
-from .job_queue import JobQueueConfig
 
 
 class ProjectConfig(BaseConfig):
     """A configuration class for managing project-level settings in FlowerPower.
 
-    This class handles project-wide configuration including job queue and adapter settings.
+    This class handles project-wide configuration including adapter settings.
     It supports loading from and saving to YAML files, with filesystem abstraction.
 
     Attributes:
         name (str | None): The name of the project.
-        job_queue (JobQueueConfig): Configuration for the job queue component.
         adapter (AdapterConfig): Configuration for the adapter component.
 
     Example:
@@ -31,12 +31,9 @@ class ProjectConfig(BaseConfig):
     """
 
     name: str | None = msgspec.field(default=None)
-    job_queue: JobQueueConfig = msgspec.field(default_factory=JobQueueConfig)
     adapter: AdapterConfig = msgspec.field(default_factory=AdapterConfig)
 
     def __post_init__(self):
-        if isinstance(self.job_queue, dict):
-            self.job_queue = JobQueueConfig.from_dict(self.job_queue)
         if isinstance(self.adapter, dict):
             self.adapter = AdapterConfig.from_dict(self.adapter)
 
@@ -45,7 +42,6 @@ class ProjectConfig(BaseConfig):
         cls,
         base_dir: str = ".",
         name: str | None = None,
-        job_queue_type: str | None = None,
         fs: AbstractFileSystem | None = None,
         storage_options: dict | BaseStorageOptions | None = {},
     ):
@@ -54,7 +50,6 @@ class ProjectConfig(BaseConfig):
         Args:
             base_dir (str, optional): Base directory for the project. Defaults to ".".
             name (str | None, optional): Project name. Defaults to None.
-            job_queue_type (str | None, optional): Type of job queue to use. Defaults to None.
             fs (AbstractFileSystem | None, optional): Filesystem to use. Defaults to None.
             storage_options (dict | Munch, optional): Options for filesystem. Defaults to empty Munch.
 
@@ -65,22 +60,18 @@ class ProjectConfig(BaseConfig):
        ```python
        project = ProjectConfig.load(
            base_dir="my_project",
-           name="pipeline1",
-           job_queue_type="rq"
+           name="pipeline1"
        )
        ```
        """
         if fs is None:
-            fs = get_filesystem(
+            fs = filesystem(
                 base_dir, cached=False, dirfs=True, storage_options=storage_options
             )
         if fs.exists("conf/project.yml"):
             project = ProjectConfig.from_yaml(path="conf/project.yml", fs=fs)
         else:
             project = ProjectConfig(name=name)
-            if job_queue_type is not None:
-                if job_queue_type != project.job_queue.type:
-                    project.job_queue.update_type(job_queue_type)
 
         return project
 
@@ -103,18 +94,17 @@ class ProjectConfig(BaseConfig):
        ```
        """
         if fs is None:
-            fs = get_filesystem(
+            fs = filesystem(
                 base_dir, cached=True, dirfs=True, storage_options=storage_options
             )
 
-        fs.makedirs("conf", exist_ok=True)
-        self.to_yaml(path="conf/project.yml", fs=fs)
+        fs.makedirs(CONFIG_DIR, exist_ok=True)
+        self.to_yaml(path=posixpath.join(CONFIG_DIR, "project.yml"), fs=fs)
 
 
 def init_project_config(
     base_dir: str = ".",
     name: str | None = None,
-    job_queue_type: str | None = None,
     fs: AbstractFileSystem | None = None,
     storage_options: dict | BaseStorageOptions | None = {},
 ):
@@ -125,7 +115,6 @@ def init_project_config(
     Args:
         base_dir (str, optional): Base directory for the project. Defaults to ".".
         name (str | None, optional): Project name. Defaults to None.
-        job_queue_type (str | None, optional): Type of job queue to use. Defaults to None.
         fs (AbstractFileSystem | None, optional): Filesystem to use. Defaults to None.
         storage_options (dict | Munch, optional): Options for filesystem. Defaults to empty Munch.
 
@@ -136,17 +125,15 @@ def init_project_config(
    ```python
    project = init_project_config(
        base_dir="my_project",
-       name="test_project",
-       job_queue_type="rq"
+       name="test_project"
    )
    ```
    """
    project = ProjectConfig.load(
        base_dir=base_dir,
        name=name,
-       job_queue_type=job_queue_type,
        fs=fs,
        storage_options=storage_options,
    )
    project.save(base_dir=base_dir, fs=fs, storage_options=storage_options)
-   return project
+   return project
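With the job queue gone, a project config reduces to a name plus adapter settings. Loading and saving now looks like this (a sketch with placeholder paths; the load path above implies `CONFIG_DIR` resolves to `conf`):

```python
from flowerpower.cfg.project import ProjectConfig

# Reads conf/project.yml if present, otherwise starts a fresh config.
project = ProjectConfig.load(base_dir="my_project", name="my_project")
project.save(base_dir="my_project")  # writes conf/project.yml
```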
flowerpower/cfg/project/adapter.py CHANGED
@@ -24,18 +24,6 @@ class OpenTelemetryConfig(BaseConfig):
     port: int = msgspec.field(default=6831)
 
 
-# class OpenLineageConfig(BaseConfig):
-#     from openlineage.client import OpenLineageClientOptions
-#     from openlineage.client.transport import Transport
-#     from openlineage.client.transport import TransportFactory
-#     url: str | None = msgspec.field(default=None)
-#     options: OpenLineageClientOptions | None = msgspec.field(
-#         default=None)
-#     transport: Transport | None = msgspec.field(default=None)
-#     factory: TransportFactory | None = msgspec.field(
-#         default=None)
-#     config: dict | None = msgspec.field(default=None)
-
 
 class RayConfig(BaseConfig):
     ray_init_config: dict | None = msgspec.field(default=None)
flowerpower/cli/__init__.py CHANGED
@@ -4,7 +4,7 @@ import os
 import typer
 from loguru import logger
 
-from ..flowerpower import init as init_
+from ..flowerpower import FlowerPowerProject
 from .pipeline import app as pipeline_app
 from .utils import parse_dict_or_list_param
 
@@ -18,14 +18,6 @@ app.add_typer(
     pipeline_app, name="pipeline", help="Manage and execute FlowerPower pipelines"
 )
 
-if importlib.util.find_spec("apscheduler") or importlib.util.find_spec("rq"):
-    from .job_queue import app as job_queue_app
-
-    app.add_typer(
-        job_queue_app,
-        name="job-queue",
-        help="Manage job queue workers and scheduled tasks",
-    )
 
 if importlib.util.find_spec("paho"):
     from .mqtt import app as mqtt_app
@@ -49,12 +41,6 @@ def init(
     storage_options: str = typer.Option(
         None, "--storage-options", "-s", help="Storage options as a JSON or dict string"
     ),
-    job_queue_type: str = typer.Option(
-        "rq",
-        "--job-queue-type",
-        "-q",
-        help="Job queue backend type to use (rq, apscheduler)",
-    ),
 ):
     """
     Initialize a new FlowerPower project.
@@ -69,7 +55,6 @@ def init(
         base_dir: Base directory where the project will be created. If not provided,
             the current directory's parent will be used
         storage_options: Storage options for filesystem access, as a JSON or dict string
-        job_queue_type: Type of job queue backend to use (rq, apscheduler)
 
    Examples:
        # Create a project in the current directory using its name
@@ -80,9 +65,6 @@ def init(
 
        # Create a project in a specific location
        $ flowerpower init --name my-project --base-dir /path/to/projects
-
-       # Create a project with APScheduler as the job queue backend
-       $ flowerpower init --job-queue-type apscheduler
    """
    parsed_storage_options = {}
    if storage_options:
@@ -95,11 +77,10 @@ def init(
        raise typer.Exit(code=1)
 
    try:
-       init_(
+       FlowerPowerProject.new(
            name=project_name,
            base_dir=base_dir,
            storage_options=parsed_storage_options,
-           job_queue_type=job_queue_type,
        )
    except Exception as e:
        logger.error(f"Error initializing project: {e}")
flowerpower/cli/cfg.py CHANGED
@@ -1,8 +1,5 @@
 import typer
 
-from ..cfg import Config
-from ..cli.utils import parse_dict_or_list_param
-
 app = typer.Typer(help="Config management commands")
 
 
flowerpower/cli/mqtt.py CHANGED
@@ -69,7 +69,6 @@ def run_pipeline_on_message(
     with_opentelemetry: bool = False,
     with_progressbar: bool = False,
     storage_options: str | None = None,
-    as_job: bool = False,
     host: str | None = None,
     port: int | None = None,
     username: str | None = None,
@@ -107,7 +106,6 @@ def run_pipeline_on_message(
         with_opentelemetry: Enable OpenTelemetry tracing
         with_progressbar: Enable progress bar
         storage_options: Storage options as JSON, dict string or key=value pairs
-        as_job: Run as a job in the scheduler
         host: MQTT broker host
         port: MQTT broker port
         username: MQTT broker username
@@ -128,9 +126,6 @@ def run_pipeline_on_message(
         # Configure retries for resilience
         $ flowerpower mqtt run-pipeline-on-message my_pipeline --topic sensors/data --max-retries 5 --retry-delay 2.0
 
-        # Run as a job with custom MQTT settings
-        $ flowerpower mqtt run-pipeline-on-message my_pipeline --topic events/process --as-job --qos 2 --host mqtt.example.com
-
         # Use a config hook to process messages
         $ flowerpower mqtt run-pipeline-on-message my_pipeline --topic data/incoming --config-hook process_message
 
@@ -158,7 +153,6 @@ def run_pipeline_on_message(
         with_opentelemetry=with_opentelemetry,
         with_progressbar=with_progressbar,
         storage_options=parsed_storage_options,
-        as_job=as_job,
         host=host,
         port=port,
         username=username,
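With `--as-job` removed, a matched message always triggers an inline pipeline run; the remaining broker options are unchanged. A sketch assembled from the flags shown above:

    $ flowerpower mqtt run-pipeline-on-message my_pipeline --topic sensors/data --host mqtt.example.com --qos 2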