FlowerPower 0.11.6.20__py3-none-any.whl → 0.21.0__py3-none-any.whl

This diff compares the contents of two package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
Files changed (101)
  1. flowerpower/__init__.py +2 -6
  2. flowerpower/cfg/__init__.py +7 -14
  3. flowerpower/cfg/base.py +29 -25
  4. flowerpower/cfg/pipeline/__init__.py +8 -6
  5. flowerpower/cfg/pipeline/_schedule.py +32 -0
  6. flowerpower/cfg/pipeline/adapter.py +0 -5
  7. flowerpower/cfg/pipeline/builder.py +377 -0
  8. flowerpower/cfg/pipeline/run.py +36 -0
  9. flowerpower/cfg/project/__init__.py +11 -24
  10. flowerpower/cfg/project/adapter.py +0 -12
  11. flowerpower/cli/__init__.py +2 -21
  12. flowerpower/cli/cfg.py +0 -3
  13. flowerpower/cli/mqtt.py +0 -6
  14. flowerpower/cli/pipeline.py +22 -415
  15. flowerpower/cli/utils.py +0 -1
  16. flowerpower/flowerpower.py +345 -146
  17. flowerpower/pipeline/__init__.py +2 -0
  18. flowerpower/pipeline/base.py +21 -12
  19. flowerpower/pipeline/io.py +58 -54
  20. flowerpower/pipeline/manager.py +165 -726
  21. flowerpower/pipeline/pipeline.py +643 -0
  22. flowerpower/pipeline/registry.py +285 -18
  23. flowerpower/pipeline/visualizer.py +5 -6
  24. flowerpower/plugins/io/__init__.py +8 -0
  25. flowerpower/plugins/mqtt/__init__.py +7 -11
  26. flowerpower/settings/__init__.py +0 -2
  27. flowerpower/settings/{backend.py → _backend.py} +0 -21
  28. flowerpower/settings/logging.py +1 -1
  29. flowerpower/utils/logging.py +24 -12
  30. flowerpower/utils/misc.py +17 -256
  31. flowerpower/utils/monkey.py +1 -83
  32. flowerpower-0.21.0.dist-info/METADATA +463 -0
  33. flowerpower-0.21.0.dist-info/RECORD +44 -0
  34. flowerpower/cfg/pipeline/schedule.py +0 -74
  35. flowerpower/cfg/project/job_queue.py +0 -238
  36. flowerpower/cli/job_queue.py +0 -1061
  37. flowerpower/fs/__init__.py +0 -29
  38. flowerpower/fs/base.py +0 -662
  39. flowerpower/fs/ext.py +0 -2143
  40. flowerpower/fs/storage_options.py +0 -1420
  41. flowerpower/job_queue/__init__.py +0 -294
  42. flowerpower/job_queue/apscheduler/__init__.py +0 -11
  43. flowerpower/job_queue/apscheduler/_setup/datastore.py +0 -110
  44. flowerpower/job_queue/apscheduler/_setup/eventbroker.py +0 -93
  45. flowerpower/job_queue/apscheduler/manager.py +0 -1051
  46. flowerpower/job_queue/apscheduler/setup.py +0 -554
  47. flowerpower/job_queue/apscheduler/trigger.py +0 -169
  48. flowerpower/job_queue/apscheduler/utils.py +0 -311
  49. flowerpower/job_queue/base.py +0 -413
  50. flowerpower/job_queue/rq/__init__.py +0 -10
  51. flowerpower/job_queue/rq/_trigger.py +0 -37
  52. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +0 -226
  53. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +0 -231
  54. flowerpower/job_queue/rq/manager.py +0 -1582
  55. flowerpower/job_queue/rq/setup.py +0 -154
  56. flowerpower/job_queue/rq/utils.py +0 -69
  57. flowerpower/mqtt.py +0 -12
  58. flowerpower/pipeline/job_queue.py +0 -583
  59. flowerpower/pipeline/runner.py +0 -603
  60. flowerpower/plugins/io/base.py +0 -2520
  61. flowerpower/plugins/io/helpers/datetime.py +0 -298
  62. flowerpower/plugins/io/helpers/polars.py +0 -875
  63. flowerpower/plugins/io/helpers/pyarrow.py +0 -570
  64. flowerpower/plugins/io/helpers/sql.py +0 -202
  65. flowerpower/plugins/io/loader/__init__.py +0 -28
  66. flowerpower/plugins/io/loader/csv.py +0 -37
  67. flowerpower/plugins/io/loader/deltatable.py +0 -190
  68. flowerpower/plugins/io/loader/duckdb.py +0 -19
  69. flowerpower/plugins/io/loader/json.py +0 -37
  70. flowerpower/plugins/io/loader/mqtt.py +0 -159
  71. flowerpower/plugins/io/loader/mssql.py +0 -26
  72. flowerpower/plugins/io/loader/mysql.py +0 -26
  73. flowerpower/plugins/io/loader/oracle.py +0 -26
  74. flowerpower/plugins/io/loader/parquet.py +0 -35
  75. flowerpower/plugins/io/loader/postgres.py +0 -26
  76. flowerpower/plugins/io/loader/pydala.py +0 -19
  77. flowerpower/plugins/io/loader/sqlite.py +0 -23
  78. flowerpower/plugins/io/metadata.py +0 -244
  79. flowerpower/plugins/io/saver/__init__.py +0 -28
  80. flowerpower/plugins/io/saver/csv.py +0 -36
  81. flowerpower/plugins/io/saver/deltatable.py +0 -186
  82. flowerpower/plugins/io/saver/duckdb.py +0 -19
  83. flowerpower/plugins/io/saver/json.py +0 -36
  84. flowerpower/plugins/io/saver/mqtt.py +0 -28
  85. flowerpower/plugins/io/saver/mssql.py +0 -26
  86. flowerpower/plugins/io/saver/mysql.py +0 -26
  87. flowerpower/plugins/io/saver/oracle.py +0 -26
  88. flowerpower/plugins/io/saver/parquet.py +0 -36
  89. flowerpower/plugins/io/saver/postgres.py +0 -26
  90. flowerpower/plugins/io/saver/pydala.py +0 -20
  91. flowerpower/plugins/io/saver/sqlite.py +0 -24
  92. flowerpower/plugins/mqtt/cfg.py +0 -17
  93. flowerpower/plugins/mqtt/manager.py +0 -962
  94. flowerpower/settings/job_queue.py +0 -87
  95. flowerpower/utils/scheduler.py +0 -311
  96. flowerpower-0.11.6.20.dist-info/METADATA +0 -537
  97. flowerpower-0.11.6.20.dist-info/RECORD +0 -102
  98. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/WHEEL +0 -0
  99. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/entry_points.txt +0 -0
  100. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/licenses/LICENSE +0 -0
  101. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/top_level.txt +0 -0
flowerpower/pipeline/registry.py

@@ -4,9 +4,12 @@
 import datetime as dt
 import os
 import posixpath
-from typing import TYPE_CHECKING
+import sys
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Dict

 import rich
+from fsspec_utils import AbstractFileSystem, filesystem
 from loguru import logger
 from rich.console import Console
 from rich.panel import Panel
@@ -14,18 +17,19 @@ from rich.syntax import Syntax
 from rich.table import Table
 from rich.tree import Tree

-from .. import settings
+from ..settings import CONFIG_DIR, LOG_LEVEL, PIPELINES_DIR
 # Import necessary config types and utility functions
 from ..cfg import PipelineConfig, ProjectConfig
-from ..fs import AbstractFileSystem
 from ..utils.logging import setup_logging
 # Assuming view_img might be used indirectly or needed later
 from ..utils.templates import (HOOK_TEMPLATE__MQTT_BUILD_CONFIG,
                                PIPELINE_PY_TEMPLATE)
+# Import base utilities
+from .base import load_module

 if TYPE_CHECKING:
-    # Keep this for type hinting if needed elsewhere, though Config is imported directly now
-    pass
+    from .pipeline import Pipeline
+    from ..flowerpower import FlowerPowerProject

 from enum import Enum

@@ -44,7 +48,15 @@ class HookType(str, Enum):
         return self.value


-setup_logging(level=settings.LOG_LEVEL)
+@dataclass
+class CachedPipelineData:
+    """Container for cached pipeline data."""
+    pipeline: "Pipeline"
+    config: PipelineConfig
+    module: Any
+
+
+setup_logging(level=LOG_LEVEL)


 class PipelineRegistry:
@@ -54,8 +66,8 @@ class PipelineRegistry:
         self,
         project_cfg: ProjectConfig,
         fs: AbstractFileSystem,
-        cfg_dir: str,
-        pipelines_dir: str,
+        base_dir: str | None = None,
+        storage_options: dict | None = None,
     ):
         """
         Initializes the PipelineRegistry.
@@ -63,15 +75,258 @@
         Args:
             project_cfg: The project configuration object.
             fs: The filesystem instance.
-            cfg_dir: The configuration directory path.
-            pipelines_dir: The pipelines directory path.
+            base_dir: The base directory path.
+            storage_options: Storage options for filesystem operations.
         """
         self.project_cfg = project_cfg
         self._fs = fs
-        self._cfg_dir = cfg_dir
-        self._pipelines_dir = pipelines_dir
+        self._cfg_dir = CONFIG_DIR
+        self._pipelines_dir = PIPELINES_DIR
+        self._base_dir = base_dir
+        self._storage_options = storage_options or {}
         self._console = Console()

+        # Consolidated cache for pipeline data
+        self._pipeline_data_cache: Dict[str, CachedPipelineData] = {}
+
+        # Ensure module paths are added
+        self._add_modules_path()
+
+    @classmethod
+    def from_filesystem(
+        cls,
+        base_dir: str,
+        fs: AbstractFileSystem | None = None,
+        storage_options: dict | None = None,
+    ) -> "PipelineRegistry":
+        """
+        Create a PipelineRegistry from filesystem parameters.
+
+        This factory method creates a complete PipelineRegistry instance by:
+        1. Creating the filesystem if not provided
+        2. Loading the ProjectConfig from the base directory
+        3. Initializing the registry with the loaded configuration
+
+        Args:
+            base_dir: The base directory path for the FlowerPower project
+            fs: Optional filesystem instance. If None, will be created from base_dir
+            storage_options: Optional storage options for filesystem access
+
+        Returns:
+            PipelineRegistry: A fully configured registry instance
+
+        Raises:
+            ValueError: If base_dir is invalid or ProjectConfig cannot be loaded
+            RuntimeError: If filesystem creation fails
+
+        Example:
+            ```python
+            # Create registry from local directory
+            registry = PipelineRegistry.from_filesystem("/path/to/project")
+
+            # Create registry with S3 storage
+            registry = PipelineRegistry.from_filesystem(
+                "s3://my-bucket/project",
+                storage_options={"key": "secret"}
+            )
+            ```
+        """
+        # Create filesystem if not provided
+        if fs is None:
+            fs = filesystem(
+                base_dir,
+                storage_options=storage_options,
+                cached=storage_options is not None,
+            )
+
+        # Load project configuration
+        project_cfg = ProjectConfig.load(base_dir=base_dir, fs=fs)
+
+        # Ensure we have a ProjectConfig instance
+        if not isinstance(project_cfg, ProjectConfig):
+            raise TypeError(f"Expected ProjectConfig, got {type(project_cfg)}")
+
+        # Create and return registry instance
+        return cls(
+            project_cfg=project_cfg,
+            fs=fs,
+            base_dir=base_dir,
+            storage_options=storage_options,
+        )
+
+    def _add_modules_path(self) -> None:
+        """Add pipeline module paths to Python path."""
+        try:
+            if hasattr(self._fs, "is_cache_fs") and self._fs.is_cache_fs:
+                self._fs.sync_cache()
+                project_path = self._fs._mapper.directory
+                modules_path = posixpath.join(project_path, self._pipelines_dir)
+            else:
+                # Use the base directory directly if not using cache
+                if hasattr(self._fs, "path"):
+                    project_path = self._fs.path
+                elif self._base_dir:
+                    project_path = self._base_dir
+                else:
+                    # Fallback for mocked filesystems
+                    project_path = "."
+                modules_path = posixpath.join(project_path, self._pipelines_dir)
+
+            if project_path not in sys.path:
+                sys.path.insert(0, project_path)
+
+            if modules_path not in sys.path:
+                sys.path.insert(0, modules_path)
+        except (AttributeError, TypeError):
+            # Handle case where filesystem is mocked or doesn't have required properties
+            logger.debug("Could not add modules path - using default Python path")
+
+    # --- Pipeline Factory Methods ---
+
+    def get_pipeline(
+        self, name: str, project_context: "FlowerPowerProject", reload: bool = False
+    ) -> "Pipeline":
+        """Get a Pipeline instance for the given name.
+
+        This method creates a fully-formed Pipeline object by loading its configuration
+        and Python module, then injecting the project context.
+
+        Args:
+            name: Name of the pipeline to get
+            project_context: Reference to the FlowerPowerProject
+            reload: Whether to reload configuration and module from disk
+
+        Returns:
+            Pipeline instance ready for execution
+
+        Raises:
+            FileNotFoundError: If pipeline configuration or module doesn't exist
+            ImportError: If pipeline module cannot be imported
+            ValueError: If pipeline configuration is invalid
+        """
+        # Use cache if available and not reloading
+        if not reload and name in self._pipeline_data_cache:
+            logger.debug(f"Returning cached pipeline '{name}'")
+            return self._pipeline_data_cache[name].pipeline
+
+        logger.debug(f"Creating pipeline instance for '{name}'")
+
+        # Load pipeline configuration
+        config = self.load_config(name, reload=reload)
+
+        # Load pipeline module
+        module = self.load_module(name, reload=reload)
+
+        # Import Pipeline class here to avoid circular import
+        from .pipeline import Pipeline
+
+        # Create Pipeline instance
+        pipeline = Pipeline(
+            name=name,
+            config=config,
+            module=module,
+            project_context=project_context,
+        )
+
+        # Cache the pipeline data
+        self._pipeline_data_cache[name] = CachedPipelineData(
+            pipeline=pipeline,
+            config=config,
+            module=module,
+        )
+
+        logger.debug(f"Successfully created pipeline instance for '{name}'")
+        return pipeline
+
+    def load_config(self, name: str, reload: bool = False) -> PipelineConfig:
+        """Load pipeline configuration from disk.
+
+        Args:
+            name: Name of the pipeline
+            reload: Whether to reload from disk even if cached
+
+        Returns:
+            PipelineConfig instance
+        """
+        # Use cache if available and not reloading
+        if not reload and name in self._pipeline_data_cache:
+            logger.debug(f"Returning cached config for pipeline '{name}'")
+            return self._pipeline_data_cache[name].config
+
+        logger.debug(f"Loading configuration for pipeline '{name}'")
+
+        # Load configuration from disk
+        config = PipelineConfig.load(
+            base_dir=self._base_dir,
+            name=name,
+            fs=self._fs,
+            storage_options=self._storage_options,
+        )
+
+        # Cache the configuration (will be stored in consolidated cache when pipeline is created)
+        # For now, we'll create a temporary cache entry if it doesn't exist
+        if name not in self._pipeline_data_cache:
+            self._pipeline_data_cache[name] = CachedPipelineData(
+                pipeline=None,  # type: ignore
+                config=config,
+                module=None,  # type: ignore
+            )
+
+        return config
+
+    def load_module(self, name: str, reload: bool = False) -> Any:
+        """Load pipeline module from disk.
+
+        Args:
+            name: Name of the pipeline
+            reload: Whether to reload from disk even if cached
+
+        Returns:
+            Loaded Python module
+        """
+        # Use cache if available and not reloading
+        if not reload and name in self._pipeline_data_cache:
+            cached_data = self._pipeline_data_cache[name]
+            if cached_data.module is not None:
+                logger.debug(f"Returning cached module for pipeline '{name}'")
+                return cached_data.module
+
+        logger.debug(f"Loading module for pipeline '{name}'")
+
+        # Convert pipeline name to module name
+        formatted_name = name.replace(".", "/").replace("-", "_")
+        module_name = f"pipelines.{formatted_name}"
+
+        # Load the module
+        module = load_module(module_name, reload=reload)
+
+        # Cache the module (will be stored in consolidated cache when pipeline is created)
+        # For now, we'll update the existing cache entry if it exists
+        if name in self._pipeline_data_cache:
+            self._pipeline_data_cache[name].module = module
+        else:
+            self._pipeline_data_cache[name] = CachedPipelineData(
+                pipeline=None,  # type: ignore
+                config=None,  # type: ignore
+                module=module,
+            )
+
+        return module
+
+    def clear_cache(self, name: str | None = None):
+        """Clear cached pipelines, configurations, and modules.
+
+        Args:
+            name: If provided, clear cache only for this pipeline.
+                  If None, clear entire cache.
+        """
+        if name:
+            logger.debug(f"Clearing cache for pipeline '{name}'")
+            self._pipeline_data_cache.pop(name, None)
+        else:
+            logger.debug("Clearing entire pipeline cache")
+            self._pipeline_data_cache.clear()
+
     # --- Methods moved from PipelineManager ---
     def new(self, name: str, overwrite: bool = False):
         """
@@ -80,7 +335,6 @@
         Args:
             name (str): The name of the pipeline.
             overwrite (bool): Whether to overwrite an existing pipeline. Defaults to False.
-            job_queue_type (str | None): The type of worker to use. Defaults to None.

         Raises:
             ValueError: If the configuration or pipeline path does not exist, or if the pipeline already exists.
@@ -101,7 +355,7 @@

         formatted_name = name.replace(".", "/").replace("-", "_")
         pipeline_file = posixpath.join(self._pipelines_dir, f"{formatted_name}.py")
-        cfg_file = posixpath.join(self._cfg_dir, "pipelines", f"{formatted_name}.yml")
+        cfg_file = posixpath.join(self._cfg_dir, PIPELINES_DIR, f"{formatted_name}.yml")

         def check_and_handle(path: str):
             if self._fs.exists(path):
@@ -158,7 +412,7 @@
         deleted_files = []
         if cfg:
             pipeline_cfg_path = posixpath.join(
-                self._cfg_dir, "pipelines", f"{name}.yml"
+                self._cfg_dir, PIPELINES_DIR, f"{name}.yml"
             )
             if self._fs.exists(pipeline_cfg_path):
                 self._fs.rm(pipeline_cfg_path)
@@ -204,11 +458,16 @@
         """
         try:
             return self._fs.glob(posixpath.join(self._pipelines_dir, "*.py"))
-        except Exception as e:
+        except (OSError, PermissionError) as e:
             logger.error(
                 f"Error accessing pipeline directory {self._pipelines_dir}: {e}"
             )
             return []
+        except Exception as e:
+            logger.error(
+                f"Unexpected error accessing pipeline directory {self._pipelines_dir}: {e}"
+            )
+            return []

     def _get_names(self) -> list[str]:
         """
@@ -272,11 +531,16 @@
         except FileNotFoundError:
             logger.warning(f"Module file not found for pipeline '{name}'")
             pipeline_summary["module"] = "# Module file not found"
-        except Exception as e:
+        except (OSError, PermissionError, UnicodeDecodeError) as e:
             logger.error(
                 f"Error reading module file for pipeline '{name}': {e}"
             )
             pipeline_summary["module"] = f"# Error reading module file: {e}"
+        except Exception as e:
+            logger.error(
+                f"Unexpected error reading module file for pipeline '{name}': {e}"
+            )
+            pipeline_summary["module"] = f"# Unexpected error reading module file: {e}"

         if pipeline_summary:  # Only add if cfg or code was requested and found
             summary["pipelines"][name] = pipeline_summary
@@ -445,9 +709,12 @@
                 size = f"{size_bytes / 1024:.1f} KB" if size_bytes else "0.0 KB"
             except NotImplementedError:
                 size = "N/A"
-            except Exception as e:
+            except (OSError, PermissionError) as e:
                 logger.warning(f"Could not get size for {path}: {e}")
                 size = "Error"
+            except Exception as e:
+                logger.warning(f"Unexpected error getting size for {path}: {e}")
+                size = "Error"

             pipeline_info.append({
                 "name": name,
flowerpower/pipeline/visualizer.py

@@ -1,12 +1,11 @@
 import posixpath
-from typing import Any

+from fsspec_utils import AbstractFileSystem
 from hamilton import driver
 from rich import print

 # Import necessary config types and utility functions
 from ..cfg import PipelineConfig, ProjectConfig
-from ..fs import AbstractFileSystem
 from ..utils.misc import view_img
 from .base import load_module  # Import module loading utility

@@ -26,8 +25,8 @@ class PipelineVisualizer:
         self._fs = fs
         # Attributes like fs and base_dir are accessed via self.project_cfg

-    def _display_all_function(self, name: str, reload: bool = False):
-        """Internal helper to load module/config and get the Hamilton DAG object.
+    def _get_dag_object(self, name: str, reload: bool = False):
+        """Get the Hamilton DAG object for a pipeline.

         Args:
             name (str): The name of the pipeline.
@@ -85,7 +84,7 @@
         >>> visualizer = PipelineVisualizer(project_cfg, fs)
         >>> visualizer.save_dag(name="example_pipeline", format="png")
         """
-        dag = self._display_all_function(name=name, reload=reload)
+        dag = self._get_dag_object(name=name, reload=reload)

         # Use project_cfg attributes for path and filesystem access
         graph_dir = posixpath.join(self.project_cfg.base_dir, "graphs")
@@ -134,7 +133,7 @@
         >>> visualizer = PipelineVisualizer(project_cfg, fs)
         >>> visualizer.show_dag(name="example_pipeline", format="png")
         """
-        dag = self._display_all_function(name=name, reload=reload)
+        dag = self._get_dag_object(name=name, reload=reload)
         if raw:
             return dag
         # Use view_img utility to display the rendered graph
flowerpower/plugins/io/__init__.py

@@ -0,0 +1,8 @@
+import warnings
+
+warnings.warn(
+    "The flowerpower.plugins.io module is deprecated. "
+    "Please use 'flowerpower-io' instead. Install it with 'pip install flowerpower-io'.",
+    DeprecationWarning,
+    stacklevel=2,
+)

flowerpower/plugins/mqtt/__init__.py

@@ -1,12 +1,8 @@
-from .cfg import MqttConfig
-from .manager import MqttManager, run_pipeline_on_message, start_listener
+import warnings

-MQTTManager = MqttManager
-
-__all__ = [
-    "MqttConfig",
-    "MqttManager",
-    "MQTTManager",
-    "start_listener",
-    "run_pipeline_on_message",
-]
+warnings.warn(
+    "The flowerpower.plugins.mqtt module is deprecated. "
+    "Please use 'flowerpower-mqtt' instead. Install it with 'pip install flowerpower-mqtt'.",
+    DeprecationWarning,
+    stacklevel=2,
+)
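
Note: both plugin packages are now import-time deprecation shims. A small sketch of the observable behavior; the simplefilter call is only there because CPython's default filters hide DeprecationWarning outside __main__:

```python
import warnings

warnings.simplefilter("always", DeprecationWarning)

import flowerpower.plugins.io    # -> DeprecationWarning: use 'flowerpower-io'
import flowerpower.plugins.mqtt  # -> DeprecationWarning: use 'flowerpower-mqtt'
```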
flowerpower/settings/__init__.py

@@ -1,8 +1,6 @@
 # flake8: noqa
-from .backend import *
 from .executor import *
 from .general import *
 from .hamilton import *
-from .job_queue import *
 from .logging import *
 from .retry import *

flowerpower/settings/_backend.py (renamed from backend.py)

@@ -1,5 +1,3 @@
-import os
-
 # Define backend properties in a dictionary for easier maintenance

 BACKEND_PROPERTIES = {
@@ -49,14 +47,6 @@ BACKEND_PROPERTIES = {
         "default_password": None,
         "is_sqla_type": False,
     },
-    "redis": {
-        "uri_prefix": "redis://",
-        "default_port": 6379,
-        "default_host": "localhost",
-        "default_database": 0,
-        "default_username": None,
-        "default_password": None,
-    },
     "nats_kv": {
         "uri_prefix": "nats://",
         "default_port": 4222,
@@ -75,17 +65,6 @@
     },
 }

-# # REDIS ENVIRONMENT VARIABLES
-# REDIS_HOST = os.getenv("FP_REDIS_HOST", BACKEND_PROPERTIES["redis"]["default_host"])
-# REDIS_PORT = int(
-#     os.getenv("FP_REDIS_PORT", BACKEND_PROPERTIES["redis"]["default_port"])
-# )
-# REDIS_DB = int(
-#     os.getenv("FP_REDIS_DB", BACKEND_PROPERTIES["redis"]["default_database"])
-# )
-# REDIS_PASSWORD = os.getenv("FP_REDIS_PASSWORD", None)
-# REDIS_USERNAME = os.getenv("FP_REDIS_USERNAME", None)
-# REDIS_SSL = bool(os.getenv("FP_REDIS_SSL", False))

 # # POSTGRES ENVIRONMENT VARIABLES
 # POSTGRES_HOST = os.getenv(

flowerpower/settings/logging.py

@@ -1,4 +1,4 @@
 import os

 # LOGGING
-LOG_LEVEL = os.getenv("FP_LOG_LEVEL", "INFO")
+LOG_LEVEL = os.getenv("FP_LOG_LEVEL", "CRITICAL")
flowerpower/utils/logging.py

@@ -1,23 +1,35 @@
+import os
 import sys

 from loguru import logger

-from ..settings import LOG_LEVEL  # Import the setting
+from ..settings import LOG_LEVEL


-def setup_logging(level: str = LOG_LEVEL, disable: bool = False) -> None:
+def setup_logging(level: str | None = None) -> None:
     """
     Configures the Loguru logger.

-    Removes the default handler and adds a new one targeting stderr
-    with the level specified by the FP_LOG_LEVEL setting.
+    Determines the logging level based on the following precedence:
+    1. The 'level' argument passed to the function.
+    2. The 'FP_LOG_LEVEL' environment variable.
+    3. The 'LOG_LEVEL' from ..settings (which defaults to "CRITICAL").
+
+    If the effective logging level is "CRITICAL", logging for the "flowerpower" module
+    is disabled. Otherwise, logging is enabled and configured.
     """
-    logger.remove()  # Remove the default handler added by Loguru
-    logger.add(
-        sys.stderr,
-        level=level.upper(),  # Use the level from the parameter, ensure it's uppercase
-        format="<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | <level>{level: <8}</level> | <cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - <level>{message}</level>",  # Example format
-    )
-    if disable:
+    # Remove all existing handlers to prevent duplicate logs
+    logger.remove()
+
+    # Determine the effective logging level
+    effective_level = level or os.getenv("FP_LOG_LEVEL") or LOG_LEVEL
+
+    if effective_level.upper() == "CRITICAL":
         logger.disable("flowerpower")
-    # logger.info(f"Log level set to: {FP_LOG_LEVEL.upper()}")
+    else:
+        logger.enable("flowerpower")
+        logger.add(
+            sys.stderr,
+            level=effective_level.upper(),
+            format="<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | <level>{level: <8}</level> | <cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - <level>{message}</level>",
+        )
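
Note: combined with the CRITICAL default in settings/logging.py above, flowerpower is now silent out of the box. A sketch of the new precedence (explicit argument, then FP_LOG_LEVEL, then the settings default), assuming a fresh process where FP_LOG_LEVEL is initially unset:

```python
import os

from flowerpower.utils.logging import setup_logging

setup_logging()              # no argument, no env var -> CRITICAL: logging disabled

os.environ["FP_LOG_LEVEL"] = "DEBUG"
setup_logging()              # env var beats the settings default -> DEBUG enabled

setup_logging(level="INFO")  # explicit argument beats everything -> INFO enabled
```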