FlowerPower 0.21.0__py3-none-any.whl → 0.31.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. flowerpower/cfg/__init__.py +143 -25
  2. flowerpower/cfg/base.py +132 -11
  3. flowerpower/cfg/exceptions.py +53 -0
  4. flowerpower/cfg/pipeline/__init__.py +151 -35
  5. flowerpower/cfg/pipeline/adapter.py +1 -0
  6. flowerpower/cfg/pipeline/builder.py +24 -25
  7. flowerpower/cfg/pipeline/builder_adapter.py +142 -0
  8. flowerpower/cfg/pipeline/builder_executor.py +101 -0
  9. flowerpower/cfg/pipeline/run.py +134 -22
  10. flowerpower/cfg/project/__init__.py +59 -14
  11. flowerpower/cfg/project/adapter.py +6 -0
  12. flowerpower/cli/__init__.py +8 -9
  13. flowerpower/cli/cfg.py +0 -38
  14. flowerpower/cli/pipeline.py +121 -83
  15. flowerpower/cli/utils.py +120 -71
  16. flowerpower/flowerpower.py +94 -120
  17. flowerpower/pipeline/config_manager.py +180 -0
  18. flowerpower/pipeline/executor.py +126 -0
  19. flowerpower/pipeline/lifecycle_manager.py +231 -0
  20. flowerpower/pipeline/manager.py +121 -276
  21. flowerpower/pipeline/pipeline.py +66 -278
  22. flowerpower/pipeline/registry.py +45 -4
  23. flowerpower/utils/__init__.py +19 -0
  24. flowerpower/utils/adapter.py +286 -0
  25. flowerpower/utils/callback.py +73 -67
  26. flowerpower/utils/config.py +306 -0
  27. flowerpower/utils/executor.py +178 -0
  28. flowerpower/utils/filesystem.py +194 -0
  29. flowerpower/utils/misc.py +249 -76
  30. flowerpower/utils/security.py +221 -0
  31. {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/METADATA +1 -13
  32. flowerpower-0.31.0.dist-info/RECORD +53 -0
  33. flowerpower/cfg/pipeline/_schedule.py +0 -32
  34. flowerpower/cli/mqtt.py +0 -168
  35. flowerpower/plugins/mqtt/__init__.py +0 -8
  36. flowerpower-0.21.0.dist-info/RECORD +0 -44
  37. {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/WHEEL +0 -0
  38. {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/entry_points.txt +0 -0
  39. {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/licenses/LICENSE +0 -0
  40. {flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/top_level.txt +0 -0
flowerpower/utils/misc.py CHANGED
@@ -8,53 +8,35 @@ from typing import Any
 
  import msgspec
  from fsspec_utils import AbstractFileSystem, filesystem
+ from .security import validate_file_path
 
  if importlib.util.find_spec("joblib"):
  from joblib import Parallel, delayed
  from rich.progress import (BarColumn, Progress, TextColumn,
  TimeElapsedColumn)
 
- def run_parallel(
- func: callable,
- *args,
- n_jobs: int = -1,
- backend: str = "threading",
- verbose: bool = True,
- **kwargs,
- ) -> list[any]:
- """Runs a function for a list of parameters in parallel.
-
+ def _prepare_parallel_args(
+ args: tuple, kwargs: dict
+ ) -> tuple[list, list, dict, dict, int]:
+ """Prepare and validate arguments for parallel execution.
+
  Args:
- func (Callable): function to run in parallel
- *args: Positional arguments. Can be single values or iterables
- n_jobs (int, optional): Number of joblib workers. Defaults to -1
- backend (str, optional): joblib backend. Valid options are
- `loky`,`threading`, `mutliprocessing` or `sequential`. Defaults to "threading"
- verbose (bool, optional): Show progress bar. Defaults to True
- **kwargs: Keyword arguments. Can be single values or iterables
-
+ args: Positional arguments
+ kwargs: Keyword arguments
+
  Returns:
- list[any]: Function output
-
- Examples:
- >>> # Single iterable argument
- >>> run_parallel(func, [1,2,3], fixed_arg=42)
-
- >>> # Multiple iterables in args and kwargs
- >>> run_parallel(func, [1,2,3], val=[7,8,9], fixed=42)
-
- >>> # Only kwargs iterables
- >>> run_parallel(func, x=[1,2,3], y=[4,5,6], fixed=42)
+ tuple: (iterables, fixed_args, iterable_kwargs, fixed_kwargs, first_iterable_len)
+
+ Raises:
+ ValueError: If no iterable arguments or length mismatch
  """
- parallel_kwargs = {"n_jobs": n_jobs, "backend": backend, "verbose": 0}
-
  iterables = []
  fixed_args = []
  iterable_kwargs = {}
  fixed_kwargs = {}
-
  first_iterable_len = None
 
+ # Process positional arguments
  for arg in args:
  if isinstance(arg, (list, tuple)) and not isinstance(arg[0], (list, tuple)):
  iterables.append(arg)
@@ -67,6 +49,7 @@ if importlib.util.find_spec("joblib"):
  else:
  fixed_args.append(arg)
 
+ # Process keyword arguments
  for key, value in kwargs.items():
  if isinstance(value, (list, tuple)) and not isinstance(
  value[0], (list, tuple)
@@ -84,12 +67,45 @@ if importlib.util.find_spec("joblib"):
  if first_iterable_len is None:
  raise ValueError("At least one iterable argument is required")
 
- all_iterables = iterables + list(iterable_kwargs.values())
- param_combinations = list(zip(*all_iterables))
+ return iterables, fixed_args, iterable_kwargs, fixed_kwargs, first_iterable_len
 
- if not verbose:
- return Parallel(**parallel_kwargs)(
- delayed(func)(
+ def _execute_parallel_with_progress(
+ func: callable,
+ iterables: list,
+ fixed_args: list,
+ iterable_kwargs: dict,
+ fixed_kwargs: dict,
+ param_combinations: list,
+ parallel_kwargs: dict,
+ ) -> list:
+ """Execute parallel tasks with progress tracking.
+
+ Args:
+ func: Function to execute
+ iterables: List of iterable arguments
+ fixed_args: List of fixed arguments
+ iterable_kwargs: Dictionary of iterable keyword arguments
+ fixed_kwargs: Dictionary of fixed keyword arguments
+ param_combinations: List of parameter combinations
+ parallel_kwargs: Parallel execution configuration
+
+ Returns:
+ list: Results from parallel execution
+ """
+ results = [None] * len(param_combinations)
+ with Progress(
+ TextColumn("[progress.description]{task.description}"),
+ BarColumn(),
+ "[progress.percentage]{task.percentage:>3.0f}%",
+ TimeElapsedColumn(),
+ transient=True,
+ ) as progress:
+ task = progress.add_task(
+ "Running in parallel...", total=len(param_combinations)
+ )
+
+ def wrapper(idx, param_tuple):
+ res = func(
  *(list(param_tuple[: len(iterables)]) + fixed_args),
  **{
  k: v
@@ -99,41 +115,107 @@ if importlib.util.find_spec("joblib"):
  },
  **fixed_kwargs,
  )
- for param_tuple in param_combinations
- )
- else:
- results = [None] * len(param_combinations)
- with Progress(
- TextColumn("[progress.description]{task.description}"),
- BarColumn(),
- "[progress.percentage]{task.percentage:>3.0f}%",
- TimeElapsedColumn(),
- transient=True,
- ) as progress:
- task = progress.add_task(
- "Running in parallel...", total=len(param_combinations)
- )
+ progress.update(task, advance=1)
+ return idx, res
 
- def wrapper(idx, param_tuple):
- res = func(
- *(list(param_tuple[: len(iterables)]) + fixed_args),
- **{
- k: v
- for k, v in zip(
- iterable_kwargs.keys(), param_tuple[len(iterables) :]
- )
- },
- **fixed_kwargs,
+ for idx, result in Parallel(**parallel_kwargs)(
+ delayed(wrapper)(i, param_tuple)
+ for i, param_tuple in enumerate(param_combinations)
+ ):
+ results[idx] = result
+ return results
+
+ def _execute_parallel_without_progress(
+ func: callable,
+ iterables: list,
+ fixed_args: list,
+ iterable_kwargs: dict,
+ fixed_kwargs: dict,
+ param_combinations: list,
+ parallel_kwargs: dict,
+ ) -> list:
+ """Execute parallel tasks without progress tracking.
+
+ Args:
+ func: Function to execute
+ iterables: List of iterable arguments
+ fixed_args: List of fixed arguments
+ iterable_kwargs: Dictionary of iterable keyword arguments
+ fixed_kwargs: Dictionary of fixed keyword arguments
+ param_combinations: List of parameter combinations
+ parallel_kwargs: Parallel execution configuration
+
+ Returns:
+ list: Results from parallel execution
+ """
+ return Parallel(**parallel_kwargs)(
+ delayed(func)(
+ *(list(param_tuple[: len(iterables)]) + fixed_args),
+ **{
+ k: v
+ for k, v in zip(
+ iterable_kwargs.keys(), param_tuple[len(iterables) :]
  )
- progress.update(task, advance=1)
- return idx, res
+ },
+ **fixed_kwargs,
+ )
+ for param_tuple in param_combinations
+ )
 
- for idx, result in Parallel(**parallel_kwargs)(
- delayed(wrapper)(i, param_tuple)
- for i, param_tuple in enumerate(param_combinations)
- ):
- results[idx] = result
- return results
+ def run_parallel(
+ func: callable,
+ *args,
+ n_jobs: int = -1,
+ backend: str = "threading",
+ verbose: bool = True,
+ **kwargs,
+ ) -> list[any]:
+ """Runs a function for a list of parameters in parallel.
+
+ Args:
+ func (Callable): function to run in parallel
+ *args: Positional arguments. Can be single values or iterables
+ n_jobs (int, optional): Number of joblib workers. Defaults to -1
+ backend (str, optional): joblib backend. Valid options are
+ `loky`,`threading`, `mutliprocessing` or `sequential`. Defaults to "threading"
+ verbose (bool, optional): Show progress bar. Defaults to True
+ **kwargs: Keyword arguments. Can be single values or iterables
+
+ Returns:
+ list[any]: Function output
+
+ Examples:
+ >>> # Single iterable argument
+ >>> run_parallel(func, [1,2,3], fixed_arg=42)
+
+ >>> # Multiple iterables in args and kwargs
+ >>> run_parallel(func, [1,2,3], val=[7,8,9], fixed=42)
+
+ >>> # Only kwargs iterables
+ >>> run_parallel(func, x=[1,2,3], y=[4,5,6], fixed=42)
+ """
+ parallel_kwargs = {"n_jobs": n_jobs, "backend": backend, "verbose": 0}
+
+ # Prepare and validate arguments
+ iterables, fixed_args, iterable_kwargs, fixed_kwargs, first_iterable_len = _prepare_parallel_args(
+ args, kwargs
+ )
+
+ # Create parameter combinations
+ all_iterables = iterables + list(iterable_kwargs.values())
+ param_combinations = list(zip(*all_iterables))
+
+ # Execute with or without progress tracking
+ if not verbose:
+ return _execute_parallel_without_progress(
+ func, iterables, fixed_args, iterable_kwargs, fixed_kwargs,
+ param_combinations, parallel_kwargs
+ )
+ else:
+ return _execute_parallel_with_progress(
+ func, iterables, fixed_args, iterable_kwargs, fixed_kwargs,
+ param_combinations, parallel_kwargs
+ )
 
  else:
 
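The run_parallel refactor above splits argument preparation (_prepare_parallel_args) and execution (_execute_parallel_with_progress / _execute_parallel_without_progress) into helpers while keeping the public signature and the broadcasting behavior described in the docstring. A minimal usage sketch, assuming joblib is installed (the whole block sits behind the find_spec("joblib") check); the add function is a made-up stand-in:

    from flowerpower.utils.misc import run_parallel

    def add(x, y):
        return x + y

    # [1, 2, 3] is detected as an iterable and broadcast element-wise,
    # while y=10 is treated as a fixed keyword argument.
    results = run_parallel(add, [1, 2, 3], y=10, n_jobs=2, verbose=False)
    print(results)  # expected: [11, 12, 13]

With verbose=True the same call routes through _execute_parallel_with_progress and renders the rich progress bar; results come back in input order either way.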
@@ -170,19 +252,110 @@ def get_partitions_from_path(
  return list(zip(partitioning, parts[-len(partitioning) :]))
 
 
- def view_img(data: str | bytes, format: str = "svg"):
- # Create a temporary file with .svg extension
+ def _validate_image_format(format: str) -> str:
+ """Validate image format to prevent injection attacks.
+
+ Args:
+ format: Image format to validate
+
+ Returns:
+ str: Validated format
+
+ Raises:
+ ValueError: If format is not supported
+ """
+ allowed_formats = {"svg", "png", "jpg", "jpeg", "gif", "pdf", "html"}
+ if format not in allowed_formats:
+ raise ValueError(f"Unsupported format: {format}. Allowed: {allowed_formats}")
+ return format
+
+ def _create_temp_image_file(data: str | bytes, format: str) -> str:
+ """Create a temporary file with image data.
+
+ Args:
+ data: Image data as string or bytes
+ format: Validated image format
+
+ Returns:
+ str: Path to temporary file
+
+ Raises:
+ OSError: If file creation fails
+ """
  with tempfile.NamedTemporaryFile(suffix=f".{format}", delete=False) as tmp:
- tmp.write(data)
+ if isinstance(data, str):
+ tmp.write(data.encode('utf-8'))
+ else:
+ tmp.write(data)
  tmp_path = tmp.name
+
+ # Validate the temporary file path for security
+ validate_file_path(tmp_path, allow_relative=False)
+ return tmp_path
+
+ def _open_image_viewer(tmp_path: str) -> None:
+ """Open image viewer with the given file path.
+
+ Args:
+ tmp_path: Path to temporary image file
+
+ Raises:
+ OSError: If platform is not supported
+ subprocess.CalledProcessError: If subprocess fails
+ subprocess.TimeoutExpired: If subprocess times out
+ """
+ import platform
+ platform_system = platform.system()
+
+ if platform_system == "Darwin": # macOS
+ subprocess.run(["open", tmp_path], check=True, timeout=10)
+ elif platform_system == "Linux":
+ subprocess.run(["xdg-open", tmp_path], check=True, timeout=10)
+ elif platform_system == "Windows":
+ subprocess.run(["start", "", tmp_path], shell=True, check=True, timeout=10)
+ else:
+ raise OSError(f"Unsupported platform: {platform_system}")
 
- # Open with default application on macOS
- subprocess.run(["open", tmp_path])
+ def _cleanup_temp_file(tmp_path: str) -> None:
+ """Clean up temporary file.
+
+ Args:
+ tmp_path: Path to temporary file to remove
+ """
+ try:
+ os.unlink(tmp_path)
+ except OSError:
+ pass # File might already be deleted or in use
 
- # Optional: Remove the temp file after a delay
+ def view_img(data: str | bytes, format: str = "svg"):
+ """View image data using the system's default image viewer.
+
+ Args:
+ data: Image data as string or bytes
+ format: Image format (svg, png, jpg, jpeg, gif, pdf, html)
+
+ Raises:
+ ValueError: If format is not supported
+ RuntimeError: If file opening fails
+ OSError: If platform is not supported
+ """
+ # Validate format to prevent injection attacks
+ validated_format = _validate_image_format(format)
+
+ # Create a temporary file with validated extension
+ tmp_path = _create_temp_image_file(data, validated_format)
 
+ try:
+ # Open image viewer with secure subprocess call
+ _open_image_viewer(tmp_path)
+ except (subprocess.CalledProcessError, subprocess.TimeoutExpired, OSError) as e:
+ # Clean up temp file on error
+ _cleanup_temp_file(tmp_path)
+ raise RuntimeError(f"Failed to open file: {e}")
+
+ # Optional: Remove the temp file after a delay
  time.sleep(2) # Wait for viewer to open
- os.unlink(tmp_path)
+ _cleanup_temp_file(tmp_path)
 
 
  def update_config_from_dict(
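The view_img rewrite above replaces the unconditional macOS `open` call with format validation, per-platform viewer commands, and temp-file cleanup. A short sketch of the new entry point, assuming a desktop session with a default SVG viewer; the SVG payload is a stand-in:

    from flowerpower.utils.misc import view_img

    svg = '<svg xmlns="http://www.w3.org/2000/svg" width="10" height="10"></svg>'

    # str data is encoded to UTF-8 before it is written to the temp file; an
    # unsupported format (e.g. format="exe") now raises ValueError before
    # anything is written to disk or handed to a subprocess.
    view_img(svg, format="svg")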
flowerpower/utils/security.py ADDED
@@ -0,0 +1,221 @@
+ """Security utilities for input validation and sanitization."""
+
+ import os
+ import re
+ from pathlib import Path
+ from typing import Any, Dict, List, Optional, Union, TYPE_CHECKING
+
+ if TYPE_CHECKING:
+ from collections.abc import Callable
+
+
+ class SecurityError(Exception):
+ """Raised when security validation fails."""
+ pass
+
+
+ def validate_file_path(path: Union[str, Path],
+ allowed_extensions: Optional[List[str]] = None,
+ allow_absolute: bool = True,
+ allow_relative: bool = True) -> Path:
+ """Validate and sanitize file paths to prevent directory traversal attacks.
+
+ Args:
+ path: File path to validate
+ allowed_extensions: List of allowed file extensions (e.g., ['.yaml', '.yml'])
+ allow_absolute: Whether to allow absolute paths
+ allow_relative: Whether to allow relative paths
+
+ Returns:
+ Validated Path object
+
+ Raises:
+ SecurityError: If path is invalid or potentially dangerous
+ ValueError: If path is empty or None
+ """
+ if not path:
+ raise ValueError("Path cannot be empty or None")
+
+ # Convert to Path object
+ path_obj = Path(path)
+
+ # Check for directory traversal attempts
+ path_str = str(path_obj)
+ if '..' in path_obj.parts or path_str.startswith('..'):
+ raise SecurityError(f"Directory traversal detected in path: {path}")
+
+ # Check absolute vs relative path restrictions
+ if path_obj.is_absolute() and not allow_absolute:
+ raise SecurityError(f"Absolute paths not allowed: {path}")
+
+ if not path_obj.is_absolute() and not allow_relative:
+ raise SecurityError(f"Relative paths not allowed: {path}")
+
+ # Validate file extension if specified
+ if allowed_extensions:
+ if not path_obj.suffix.lower() in [ext.lower() for ext in allowed_extensions]:
+ raise SecurityError(
+ f"File extension '{path_obj.suffix}' not allowed. "
+ f"Allowed: {allowed_extensions}"
+ )
+
+ # Check for potentially dangerous characters
+ dangerous_chars = ['|', '&', ';', '`', '$', '<', '>', '"', "'"]
+ if any(char in path_str for char in dangerous_chars):
+ raise SecurityError(f"Dangerous characters detected in path: {path}")
+
+ return path_obj
+
+
+ def validate_pipeline_name(name: str) -> str:
+ """Validate pipeline name to prevent injection attacks.
+
+ Args:
+ name: Pipeline name to validate
+
+ Returns:
+ Validated name
+
+ Raises:
+ ValueError: If name is invalid
+ SecurityError: If name contains dangerous characters
+ """
+ if not name or not isinstance(name, str):
+ raise ValueError("Pipeline name must be a non-empty string")
+
+ name = name.strip()
+ if not name:
+ raise ValueError("Pipeline name cannot be empty or only whitespace")
+
+ # Check for dangerous characters
+ if not re.match(r'^[a-zA-Z0-9_-]+$', name):
+ raise SecurityError(
+ f"Pipeline name '{name}' contains invalid characters. "
+ "Only alphanumeric, underscore, and hyphen are allowed."
+ )
+
+ # Check length constraints
+ if len(name) > 100:
+ raise SecurityError(f"Pipeline name too long: {len(name)} > 100 characters")
+
+ return name
+
+
+ def validate_config_dict(config: Dict[str, Any],
+ allowed_keys: Optional[List[str]] = None,
+ max_depth: int = 10) -> Dict[str, Any]:
+ """Validate configuration dictionary to prevent malicious content.
+
+ Args:
+ config: Configuration dictionary to validate
+ allowed_keys: List of allowed top-level keys
+ max_depth: Maximum nesting depth to prevent DoS attacks
+
+ Returns:
+ Validated configuration dictionary
+
+ Raises:
+ SecurityError: If configuration contains dangerous content
+ ValueError: If configuration is invalid
+ """
+ if not isinstance(config, dict):
+ raise ValueError("Configuration must be a dictionary")
+
+ # Check for allowed keys
+ if allowed_keys:
+ invalid_keys = set(config.keys()) - set(allowed_keys)
+ if invalid_keys:
+ raise SecurityError(f"Invalid configuration keys: {invalid_keys}")
+
+ # Check nesting depth
+ def check_depth(obj, depth=0):
+ if depth > max_depth:
+ raise SecurityError(f"Configuration nesting too deep: {depth} > {max_depth}")
+
+ if isinstance(obj, dict):
+ for value in obj.values():
+ check_depth(value, depth + 1)
+ elif isinstance(obj, (list, tuple)):
+ for item in obj:
+ check_depth(item, depth + 1)
+
+ check_depth(config)
+
+ return config
+
+
+ def sanitize_log_data(data: Any) -> Any:
+ """Sanitize data for safe logging to prevent log injection.
+
+ Args:
+ data: Data to sanitize for logging
+
+ Returns:
+ Sanitized data safe for logging
+ """
+ if isinstance(data, str):
+ # Remove potential log injection characters
+ sanitized = re.sub(r'[\r\n\t]', ' ', data)
+ # Limit length to prevent log flooding
+ if len(sanitized) > 1000:
+ sanitized = sanitized[:997] + "..."
+ return sanitized
+ elif isinstance(data, (dict, list)):
+ # For complex objects, convert to string and sanitize
+ return sanitize_log_data(str(data))
+ else:
+ return data
+
+
+ def validate_executor_type(executor_type: str) -> str:
+ """Validate executor type to prevent arbitrary code execution.
+
+ Args:
+ executor_type: Executor type string to validate
+
+ Returns:
+ Validated executor type
+
+ Raises:
+ SecurityError: If executor type is invalid or dangerous
+ """
+ if not executor_type or not isinstance(executor_type, str):
+ raise ValueError("Executor type must be a non-empty string")
+
+ allowed_executors = {
+ 'synchronous', 'threadpool', 'processpool', 'ray', 'dask'
+ }
+
+ if executor_type not in allowed_executors:
+ raise SecurityError(
+ f"Invalid executor type: {executor_type}. "
+ f"Allowed types: {allowed_executors}"
+ )
+
+ return executor_type
+
+
+ def validate_callback_function(callback: Any) -> bool:
+ """Validate callback function to ensure it's safe to execute.
+
+ Args:
+ callback: Callback function or callable to validate
+
+ Returns:
+ True if callback is valid
+
+ Raises:
+ SecurityError: If callback is dangerous or invalid
+ """
+ if callback is None:
+ return True
+
+ if not callable(callback):
+ raise SecurityError("Callback must be callable")
+
+ # Check if it's a built-in function that could be dangerous
+ dangerous_functions = {'eval', 'exec', 'compile', '__import__'}
+ if hasattr(callback, '__name__') and callback.__name__ in dangerous_functions:
+ raise SecurityError(f"Dangerous callback function: {callback.__name__}")
+
+ return True
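The new flowerpower/utils/security.py module centralizes input validation for file paths, pipeline names, config dicts, executor types, and callbacks; misc.py already consumes validate_file_path above. A usage sketch of the validators as they appear in this diff; the paths and pipeline name below are made up for illustration:

    from pathlib import Path
    from flowerpower.utils.security import (
        SecurityError,
        validate_file_path,
        validate_pipeline_name,
        validate_executor_type,
    )

    # Relative YAML path with an allowed extension: returned as a Path object.
    cfg = validate_file_path("conf/pipelines/etl.yml", allowed_extensions=[".yaml", ".yml"])
    assert isinstance(cfg, Path)

    validate_pipeline_name("daily_etl-v2")   # only [a-zA-Z0-9_-], max 100 chars
    validate_executor_type("threadpool")     # must be in the allowed executor set

    try:
        validate_file_path("../secrets.yml") # ".." trips the directory-traversal check
    except SecurityError as exc:
        print(f"rejected: {exc}")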
{flowerpower-0.21.0.dist-info → flowerpower-0.31.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: FlowerPower
- Version: 0.21.0
+ Version: 0.31.0
  Summary: A simple workflow framework for building and managing data processing pipelines
  Author-email: "Volker L." <ligno.blades@gmail.com>
  Project-URL: Homepage, https://github.com/legout/flowerpower
@@ -25,26 +25,14 @@ Provides-Extra: io
  Requires-Dist: flowerpower-io>=0.1.1; extra == "io"
  Provides-Extra: io-legacy
  Requires-Dist: flowerpower-io[legacy]>=0.1.1; extra == "io-legacy"
- Provides-Extra: mongodb
- Requires-Dist: pymongo>=4.7.2; extra == "mongodb"
- Provides-Extra: mqtt
- Requires-Dist: paho-mqtt>=2.1.0; extra == "mqtt"
- Requires-Dist: orjson>=3.10.11; extra == "mqtt"
- Requires-Dist: mmh3>=5.1.0; extra == "mqtt"
  Provides-Extra: opentelemetry
  Requires-Dist: opentelemetry-api>=1.5.0; extra == "opentelemetry"
  Requires-Dist: opentelemetry-sdk>=1.5.0; extra == "opentelemetry"
  Requires-Dist: opentelemetry-exporter-jaeger>=1.21.0; extra == "opentelemetry"
  Provides-Extra: ray
  Requires-Dist: ray>=2.34.0; extra == "ray"
- Provides-Extra: tui
- Requires-Dist: textual>=0.85.2; extra == "tui"
  Provides-Extra: ui
  Requires-Dist: sf-hamilton-ui>=0.0.11; extra == "ui"
- Provides-Extra: webserver
- Requires-Dist: sanic>=24.6.0; extra == "webserver"
- Requires-Dist: sanic-ext>=23.12.0; extra == "webserver"
- Requires-Dist: orjson>=3.10.11; extra == "webserver"
  Provides-Extra: openlineage
  Requires-Dist: openlineage-python>=1.32.0; extra == "openlineage"
  Dynamic: license-file