FlowerPower 0.9.13.1__py3-none-any.whl → 1.0.0b2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85) hide show
  1. flowerpower/__init__.py +17 -2
  2. flowerpower/cfg/__init__.py +201 -149
  3. flowerpower/cfg/base.py +122 -24
  4. flowerpower/cfg/pipeline/__init__.py +254 -0
  5. flowerpower/cfg/pipeline/adapter.py +66 -0
  6. flowerpower/cfg/pipeline/run.py +40 -11
  7. flowerpower/cfg/pipeline/schedule.py +69 -79
  8. flowerpower/cfg/project/__init__.py +149 -0
  9. flowerpower/cfg/project/adapter.py +57 -0
  10. flowerpower/cfg/project/job_queue.py +165 -0
  11. flowerpower/cli/__init__.py +92 -37
  12. flowerpower/cli/job_queue.py +878 -0
  13. flowerpower/cli/mqtt.py +32 -1
  14. flowerpower/cli/pipeline.py +559 -406
  15. flowerpower/cli/utils.py +29 -18
  16. flowerpower/flowerpower.py +12 -8
  17. flowerpower/fs/__init__.py +20 -2
  18. flowerpower/fs/base.py +350 -26
  19. flowerpower/fs/ext.py +797 -216
  20. flowerpower/fs/storage_options.py +1097 -55
  21. flowerpower/io/base.py +13 -18
  22. flowerpower/io/loader/__init__.py +28 -0
  23. flowerpower/io/loader/deltatable.py +7 -10
  24. flowerpower/io/metadata.py +1 -0
  25. flowerpower/io/saver/__init__.py +28 -0
  26. flowerpower/io/saver/deltatable.py +4 -3
  27. flowerpower/job_queue/__init__.py +252 -0
  28. flowerpower/job_queue/apscheduler/__init__.py +11 -0
  29. flowerpower/job_queue/apscheduler/_setup/datastore.py +110 -0
  30. flowerpower/job_queue/apscheduler/_setup/eventbroker.py +93 -0
  31. flowerpower/job_queue/apscheduler/manager.py +1063 -0
  32. flowerpower/job_queue/apscheduler/setup.py +524 -0
  33. flowerpower/job_queue/apscheduler/trigger.py +169 -0
  34. flowerpower/job_queue/apscheduler/utils.py +309 -0
  35. flowerpower/job_queue/base.py +382 -0
  36. flowerpower/job_queue/rq/__init__.py +10 -0
  37. flowerpower/job_queue/rq/_trigger.py +37 -0
  38. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +226 -0
  39. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +231 -0
  40. flowerpower/job_queue/rq/manager.py +1449 -0
  41. flowerpower/job_queue/rq/setup.py +150 -0
  42. flowerpower/job_queue/rq/utils.py +69 -0
  43. flowerpower/pipeline/__init__.py +5 -0
  44. flowerpower/pipeline/base.py +118 -0
  45. flowerpower/pipeline/io.py +407 -0
  46. flowerpower/pipeline/job_queue.py +505 -0
  47. flowerpower/pipeline/manager.py +1586 -0
  48. flowerpower/pipeline/registry.py +560 -0
  49. flowerpower/pipeline/runner.py +560 -0
  50. flowerpower/pipeline/visualizer.py +142 -0
  51. flowerpower/plugins/mqtt/__init__.py +12 -0
  52. flowerpower/plugins/mqtt/cfg.py +16 -0
  53. flowerpower/plugins/mqtt/manager.py +789 -0
  54. flowerpower/settings.py +110 -0
  55. flowerpower/utils/logging.py +21 -0
  56. flowerpower/utils/misc.py +57 -9
  57. flowerpower/utils/sql.py +122 -24
  58. flowerpower/utils/templates.py +2 -142
  59. flowerpower-1.0.0b2.dist-info/METADATA +324 -0
  60. flowerpower-1.0.0b2.dist-info/RECORD +94 -0
  61. flowerpower/_web/__init__.py +0 -61
  62. flowerpower/_web/routes/config.py +0 -103
  63. flowerpower/_web/routes/pipelines.py +0 -173
  64. flowerpower/_web/routes/scheduler.py +0 -136
  65. flowerpower/cfg/pipeline/tracker.py +0 -14
  66. flowerpower/cfg/project/open_telemetry.py +0 -8
  67. flowerpower/cfg/project/tracker.py +0 -11
  68. flowerpower/cfg/project/worker.py +0 -19
  69. flowerpower/cli/scheduler.py +0 -309
  70. flowerpower/cli/web.py +0 -44
  71. flowerpower/event_handler.py +0 -23
  72. flowerpower/mqtt.py +0 -609
  73. flowerpower/pipeline.py +0 -2499
  74. flowerpower/scheduler.py +0 -680
  75. flowerpower/tui.py +0 -79
  76. flowerpower/utils/datastore.py +0 -186
  77. flowerpower/utils/eventbroker.py +0 -127
  78. flowerpower/utils/executor.py +0 -58
  79. flowerpower/utils/trigger.py +0 -140
  80. flowerpower-0.9.13.1.dist-info/METADATA +0 -586
  81. flowerpower-0.9.13.1.dist-info/RECORD +0 -76
  82. /flowerpower/{cfg/pipeline/params.py → cli/worker.py} +0 -0
  83. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b2.dist-info}/WHEEL +0 -0
  84. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b2.dist-info}/entry_points.txt +0 -0
  85. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,110 @@
1
import os


def _env_bool(name: str, default: bool = False) -> bool:
    """Read an environment variable as a boolean.

    Treats "1", "true", "yes" and "on" (case-insensitive) as True.
    A plain ``bool(os.getenv(...))`` is wrong here: any non-empty
    string -- including "false" or "0" -- is truthy.
    """
    value = os.getenv(name)
    if value is None:
        return default
    return value.strip().lower() in ("1", "true", "yes", "on")


# Project layout (all overridable via environment variables)
PIPELINES_DIR = os.getenv("FP_PIPELINES_DIR", "pipelines")
CONFIG_DIR = os.getenv("FP_CONFIG_DIR", "conf")
HOOKS_DIR = os.getenv("FP_HOOKS_DIR", "hooks")

# EXECUTOR
EXECUTOR = os.getenv("FP_EXECUTOR", "threadpool")
# os.cpu_count() may return None; guard it so the arithmetic never raises
# TypeError (the previous `os.cpu_count() * 5 or 10` crashed on None
# before the `or` fallback could apply).
EXECUTOR_MAX_WORKERS = int(
    os.getenv("FP_EXECUTOR_MAX_WORKERS", (os.cpu_count() or 2) * 5)
)
EXECUTOR_NUM_CPUS = int(os.getenv("FP_EXECUTOR_NUM_CPUS", os.cpu_count() or 1))

# LOGGING
LOG_LEVEL = os.getenv("FP_LOG_LEVEL", "INFO")

# WORKER
DEFAULT_JOB_QUEUE = os.getenv("FP_JOB_QUEUE_TYPE", "rq")

# RQ WORKER
RQ_BACKEND = os.getenv("FP_RQ_BACKEND", "redis")
# Comma-separated list; whitespace is stripped so "a, b" and "a,b" agree.
RQ_QUEUES = (
    os.getenv("FP_RQ_QUEUES", "default, high, low, scheduler")
    .replace(" ", "")
    .split(",")
)
RQ_NUM_WORKERS = int(os.getenv("FP_RQ_NUM_WORKERS", EXECUTOR_NUM_CPUS))

# APS WORKER
APS_BACKEND_DS = os.getenv("FP_APS_DS_BACKEND", "postgresql")
APS_SCHEMA_DS = os.getenv("FP_APS_SCHEMA", "flowerpower")
APS_BACKEND_EB = os.getenv("FP_APS_EB_BACKEND", "postgresql")
APS_CLEANUP_INTERVAL = int(os.getenv("FP_APS_CLEANUP_INTERVAL", 300))
APS_MAX_CONCURRENT_JOBS = int(os.getenv("FP_APS_MAX_CONCURRENT_JOBS", 10))
APS_DEFAULT_EXECUTOR = os.getenv("FP_APS_DEFAULT_EXECUTOR", EXECUTOR)
APS_NUM_WORKERS = int(os.getenv("FP_APS_NUM_WORKERS", EXECUTOR_MAX_WORKERS))

# Define backend properties in a dictionary for easier maintenance.
# `is_sqla_type` marks backends addressed through SQLAlchemy-style URIs.
BACKEND_PROPERTIES = {
    "postgresql": {
        "uri_prefix": "postgresql+asyncpg://",
        "default_port": 5432,
        "default_host": "localhost",
        "default_database": "postgres",
        "default_username": "postgres",
        "is_sqla_type": True,
    },
    "mysql": {
        "uri_prefix": "mysql+aiomysql://",
        "default_port": 3306,
        "default_host": "localhost",
        "default_database": "mysql",
        "default_username": "root",
        "is_sqla_type": True,
    },
    "sqlite": {
        "uri_prefix": "sqlite+aiosqlite://",
        "default_port": None,
        "default_host": "",
        "default_database": "",
        "is_sqla_type": True,
        "is_sqlite_type": True,
    },
    "mongodb": {
        "uri_prefix": "mongodb://",
        "default_port": 27017,
        "default_host": "localhost",
        "default_database": "admin",
        "is_sqla_type": False,
    },
    "mqtt": {
        "uri_prefix": "mqtt://",
        "default_port": 1883,
        "default_host": "localhost",
        "default_database": "mqtt",
    },
    "redis": {
        "uri_prefix": "redis://",
        "default_port": 6379,
        "default_host": "localhost",
        "default_database": 0,
    },
    "nats_kv": {
        "uri_prefix": "nats://",
        "default_port": 4222,
        "default_host": "localhost",
        "default_database": "default",
    },
    "memory": {
        "uri_prefix": "memory://",
        "default_port": None,
        "default_host": "",
        "default_database": "",
    },
}

# HAMILTON
HAMILTON_MAX_LIST_LENGTH_CAPTURE = int(
    os.getenv("HAMILTON_MAX_LIST_LENGTH_CAPTURE", 50)
)
HAMILTON_MAX_DICT_LENGTH_CAPTURE = int(
    os.getenv("HAMILTON_MAX_DICT_LENGTH_CAPTURE", 10)
)
# Previously `bool(os.getenv(..., True))`, which made any non-empty env
# value (even "false") evaluate to True; parse the string properly.
HAMILTON_CAPTURE_DATA_STATISTICS = _env_bool(
    "HAMILTON_CAPTURE_DATA_STATISTICS", True
)

HAMILTON_AUTOLOAD_EXTENSIONS = int(os.getenv("HAMILTON_AUTOLOAD_EXTENSIONS", 0))
HAMILTON_TELEMETRY_ENABLED = _env_bool("HAMILTON_TELEMETRY_ENABLED", False)
HAMILTON_API_URL = os.getenv("HAMILTON_API_URL", "http://localhost:8241")
HAMILTON_UI_URL = os.getenv("HAMILTON_UI_URL", "http://localhost:8242")
@@ -0,0 +1,21 @@
1
import sys

from loguru import logger

from ..settings import LOG_LEVEL  # Import the setting

# Single stderr sink format: timestamp | level | origin - message.
_LOG_FORMAT = (
    "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | "
    "<level>{level: <8}</level> | "
    "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
    "<level>{message}</level>"
)


def setup_logging(level: str = LOG_LEVEL) -> None:
    """
    Configures the Loguru logger.

    Drops Loguru's built-in default handler and installs a single
    stderr sink whose minimum level defaults to the FP_LOG_LEVEL
    setting (case-insensitive; normalized to uppercase).
    """
    # Start from a clean slate so repeated calls don't stack handlers.
    logger.remove()
    logger.add(sys.stderr, level=level.upper(), format=_LOG_FORMAT)
flowerpower/utils/misc.py CHANGED
@@ -3,8 +3,11 @@ import os
3
3
  import subprocess
4
4
  import tempfile
5
5
  import time
6
+ from typing import Any
6
7
 
7
- import tqdm
8
+ import msgspec
9
+
10
+ # import tqdm
8
11
 
9
12
  if importlib.util.find_spec("pyarrow"):
10
13
  import pyarrow as pa
@@ -150,12 +153,10 @@ if importlib.util.find_spec("polars"):
150
153
  next(v for v in data.values() if isinstance(v, (list, tuple)))
151
154
  )
152
155
  # Convert to DataFrame where each list element becomes a row
153
- data = pl.DataFrame(
154
- {
155
- k: v if isinstance(v, (list, tuple)) else [v] * length
156
- for k, v in data.items()
157
- }
158
- )
156
+ data = pl.DataFrame({
157
+ k: v if isinstance(v, (list, tuple)) else [v] * length
158
+ for k, v in data.items()
159
+ })
159
160
  else:
160
161
  # If values are scalars, wrap them in a list to create a single row
161
162
  data = pl.DataFrame({k: [v] for k, v in data.items()})
@@ -257,6 +258,7 @@ if (
257
258
  data = [pa.Table.from_batches([dd]) for dd in data]
258
259
 
259
260
  return data
261
+
260
262
  else:
261
263
 
262
264
  def to_pyarrow_table(*args, **kwargs):
@@ -345,8 +347,8 @@ if importlib.util.find_spec("joblib"):
345
347
  all_iterables = iterables + list(iterable_kwargs.values())
346
348
  param_combinations = list(zip(*all_iterables)) # Convert to list for tqdm
347
349
 
348
- if verbose:
349
- param_combinations = tqdm.tqdm(param_combinations)
350
+ # if verbose:
351
+ # param_combinations = tqdm.tqdm(param_combinations)
350
352
 
351
353
  return Parallel(**parallel_kwargs)(
352
354
  delayed(func)(
@@ -410,3 +412,49 @@ def view_img(data: str | bytes, format: str = "svg"):
410
412
 
411
413
  time.sleep(2) # Wait for viewer to open
412
414
  os.unlink(tmp_path)
415
+
416
+
417
def update_config_from_dict(
    struct: msgspec.Struct, data: dict[str, Any]
) -> msgspec.Struct:
    """
    Updates a msgspec.Struct instance with values from a dictionary.
    Handles nested msgspec.Struct objects and nested dictionaries.

    Keys in ``data`` that do not exist on the struct are silently ignored.
    The input struct is not mutated; a new instance is returned.

    Args:
        struct: The msgspec.Struct object to update.
        data: Dictionary containing update values; nested dicts are
            merged recursively into nested struct fields.

    Returns:
        Updated msgspec.Struct instance of the same type as ``struct``.
    """
    # Convert the struct to a dictionary for easier manipulation
    obj_dict = msgspec.to_builtins(struct)

    # Update the dictionary recursively
    for key, value in data.items():
        if key in obj_dict:
            if isinstance(value, dict) and isinstance(obj_dict[key], dict):
                # Handle nested dictionaries (covers nested structs too,
                # since to_builtins renders them as dicts)
                obj_dict[key] = update_nested_dict(obj_dict[key], value)
            else:
                # Direct update for non-nested values
                obj_dict[key] = value

    # Convert back to the original struct type
    return msgspec.convert(obj_dict, type(struct))
446
+
447
+
448
def update_nested_dict(
    original: dict[str, Any], updates: dict[str, Any]
) -> dict[str, Any]:
    """Return a copy of ``original`` with ``updates`` merged in.

    Values from ``updates`` take precedence; when both sides hold a
    dict under the same key the merge recurses instead of replacing
    the nested dict wholesale. ``original`` is left unmodified.
    """
    merged = dict(original)
    for key, new_value in updates.items():
        current = merged.get(key)
        if isinstance(new_value, dict) and isinstance(current, dict):
            # Both sides are dicts: descend rather than overwrite.
            merged[key] = update_nested_dict(current, new_value)
        else:
            merged[key] = new_value
    return merged
flowerpower/utils/sql.py CHANGED
@@ -2,8 +2,8 @@ import datetime as dt
2
2
  import re
3
3
  from functools import lru_cache
4
4
  from typing import Any
5
+ from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
5
6
 
6
- import pendulum as pdl
7
7
  import pyarrow as pa
8
8
  import pyarrow.compute as pc
9
9
  from sqlglot import exp, parse_one
@@ -13,38 +13,136 @@ from .polars import pl
13
13
 
14
14
  @lru_cache(maxsize=128)
15
15
  def timestamp_from_string(
16
- timestamp: str,
16
+ timestamp_str: str,
17
17
  tz: str | None = None,
18
- exact: bool = True,
19
- strict: bool = False,
20
18
  naive: bool = False,
21
- ) -> pdl.DateTime | pdl.Date | pdl.Time | dt.datetime | dt.date | dt.time:
19
+ ) -> dt.datetime | dt.date | dt.time:
22
20
  """
23
- Converts a string like "2023-01-01 10:00:00" into a datetime.datetime object.
21
+ Converts a timestamp string (ISO 8601 format) into a datetime, date, or time object
22
+ using only standard Python libraries.
23
+
24
+ Handles strings with or without timezone information (e.g., '2023-01-01T10:00:00+02:00',
25
+ '2023-01-01', '10:00:00'). Supports timezone offsets like '+HH:MM' or '+HHMM'.
26
+ For named timezones (e.g., 'Europe/Paris'), requires Python 3.9+ and the 'tzdata'
27
+ package to be installed.
24
28
 
25
29
  Args:
26
- string (str): The string representation of the timestamp, e.g. "2023-01-01 10:00:00".
27
- tz (str, optional): The timezone to use for the timestamp. Defaults to None.
28
- exact (bool, optional): Whether to use exact parsing. Defaults to True.
29
- strict (bool, optional): Whether to use strict parsing. Defaults to False.
30
- naive (bool, optional): Whether to return a naive datetime without a timezone. Defaults to False.
30
+ timestamp_str (str): The string representation of the timestamp (ISO 8601 format).
31
+ tz (str, optional): Target timezone identifier (e.g., 'UTC', '+02:00', 'Europe/Paris').
32
+ If provided, the output datetime/time will be localized or converted to this timezone.
33
+ Defaults to None.
34
+ naive (bool, optional): If True, return a naive datetime/time (no timezone info),
35
+ even if the input string or `tz` parameter specifies one. Defaults to False.
31
36
 
32
37
  Returns:
33
- datetime.datetime: The datetime object.
34
- """
35
- # Extract the timezone from the string if not provided
36
- # tz = extract_timezone(timestamp) if tz is None else tz
37
- # timestamp = timestamp.replace(tz, "").strip() if tz else timestamp
38
-
39
- pdl_timestamp = pdl.parse(timestamp, exact=exact, strict=strict)
38
+ Union[dt.datetime, dt.date, dt.time]: The parsed datetime, date, or time object.
40
39
 
41
- if isinstance(pdl_timestamp, pdl.DateTime):
42
- if tz is not None:
43
- pdl_timestamp = pdl_timestamp.naive().set(tz=tz)
44
- if naive or tz is None:
45
- pdl_timestamp = pdl_timestamp.naive()
40
+ Raises:
41
+ ValueError: If the timestamp string format is invalid or the timezone is
42
+ invalid/unsupported.
43
+ """
46
44
 
47
- return pdl_timestamp
45
+ # Regex to parse timezone offsets like +HH:MM or +HHMM
46
+ _TZ_OFFSET_REGEX = re.compile(r"([+-])(\d{2}):?(\d{2})")
47
+
48
+ def _parse_tz_offset(tz_str: str) -> dt.tzinfo | None:
49
+ """Parses a timezone offset string into a timezone object."""
50
+ match = _TZ_OFFSET_REGEX.fullmatch(tz_str)
51
+ if match:
52
+ sign, hours, minutes = match.groups()
53
+ offset_seconds = (int(hours) * 3600 + int(minutes) * 60) * (
54
+ -1 if sign == "-" else 1
55
+ )
56
+ if abs(offset_seconds) >= 24 * 3600:
57
+ raise ValueError(f"Invalid timezone offset: {tz_str}")
58
+ return dt.timezone(dt.timedelta(seconds=offset_seconds), name=tz_str)
59
+ return None
60
+
61
+ def _get_tzinfo(tz_identifier: str | None) -> dt.tzinfo | None:
62
+ """Gets a tzinfo object from a string (offset or IANA name)."""
63
+ if tz_identifier is None:
64
+ return None
65
+ if tz_identifier.upper() == "UTC":
66
+ return dt.timezone.utc
67
+
68
+ # Try parsing as offset first
69
+ offset_tz = _parse_tz_offset(tz_identifier)
70
+ if offset_tz:
71
+ return offset_tz
72
+
73
+ # Try parsing as IANA name using zoneinfo (if available)
74
+ if ZoneInfo:
75
+ try:
76
+ return ZoneInfo(tz_identifier)
77
+ except ZoneInfoNotFoundError:
78
+ raise ValueError(
79
+ f"Timezone '{tz_identifier}' not found. Install 'tzdata' or use offset format."
80
+ )
81
+ except Exception as e: # Catch other potential zoneinfo errors
82
+ raise ValueError(f"Error loading timezone '{tz_identifier}': {e}")
83
+ else:
84
+ # zoneinfo not available
85
+ raise ValueError(
86
+ f"Invalid timezone: '{tz_identifier}'. Use offset format (e.g., '+02:00') "
87
+ "or run Python 3.9+ with 'tzdata' installed for named timezones."
88
+ )
89
+
90
+ target_tz: dt.tzinfo | None = _get_tzinfo(tz)
91
+ parsed_obj: dt.datetime | dt.date | dt.time | None = None
92
+
93
+ # Preprocess: Replace space separator, strip whitespace
94
+ processed_str = timestamp_str.strip().replace(" ", "T")
95
+
96
+ # Attempt parsing (datetime, date, time) using fromisoformat
97
+ try:
98
+ # Python < 3.11 fromisoformat has limitations (e.g., no Z, no +HHMM offset)
99
+ # This implementation assumes Python 3.11+ for full ISO 8601 support via fromisoformat
100
+ # or that input strings use formats compatible with older versions (e.g., +HH:MM)
101
+ parsed_obj = dt.datetime.fromisoformat(processed_str)
102
+ except ValueError:
103
+ try:
104
+ parsed_obj = dt.date.fromisoformat(processed_str)
105
+ except ValueError:
106
+ try:
107
+ # Time parsing needs care, especially with offsets in older Python
108
+ parsed_obj = dt.time.fromisoformat(processed_str)
109
+ except ValueError:
110
+ # Add fallback for simple HH:MM:SS if needed, though less robust
111
+ # try:
112
+ # parsed_obj = dt.datetime.strptime(processed_str, "%H:%M:%S").time()
113
+ # except ValueError:
114
+ raise ValueError(f"Invalid timestamp format: '{timestamp_str}'")
115
+
116
+ # Apply timezone logic if we have a datetime or time object
117
+ if isinstance(parsed_obj, (dt.datetime, dt.time)):
118
+ is_aware = (
119
+ parsed_obj.tzinfo is not None
120
+ and parsed_obj.tzinfo.utcoffset(
121
+ parsed_obj if isinstance(parsed_obj, dt.datetime) else None
122
+ )
123
+ is not None
124
+ )
125
+
126
+ if target_tz:
127
+ if is_aware:
128
+ # Convert existing aware object to target timezone (only for datetime)
129
+ if isinstance(parsed_obj, dt.datetime):
130
+ parsed_obj = parsed_obj.astimezone(target_tz)
131
+ # else: dt.time cannot be converted without a date context. Keep original tz.
132
+ else:
133
+ # Localize naive object to target timezone
134
+ parsed_obj = parsed_obj.replace(tzinfo=target_tz)
135
+ is_aware = True # Object is now considered aware
136
+
137
+ # Handle naive flag: remove tzinfo if requested
138
+ if naive and is_aware:
139
+ parsed_obj = parsed_obj.replace(tzinfo=None)
140
+
141
+ # If it's a date object, tz/naive flags are ignored
142
+ elif isinstance(parsed_obj, dt.date):
143
+ pass
144
+
145
+ return parsed_obj
48
146
 
49
147
 
50
148
  # Compile regex patterns once for efficiency
@@ -1,147 +1,5 @@
1
1
 
2
2
 
3
- # PIPELINE_TEMPLATE = """# ---------------- Pipelines Configuration ----------------- #
4
-
5
- # # ------------------------ Example ------------------------- #
6
- # #
7
- # # path: pipelines
8
- # #
9
- # # ## pipeline parameter
10
- # #
11
- # # params:
12
- # # flow1: ## pipeline name
13
- # # step1: ## step name
14
- # # param1_1: 123 ## step parameters
15
- # # param1_2: abc
16
- # # step2:
17
- # # param2_1: true
18
- # #
19
- # # ## run configuration
20
- # #
21
- # # run:
22
- # # prod: # environment name
23
- # # flow1:
24
- # # inputs: ## input parameters
25
- # # final_vars: [step2] ## final output vars
26
- # # with_tracker: true ## whether to track the run
27
- # #
28
- # # dev:
29
- # # flow1:
30
- # # inputs:
31
- # # final_vars: [step2]
32
- # # with_tracker: false
33
- # #
34
- # # ---------------------------------------------------------- #
35
-
36
- # """
37
-
38
- # SCHEDULER_TEMPLATE = """# ---------------- Scheduler Configuration ----------------- #
39
-
40
- # # ------------------------ Example ------------------------- #
41
- # #
42
- # # ## data store configuration
43
- # #
44
- # # ### postgres
45
- # # data_store:
46
- # # type: sqlalchemy
47
- # # url: postgresql+asyncpg://edge:edge@postgres/flowerpower
48
- # #
49
- # # ### sqlite
50
- # # data_store:
51
- # # type: sqlalchemy
52
- # # url: sqlite+aiosqlite:///flowerpower.db
53
- # #
54
- # # ### memory
55
- # # data_store:
56
- # # type: memory
57
- # #
58
- # # ### mongodb
59
- # # data_store:
60
- # # type: mongodb
61
- # # url: mongodb://localhost:27017/scheduler
62
- # #
63
- # # ## event broker configuration
64
- # #
65
- # # ### postgres
66
- # # event_broker:
67
- # # type: asyncpg
68
- # # url: postgresql+asyncpg://edge:edge@postgres/flowerpower
69
- # #
70
- # # ### mqtt
71
- # # event_broker:
72
- # # type: mqtt
73
- # # host: localhost
74
- # # port: 1883
75
- # # username: edge
76
- # # password: edge
77
-
78
- # # ### redis
79
- # # event_broker:
80
- # # type: redis
81
- # # host: localhost
82
- # # port: 6379
83
-
84
- # # ### local
85
- # # event_broker:
86
- # # type: local # or memory
87
- # #
88
- # # ## cleanup interval configuration
89
- # #
90
- # # cleanup_interval:
91
- # # unit: minutes
92
- # # value: 15
93
- # #
94
- # # ## pipeline schedule configuration
95
- # #
96
- # # pipeline:
97
- # # my_flow:
98
- # # type: cron ## options: interval, calendarinterval, date
99
- # # start_time:
100
- # # end_time:
101
- # # ## optional cron arguments
102
- # # crontab: * * * * *
103
- # # year:
104
- # # month:
105
- # # week:
106
- # # day:
107
- # # days_of_week:
108
- # # hour:
109
- # # minute:
110
- # # second:
111
- # # timezone:
112
- # # ## optional interval arguments
113
- # # weeks:
114
- # # days:
115
- # # hours:
116
- # # minutes:
117
- # # seconds:
118
- # # microseconds:
119
- # #
120
- # # ---------------------------------------------------------- #
121
-
122
- # """
123
-
124
- # TRACKER_TEMPLATE = """# ----------------- Tracker Configuration ------------------ #
125
-
126
- # # ------------------------ Example ------------------------- #
127
- # #
128
- # # username: your.email@example.com
129
- # # api_url: http://localhost:8241
130
- # # ui_url: http://localhost:8242
131
- # # api_key:
132
-
133
- # # pipeline:
134
- # # my_flow:
135
- # # project_id: 1
136
- # # tags:
137
- # # environment: dev
138
- # # version: 1.0
139
- # # TODO: add_more_tags_to_find_your_run_later
140
- # # dag_name: my_flow_123
141
- # #
142
- # # ---------------------------------------------------------- #
143
-
144
- # """
145
3
 
146
4
  PIPELINE_PY_TEMPLATE = """# FlowerPower pipeline {name}.py
147
5
  # Created on {date}
@@ -187,4 +45,6 @@ def {function_name}(payload: bytes, topic: str) -> dict:
187
45
  Returns:
188
46
  dict: The configuration for the pipeline.
189
47
  """
48
+
49
+ pass
190
50
  '''