FlowerPower 0.9.12.4__py3-none-any.whl → 1.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowerpower/__init__.py +17 -2
- flowerpower/cfg/__init__.py +201 -149
- flowerpower/cfg/base.py +122 -24
- flowerpower/cfg/pipeline/__init__.py +254 -0
- flowerpower/cfg/pipeline/adapter.py +66 -0
- flowerpower/cfg/pipeline/run.py +40 -11
- flowerpower/cfg/pipeline/schedule.py +69 -79
- flowerpower/cfg/project/__init__.py +149 -0
- flowerpower/cfg/project/adapter.py +57 -0
- flowerpower/cfg/project/job_queue.py +165 -0
- flowerpower/cli/__init__.py +92 -35
- flowerpower/cli/job_queue.py +878 -0
- flowerpower/cli/mqtt.py +49 -4
- flowerpower/cli/pipeline.py +576 -381
- flowerpower/cli/utils.py +55 -0
- flowerpower/flowerpower.py +12 -7
- flowerpower/fs/__init__.py +20 -2
- flowerpower/fs/base.py +350 -26
- flowerpower/fs/ext.py +797 -216
- flowerpower/fs/storage_options.py +1097 -55
- flowerpower/io/base.py +13 -18
- flowerpower/io/loader/__init__.py +28 -0
- flowerpower/io/loader/deltatable.py +7 -10
- flowerpower/io/metadata.py +1 -0
- flowerpower/io/saver/__init__.py +28 -0
- flowerpower/io/saver/deltatable.py +4 -3
- flowerpower/job_queue/__init__.py +252 -0
- flowerpower/job_queue/apscheduler/__init__.py +11 -0
- flowerpower/job_queue/apscheduler/_setup/datastore.py +110 -0
- flowerpower/job_queue/apscheduler/_setup/eventbroker.py +93 -0
- flowerpower/job_queue/apscheduler/manager.py +1063 -0
- flowerpower/job_queue/apscheduler/setup.py +524 -0
- flowerpower/job_queue/apscheduler/trigger.py +169 -0
- flowerpower/job_queue/apscheduler/utils.py +309 -0
- flowerpower/job_queue/base.py +382 -0
- flowerpower/job_queue/rq/__init__.py +10 -0
- flowerpower/job_queue/rq/_trigger.py +37 -0
- flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +226 -0
- flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +231 -0
- flowerpower/job_queue/rq/manager.py +1449 -0
- flowerpower/job_queue/rq/setup.py +150 -0
- flowerpower/job_queue/rq/utils.py +69 -0
- flowerpower/pipeline/__init__.py +5 -0
- flowerpower/pipeline/base.py +118 -0
- flowerpower/pipeline/io.py +407 -0
- flowerpower/pipeline/job_queue.py +505 -0
- flowerpower/pipeline/manager.py +1586 -0
- flowerpower/pipeline/registry.py +560 -0
- flowerpower/pipeline/runner.py +560 -0
- flowerpower/pipeline/visualizer.py +142 -0
- flowerpower/plugins/mqtt/__init__.py +12 -0
- flowerpower/plugins/mqtt/cfg.py +16 -0
- flowerpower/plugins/mqtt/manager.py +789 -0
- flowerpower/settings.py +110 -0
- flowerpower/utils/logging.py +21 -0
- flowerpower/utils/misc.py +57 -9
- flowerpower/utils/sql.py +122 -24
- flowerpower/utils/templates.py +18 -142
- flowerpower/web/app.py +0 -0
- flowerpower-1.0.0b1.dist-info/METADATA +324 -0
- flowerpower-1.0.0b1.dist-info/RECORD +94 -0
- {flowerpower-0.9.12.4.dist-info → flowerpower-1.0.0b1.dist-info}/WHEEL +1 -1
- flowerpower/cfg/pipeline/tracker.py +0 -14
- flowerpower/cfg/project/open_telemetry.py +0 -8
- flowerpower/cfg/project/tracker.py +0 -11
- flowerpower/cfg/project/worker.py +0 -19
- flowerpower/cli/scheduler.py +0 -309
- flowerpower/event_handler.py +0 -23
- flowerpower/mqtt.py +0 -525
- flowerpower/pipeline.py +0 -2419
- flowerpower/scheduler.py +0 -680
- flowerpower/tui.py +0 -79
- flowerpower/utils/datastore.py +0 -186
- flowerpower/utils/eventbroker.py +0 -127
- flowerpower/utils/executor.py +0 -58
- flowerpower/utils/trigger.py +0 -140
- flowerpower-0.9.12.4.dist-info/METADATA +0 -575
- flowerpower-0.9.12.4.dist-info/RECORD +0 -70
- /flowerpower/{cfg/pipeline/params.py → cli/worker.py} +0 -0
- {flowerpower-0.9.12.4.dist-info → flowerpower-1.0.0b1.dist-info}/entry_points.txt +0 -0
- {flowerpower-0.9.12.4.dist-info → flowerpower-1.0.0b1.dist-info}/top_level.txt +0 -0
flowerpower/settings.py
ADDED
@@ -0,0 +1,110 @@
|
|
1
|
+
import os


def _env_bool(name: str, default: bool) -> bool:
    """Read a boolean environment variable.

    A plain ``bool(os.getenv(...))`` treats ANY non-empty string — including
    "false" and "0" — as True. This helper parses the usual spellings instead.

    Args:
        name: Environment variable name.
        default: Value returned when the variable is unset.

    Returns:
        True when the variable is set to one of "1", "true", "yes", "on"
        (case-insensitive), False for any other set value, else ``default``.
    """
    raw = os.getenv(name)
    if raw is None:
        return default
    return raw.strip().lower() in ("1", "true", "yes", "on")


# Project layout directories (relative to the project root).
PIPELINES_DIR = os.getenv("FP_PIPELINES_DIR", "pipelines")
CONFIG_DIR = os.getenv("FP_CONFIG_DIR", "conf")
HOOKS_DIR = os.getenv("FP_HOOKS_DIR", "hooks")

# EXECUTOR
EXECUTOR = os.getenv("FP_EXECUTOR", "threadpool")
# os.cpu_count() can return None; guard it so the default stays 5x CPUs
# (falling back to 10 when the CPU count is unknown).
EXECUTOR_MAX_WORKERS = int(
    os.getenv("FP_EXECUTOR_MAX_WORKERS", (os.cpu_count() or 2) * 5)
)
EXECUTOR_NUM_CPUS = int(os.getenv("FP_EXECUTOR_NUM_CPUS", os.cpu_count() or 1))

# LOGGING
LOG_LEVEL = os.getenv("FP_LOG_LEVEL", "INFO")

# WORKER
DEFAULT_JOB_QUEUE = os.getenv("FP_JOB_QUEUE_TYPE", "rq")
# RQ WORKER
RQ_BACKEND = os.getenv("FP_RQ_BACKEND", "redis")
# Comma-separated list; surrounding whitespace is stripped from each name.
RQ_QUEUES = (
    os.getenv("FP_RQ_QUEUES", "default, high, low, scheduler")
    .replace(" ", "")
    .split(",")
)
RQ_NUM_WORKERS = int(os.getenv("FP_RQ_NUM_WORKERS", EXECUTOR_NUM_CPUS))

# APS WORKER
APS_BACKEND_DS = os.getenv("FP_APS_DS_BACKEND", "postgresql")
APS_SCHEMA_DS = os.getenv("FP_APS_SCHEMA", "flowerpower")
APS_BACKEND_EB = os.getenv("FP_APS_EB_BACKEND", "postgresql")
APS_CLEANUP_INTERVAL = int(os.getenv("FP_APS_CLEANUP_INTERVAL", 300))  # seconds
APS_MAX_CONCURRENT_JOBS = int(os.getenv("FP_APS_MAX_CONCURRENT_JOBS", 10))
APS_DEFAULT_EXECUTOR = os.getenv("FP_APS_DEFAULT_EXECUTOR", EXECUTOR)
APS_NUM_WORKERS = int(os.getenv("FP_APS_NUM_WORKERS", EXECUTOR_MAX_WORKERS))

# Define backend properties in a dictionary for easier maintenance
BACKEND_PROPERTIES = {
    "postgresql": {
        "uri_prefix": "postgresql+asyncpg://",
        "default_port": 5432,
        "default_host": "localhost",
        "default_database": "postgres",
        "default_username": "postgres",
        "is_sqla_type": True,
    },
    "mysql": {
        "uri_prefix": "mysql+aiomysql://",
        "default_port": 3306,
        "default_host": "localhost",
        "default_database": "mysql",
        "default_username": "root",
        "is_sqla_type": True,
    },
    "sqlite": {
        "uri_prefix": "sqlite+aiosqlite://",
        "default_port": None,
        "default_host": "",
        "default_database": "",
        "is_sqla_type": True,
        "is_sqlite_type": True,
    },
    "mongodb": {
        "uri_prefix": "mongodb://",
        "default_port": 27017,
        "default_host": "localhost",
        "default_database": "admin",
        "is_sqla_type": False,
    },
    "mqtt": {
        "uri_prefix": "mqtt://",
        "default_port": 1883,
        "default_host": "localhost",
        "default_database": "mqtt",
    },
    "redis": {
        "uri_prefix": "redis://",
        "default_port": 6379,
        "default_host": "localhost",
        "default_database": 0,
    },
    "nats_kv": {
        "uri_prefix": "nats://",
        "default_port": 4222,
        "default_host": "localhost",
        "default_database": "default",
    },
    "memory": {
        "uri_prefix": "memory://",
        "default_port": None,
        "default_host": "",
        "default_database": "",
    },
}

# HAMILTON
HAMILTON_MAX_LIST_LENGTH_CAPTURE = int(
    os.getenv("HAMILTON_MAX_LIST_LENGTH_CAPTURE", 50)
)
HAMILTON_MAX_DICT_LENGTH_CAPTURE = int(
    os.getenv("HAMILTON_MAX_DICT_LENGTH_CAPTURE", 10)
)
# Boolean env vars go through _env_bool so "false"/"0" are honored;
# bool(os.getenv(...)) would coerce any non-empty string to True.
HAMILTON_CAPTURE_DATA_STATISTICS = _env_bool("HAMILTON_CAPTURE_DATA_STATISTICS", True)

HAMILTON_AUTOLOAD_EXTENSIONS = int(os.getenv("HAMILTON_AUTOLOAD_EXTENSIONS", 0))
HAMILTON_TELEMETRY_ENABLED = _env_bool("HAMILTON_TELEMETRY_ENABLED", False)
HAMILTON_API_URL = os.getenv("HAMILTON_API_URL", "http://localhost:8241")
HAMILTON_UI_URL = os.getenv("HAMILTON_UI_URL", "http://localhost:8242")
@@ -0,0 +1,21 @@
|
|
1
|
+
import sys
|
2
|
+
|
3
|
+
from loguru import logger
|
4
|
+
|
5
|
+
from ..settings import LOG_LEVEL # Import the setting
|
6
|
+
|
7
|
+
|
8
|
+
def setup_logging(level: str = LOG_LEVEL) -> None:
    """
    Configure the Loguru logger for the package.

    Drops Loguru's pre-installed default handler and installs a single
    stderr handler whose minimum level defaults to the FP_LOG_LEVEL setting.

    Args:
        level: Minimum log level name (case-insensitive), e.g. "INFO".
    """
    # Loguru ships with a default handler; remove it so ours is the only one.
    logger.remove()
    log_format = (
        "<green>{time:YYYY-MM-DD HH:mm:ss.SSS}</green> | "
        "<level>{level: <8}</level> | "
        "<cyan>{name}</cyan>:<cyan>{function}</cyan>:<cyan>{line}</cyan> - "
        "<level>{message}</level>"
    )
    # Normalize to uppercase so callers may pass e.g. "debug".
    logger.add(sys.stderr, level=level.upper(), format=log_format)
|
flowerpower/utils/misc.py
CHANGED
@@ -3,8 +3,11 @@ import os
|
|
3
3
|
import subprocess
|
4
4
|
import tempfile
|
5
5
|
import time
|
6
|
+
from typing import Any
|
6
7
|
|
7
|
-
import
|
8
|
+
import msgspec
|
9
|
+
|
10
|
+
# import tqdm
|
8
11
|
|
9
12
|
if importlib.util.find_spec("pyarrow"):
|
10
13
|
import pyarrow as pa
|
@@ -150,12 +153,10 @@ if importlib.util.find_spec("polars"):
|
|
150
153
|
next(v for v in data.values() if isinstance(v, (list, tuple)))
|
151
154
|
)
|
152
155
|
# Convert to DataFrame where each list element becomes a row
|
153
|
-
data = pl.DataFrame(
|
154
|
-
|
155
|
-
|
156
|
-
|
157
|
-
}
|
158
|
-
)
|
156
|
+
data = pl.DataFrame({
|
157
|
+
k: v if isinstance(v, (list, tuple)) else [v] * length
|
158
|
+
for k, v in data.items()
|
159
|
+
})
|
159
160
|
else:
|
160
161
|
# If values are scalars, wrap them in a list to create a single row
|
161
162
|
data = pl.DataFrame({k: [v] for k, v in data.items()})
|
@@ -257,6 +258,7 @@ if (
|
|
257
258
|
data = [pa.Table.from_batches([dd]) for dd in data]
|
258
259
|
|
259
260
|
return data
|
261
|
+
|
260
262
|
else:
|
261
263
|
|
262
264
|
def to_pyarrow_table(*args, **kwargs):
|
@@ -345,8 +347,8 @@ if importlib.util.find_spec("joblib"):
|
|
345
347
|
all_iterables = iterables + list(iterable_kwargs.values())
|
346
348
|
param_combinations = list(zip(*all_iterables)) # Convert to list for tqdm
|
347
349
|
|
348
|
-
if verbose:
|
349
|
-
|
350
|
+
# if verbose:
|
351
|
+
# param_combinations = tqdm.tqdm(param_combinations)
|
350
352
|
|
351
353
|
return Parallel(**parallel_kwargs)(
|
352
354
|
delayed(func)(
|
@@ -410,3 +412,49 @@ def view_img(data: str | bytes, format: str = "svg"):
|
|
410
412
|
|
411
413
|
time.sleep(2) # Wait for viewer to open
|
412
414
|
os.unlink(tmp_path)
|
415
|
+
|
416
|
+
|
417
|
+
def update_config_from_dict(
    struct: msgspec.Struct, data: dict[str, Any]
) -> msgspec.Struct:
    """
    Return a copy of *struct* with values overridden from *data*.

    The struct is round-tripped through plain builtins so nested fields
    become dicts; nested dicts are merged recursively via
    ``update_nested_dict``, while all other values are replaced outright.
    Keys in *data* that do not exist on the struct are ignored.

    Args:
        struct: The msgspec.Struct instance to update.
        data: Dictionary containing update values.

    Returns:
        A new msgspec.Struct of the same type with the updates applied.
    """
    # Work on a plain-dict view of the struct so ordinary dict logic applies.
    as_dict = msgspec.to_builtins(struct)

    for field, new_value in data.items():
        if field not in as_dict:
            # Unknown keys are silently skipped rather than added.
            continue
        current = as_dict[field]
        if isinstance(new_value, dict) and isinstance(current, dict):
            # Merge nested mappings recursively.
            as_dict[field] = update_nested_dict(current, new_value)
        else:
            as_dict[field] = new_value

    # Re-validate and rebuild the original struct type from the merged dict.
    return msgspec.convert(as_dict, type(struct))
|
446
|
+
|
447
|
+
|
448
|
+
def update_nested_dict(
    original: dict[str, Any], updates: dict[str, Any]
) -> dict[str, Any]:
    """Merge *updates* into a shallow copy of *original*.

    Where both sides hold a dict under the same key the merge recurses;
    otherwise the value from *updates* wins. *original* is never mutated.
    """
    merged = dict(original)
    for key, value in updates.items():
        existing = merged.get(key)
        if isinstance(value, dict) and isinstance(existing, dict):
            # Both sides are mappings: descend instead of overwriting.
            merged[key] = update_nested_dict(existing, value)
        else:
            merged[key] = value
    return merged
|
flowerpower/utils/sql.py
CHANGED
@@ -2,8 +2,8 @@ import datetime as dt
|
|
2
2
|
import re
|
3
3
|
from functools import lru_cache
|
4
4
|
from typing import Any
|
5
|
+
from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
|
5
6
|
|
6
|
-
import pendulum as pdl
|
7
7
|
import pyarrow as pa
|
8
8
|
import pyarrow.compute as pc
|
9
9
|
from sqlglot import exp, parse_one
|
@@ -13,38 +13,136 @@ from .polars import pl
|
|
13
13
|
|
14
14
|
@lru_cache(maxsize=128)
|
15
15
|
def timestamp_from_string(
|
16
|
-
|
16
|
+
timestamp_str: str,
|
17
17
|
tz: str | None = None,
|
18
|
-
exact: bool = True,
|
19
|
-
strict: bool = False,
|
20
18
|
naive: bool = False,
|
21
|
-
) ->
|
19
|
+
) -> dt.datetime | dt.date | dt.time:
|
22
20
|
"""
|
23
|
-
Converts a string
|
21
|
+
Converts a timestamp string (ISO 8601 format) into a datetime, date, or time object
|
22
|
+
using only standard Python libraries.
|
23
|
+
|
24
|
+
Handles strings with or without timezone information (e.g., '2023-01-01T10:00:00+02:00',
|
25
|
+
'2023-01-01', '10:00:00'). Supports timezone offsets like '+HH:MM' or '+HHMM'.
|
26
|
+
For named timezones (e.g., 'Europe/Paris'), requires Python 3.9+ and the 'tzdata'
|
27
|
+
package to be installed.
|
24
28
|
|
25
29
|
Args:
|
26
|
-
|
27
|
-
tz (str, optional):
|
28
|
-
|
29
|
-
|
30
|
-
naive (bool, optional):
|
30
|
+
timestamp_str (str): The string representation of the timestamp (ISO 8601 format).
|
31
|
+
tz (str, optional): Target timezone identifier (e.g., 'UTC', '+02:00', 'Europe/Paris').
|
32
|
+
If provided, the output datetime/time will be localized or converted to this timezone.
|
33
|
+
Defaults to None.
|
34
|
+
naive (bool, optional): If True, return a naive datetime/time (no timezone info),
|
35
|
+
even if the input string or `tz` parameter specifies one. Defaults to False.
|
31
36
|
|
32
37
|
Returns:
|
33
|
-
datetime.
|
34
|
-
"""
|
35
|
-
# Extract the timezone from the string if not provided
|
36
|
-
# tz = extract_timezone(timestamp) if tz is None else tz
|
37
|
-
# timestamp = timestamp.replace(tz, "").strip() if tz else timestamp
|
38
|
-
|
39
|
-
pdl_timestamp = pdl.parse(timestamp, exact=exact, strict=strict)
|
38
|
+
Union[dt.datetime, dt.date, dt.time]: The parsed datetime, date, or time object.
|
40
39
|
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
pdl_timestamp = pdl_timestamp.naive()
|
40
|
+
Raises:
|
41
|
+
ValueError: If the timestamp string format is invalid or the timezone is
|
42
|
+
invalid/unsupported.
|
43
|
+
"""
|
46
44
|
|
47
|
-
|
45
|
+
# Regex to parse timezone offsets like +HH:MM or +HHMM
|
46
|
+
_TZ_OFFSET_REGEX = re.compile(r"([+-])(\d{2}):?(\d{2})")
|
47
|
+
|
48
|
+
def _parse_tz_offset(tz_str: str) -> dt.tzinfo | None:
|
49
|
+
"""Parses a timezone offset string into a timezone object."""
|
50
|
+
match = _TZ_OFFSET_REGEX.fullmatch(tz_str)
|
51
|
+
if match:
|
52
|
+
sign, hours, minutes = match.groups()
|
53
|
+
offset_seconds = (int(hours) * 3600 + int(minutes) * 60) * (
|
54
|
+
-1 if sign == "-" else 1
|
55
|
+
)
|
56
|
+
if abs(offset_seconds) >= 24 * 3600:
|
57
|
+
raise ValueError(f"Invalid timezone offset: {tz_str}")
|
58
|
+
return dt.timezone(dt.timedelta(seconds=offset_seconds), name=tz_str)
|
59
|
+
return None
|
60
|
+
|
61
|
+
def _get_tzinfo(tz_identifier: str | None) -> dt.tzinfo | None:
|
62
|
+
"""Gets a tzinfo object from a string (offset or IANA name)."""
|
63
|
+
if tz_identifier is None:
|
64
|
+
return None
|
65
|
+
if tz_identifier.upper() == "UTC":
|
66
|
+
return dt.timezone.utc
|
67
|
+
|
68
|
+
# Try parsing as offset first
|
69
|
+
offset_tz = _parse_tz_offset(tz_identifier)
|
70
|
+
if offset_tz:
|
71
|
+
return offset_tz
|
72
|
+
|
73
|
+
# Try parsing as IANA name using zoneinfo (if available)
|
74
|
+
if ZoneInfo:
|
75
|
+
try:
|
76
|
+
return ZoneInfo(tz_identifier)
|
77
|
+
except ZoneInfoNotFoundError:
|
78
|
+
raise ValueError(
|
79
|
+
f"Timezone '{tz_identifier}' not found. Install 'tzdata' or use offset format."
|
80
|
+
)
|
81
|
+
except Exception as e: # Catch other potential zoneinfo errors
|
82
|
+
raise ValueError(f"Error loading timezone '{tz_identifier}': {e}")
|
83
|
+
else:
|
84
|
+
# zoneinfo not available
|
85
|
+
raise ValueError(
|
86
|
+
f"Invalid timezone: '{tz_identifier}'. Use offset format (e.g., '+02:00') "
|
87
|
+
"or run Python 3.9+ with 'tzdata' installed for named timezones."
|
88
|
+
)
|
89
|
+
|
90
|
+
target_tz: dt.tzinfo | None = _get_tzinfo(tz)
|
91
|
+
parsed_obj: dt.datetime | dt.date | dt.time | None = None
|
92
|
+
|
93
|
+
# Preprocess: Replace space separator, strip whitespace
|
94
|
+
processed_str = timestamp_str.strip().replace(" ", "T")
|
95
|
+
|
96
|
+
# Attempt parsing (datetime, date, time) using fromisoformat
|
97
|
+
try:
|
98
|
+
# Python < 3.11 fromisoformat has limitations (e.g., no Z, no +HHMM offset)
|
99
|
+
# This implementation assumes Python 3.11+ for full ISO 8601 support via fromisoformat
|
100
|
+
# or that input strings use formats compatible with older versions (e.g., +HH:MM)
|
101
|
+
parsed_obj = dt.datetime.fromisoformat(processed_str)
|
102
|
+
except ValueError:
|
103
|
+
try:
|
104
|
+
parsed_obj = dt.date.fromisoformat(processed_str)
|
105
|
+
except ValueError:
|
106
|
+
try:
|
107
|
+
# Time parsing needs care, especially with offsets in older Python
|
108
|
+
parsed_obj = dt.time.fromisoformat(processed_str)
|
109
|
+
except ValueError:
|
110
|
+
# Add fallback for simple HH:MM:SS if needed, though less robust
|
111
|
+
# try:
|
112
|
+
# parsed_obj = dt.datetime.strptime(processed_str, "%H:%M:%S").time()
|
113
|
+
# except ValueError:
|
114
|
+
raise ValueError(f"Invalid timestamp format: '{timestamp_str}'")
|
115
|
+
|
116
|
+
# Apply timezone logic if we have a datetime or time object
|
117
|
+
if isinstance(parsed_obj, (dt.datetime, dt.time)):
|
118
|
+
is_aware = (
|
119
|
+
parsed_obj.tzinfo is not None
|
120
|
+
and parsed_obj.tzinfo.utcoffset(
|
121
|
+
parsed_obj if isinstance(parsed_obj, dt.datetime) else None
|
122
|
+
)
|
123
|
+
is not None
|
124
|
+
)
|
125
|
+
|
126
|
+
if target_tz:
|
127
|
+
if is_aware:
|
128
|
+
# Convert existing aware object to target timezone (only for datetime)
|
129
|
+
if isinstance(parsed_obj, dt.datetime):
|
130
|
+
parsed_obj = parsed_obj.astimezone(target_tz)
|
131
|
+
# else: dt.time cannot be converted without a date context. Keep original tz.
|
132
|
+
else:
|
133
|
+
# Localize naive object to target timezone
|
134
|
+
parsed_obj = parsed_obj.replace(tzinfo=target_tz)
|
135
|
+
is_aware = True # Object is now considered aware
|
136
|
+
|
137
|
+
# Handle naive flag: remove tzinfo if requested
|
138
|
+
if naive and is_aware:
|
139
|
+
parsed_obj = parsed_obj.replace(tzinfo=None)
|
140
|
+
|
141
|
+
# If it's a date object, tz/naive flags are ignored
|
142
|
+
elif isinstance(parsed_obj, dt.date):
|
143
|
+
pass
|
144
|
+
|
145
|
+
return parsed_obj
|
48
146
|
|
49
147
|
|
50
148
|
# Compile regex patterns once for efficiency
|
flowerpower/utils/templates.py
CHANGED
@@ -1,145 +1,5 @@
|
|
1
|
-
|
2
|
-
|
3
|
-
# # ------------------------ Example ------------------------- #
|
4
|
-
# #
|
5
|
-
# # path: pipelines
|
6
|
-
# #
|
7
|
-
# # ## pipeline parameter
|
8
|
-
# #
|
9
|
-
# # params:
|
10
|
-
# # flow1: ## pipeline name
|
11
|
-
# # step1: ## step name
|
12
|
-
# # param1_1: 123 ## step parameters
|
13
|
-
# # param1_2: abc
|
14
|
-
# # step2:
|
15
|
-
# # param2_1: true
|
16
|
-
# #
|
17
|
-
# # ## run configuration
|
18
|
-
# #
|
19
|
-
# # run:
|
20
|
-
# # prod: # environment name
|
21
|
-
# # flow1:
|
22
|
-
# # inputs: ## input parameters
|
23
|
-
# # final_vars: [step2] ## final output vars
|
24
|
-
# # with_tracker: true ## whether to track the run
|
25
|
-
# #
|
26
|
-
# # dev:
|
27
|
-
# # flow1:
|
28
|
-
# # inputs:
|
29
|
-
# # final_vars: [step2]
|
30
|
-
# # with_tracker: false
|
31
|
-
# #
|
32
|
-
# # ---------------------------------------------------------- #
|
33
|
-
|
34
|
-
# """
|
35
|
-
|
36
|
-
# SCHEDULER_TEMPLATE = """# ---------------- Scheduler Configuration ----------------- #
|
37
|
-
|
38
|
-
# # ------------------------ Example ------------------------- #
|
39
|
-
# #
|
40
|
-
# # ## data store configuration
|
41
|
-
# #
|
42
|
-
# # ### postgres
|
43
|
-
# # data_store:
|
44
|
-
# # type: sqlalchemy
|
45
|
-
# # url: postgresql+asyncpg://edge:edge@postgres/flowerpower
|
46
|
-
# #
|
47
|
-
# # ### sqlite
|
48
|
-
# # data_store:
|
49
|
-
# # type: sqlalchemy
|
50
|
-
# # url: sqlite+aiosqlite:///flowerpower.db
|
51
|
-
# #
|
52
|
-
# # ### memory
|
53
|
-
# # data_store:
|
54
|
-
# # type: memory
|
55
|
-
# #
|
56
|
-
# # ### mongodb
|
57
|
-
# # data_store:
|
58
|
-
# # type: mongodb
|
59
|
-
# # url: mongodb://localhost:27017/scheduler
|
60
|
-
# #
|
61
|
-
# # ## event broker configuration
|
62
|
-
# #
|
63
|
-
# # ### postgres
|
64
|
-
# # event_broker:
|
65
|
-
# # type: asyncpg
|
66
|
-
# # url: postgresql+asyncpg://edge:edge@postgres/flowerpower
|
67
|
-
# #
|
68
|
-
# # ### mqtt
|
69
|
-
# # event_broker:
|
70
|
-
# # type: mqtt
|
71
|
-
# # host: localhost
|
72
|
-
# # port: 1883
|
73
|
-
# # username: edge
|
74
|
-
# # password: edge
|
75
|
-
|
76
|
-
# # ### redis
|
77
|
-
# # event_broker:
|
78
|
-
# # type: redis
|
79
|
-
# # host: localhost
|
80
|
-
# # port: 6379
|
81
|
-
|
82
|
-
# # ### local
|
83
|
-
# # event_broker:
|
84
|
-
# # type: local # or memory
|
85
|
-
# #
|
86
|
-
# # ## cleanup interval configuration
|
87
|
-
# #
|
88
|
-
# # cleanup_interval:
|
89
|
-
# # unit: minutes
|
90
|
-
# # value: 15
|
91
|
-
# #
|
92
|
-
# # ## pipeline schedule configuration
|
93
|
-
# #
|
94
|
-
# # pipeline:
|
95
|
-
# # my_flow:
|
96
|
-
# # type: cron ## options: interval, calendarinterval, date
|
97
|
-
# # start_time:
|
98
|
-
# # end_time:
|
99
|
-
# # ## optional cron arguments
|
100
|
-
# # crontab: * * * * *
|
101
|
-
# # year:
|
102
|
-
# # month:
|
103
|
-
# # week:
|
104
|
-
# # day:
|
105
|
-
# # days_of_week:
|
106
|
-
# # hour:
|
107
|
-
# # minute:
|
108
|
-
# # second:
|
109
|
-
# # timezone:
|
110
|
-
# # ## optional interval arguments
|
111
|
-
# # weeks:
|
112
|
-
# # days:
|
113
|
-
# # hours:
|
114
|
-
# # minutes:
|
115
|
-
# # seconds:
|
116
|
-
# # microseconds:
|
117
|
-
# #
|
118
|
-
# # ---------------------------------------------------------- #
|
119
|
-
|
120
|
-
# """
|
121
|
-
|
122
|
-
# TRACKER_TEMPLATE = """# ----------------- Tracker Configuration ------------------ #
|
123
|
-
|
124
|
-
# # ------------------------ Example ------------------------- #
|
125
|
-
# #
|
126
|
-
# # username: your.email@example.com
|
127
|
-
# # api_url: http://localhost:8241
|
128
|
-
# # ui_url: http://localhost:8242
|
129
|
-
# # api_key:
|
130
|
-
|
131
|
-
# # pipeline:
|
132
|
-
# # my_flow:
|
133
|
-
# # project_id: 1
|
134
|
-
# # tags:
|
135
|
-
# # environment: dev
|
136
|
-
# # version: 1.0
|
137
|
-
# # TODO: add_more_tags_to_find_your_run_later
|
138
|
-
# # dag_name: my_flow_123
|
139
|
-
# #
|
140
|
-
# # ---------------------------------------------------------- #
|
141
|
-
|
142
|
-
# """
|
1
|
+
|
2
|
+
|
143
3
|
|
144
4
|
PIPELINE_PY_TEMPLATE = """# FlowerPower pipeline {name}.py
|
145
5
|
# Created on {date}
|
@@ -172,3 +32,19 @@ PARAMS = Config.load(
|
|
172
32
|
# Pipeline functions
|
173
33
|
|
174
34
|
"""
|
35
|
+
|
36
|
+
HOOK_TEMPLATE__MQTT_BUILD_CONFIG = '''
|
37
|
+
def {function_name}(payload: bytes, topic: str) -> dict:
|
38
|
+
"""
|
39
|
+
MQTT hook function to build the configuration for the pipeline.
|
40
|
+
This function is called in the on_message callback of the MQTT client.
|
41
|
+
The result of this function will be passed to the hamilton builder as the config for the pipeline.
|
42
|
+
Args:
|
43
|
+
payload (bytes): The payload of the MQTT message.
|
44
|
+
topic (str): The topic of the MQTT message.
|
45
|
+
Returns:
|
46
|
+
dict: The configuration for the pipeline.
|
47
|
+
"""
|
48
|
+
|
49
|
+
pass
|
50
|
+
'''
|
flowerpower/web/app.py
ADDED
File without changes
|