FlowerPower 0.9.12.4__py3-none-any.whl → 1.0.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowerpower/__init__.py +17 -2
- flowerpower/cfg/__init__.py +201 -149
- flowerpower/cfg/base.py +122 -24
- flowerpower/cfg/pipeline/__init__.py +254 -0
- flowerpower/cfg/pipeline/adapter.py +66 -0
- flowerpower/cfg/pipeline/run.py +40 -11
- flowerpower/cfg/pipeline/schedule.py +69 -79
- flowerpower/cfg/project/__init__.py +149 -0
- flowerpower/cfg/project/adapter.py +57 -0
- flowerpower/cfg/project/job_queue.py +165 -0
- flowerpower/cli/__init__.py +92 -35
- flowerpower/cli/job_queue.py +878 -0
- flowerpower/cli/mqtt.py +49 -4
- flowerpower/cli/pipeline.py +576 -381
- flowerpower/cli/utils.py +55 -0
- flowerpower/flowerpower.py +12 -7
- flowerpower/fs/__init__.py +20 -2
- flowerpower/fs/base.py +350 -26
- flowerpower/fs/ext.py +797 -216
- flowerpower/fs/storage_options.py +1097 -55
- flowerpower/io/base.py +13 -18
- flowerpower/io/loader/__init__.py +28 -0
- flowerpower/io/loader/deltatable.py +7 -10
- flowerpower/io/metadata.py +1 -0
- flowerpower/io/saver/__init__.py +28 -0
- flowerpower/io/saver/deltatable.py +4 -3
- flowerpower/job_queue/__init__.py +252 -0
- flowerpower/job_queue/apscheduler/__init__.py +11 -0
- flowerpower/job_queue/apscheduler/_setup/datastore.py +110 -0
- flowerpower/job_queue/apscheduler/_setup/eventbroker.py +93 -0
- flowerpower/job_queue/apscheduler/manager.py +1063 -0
- flowerpower/job_queue/apscheduler/setup.py +524 -0
- flowerpower/job_queue/apscheduler/trigger.py +169 -0
- flowerpower/job_queue/apscheduler/utils.py +309 -0
- flowerpower/job_queue/base.py +382 -0
- flowerpower/job_queue/rq/__init__.py +10 -0
- flowerpower/job_queue/rq/_trigger.py +37 -0
- flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +226 -0
- flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +231 -0
- flowerpower/job_queue/rq/manager.py +1449 -0
- flowerpower/job_queue/rq/setup.py +150 -0
- flowerpower/job_queue/rq/utils.py +69 -0
- flowerpower/pipeline/__init__.py +5 -0
- flowerpower/pipeline/base.py +118 -0
- flowerpower/pipeline/io.py +407 -0
- flowerpower/pipeline/job_queue.py +505 -0
- flowerpower/pipeline/manager.py +1586 -0
- flowerpower/pipeline/registry.py +560 -0
- flowerpower/pipeline/runner.py +560 -0
- flowerpower/pipeline/visualizer.py +142 -0
- flowerpower/plugins/mqtt/__init__.py +12 -0
- flowerpower/plugins/mqtt/cfg.py +16 -0
- flowerpower/plugins/mqtt/manager.py +789 -0
- flowerpower/settings.py +110 -0
- flowerpower/utils/logging.py +21 -0
- flowerpower/utils/misc.py +57 -9
- flowerpower/utils/sql.py +122 -24
- flowerpower/utils/templates.py +18 -142
- flowerpower/web/app.py +0 -0
- flowerpower-1.0.0b1.dist-info/METADATA +324 -0
- flowerpower-1.0.0b1.dist-info/RECORD +94 -0
- {flowerpower-0.9.12.4.dist-info → flowerpower-1.0.0b1.dist-info}/WHEEL +1 -1
- flowerpower/cfg/pipeline/tracker.py +0 -14
- flowerpower/cfg/project/open_telemetry.py +0 -8
- flowerpower/cfg/project/tracker.py +0 -11
- flowerpower/cfg/project/worker.py +0 -19
- flowerpower/cli/scheduler.py +0 -309
- flowerpower/event_handler.py +0 -23
- flowerpower/mqtt.py +0 -525
- flowerpower/pipeline.py +0 -2419
- flowerpower/scheduler.py +0 -680
- flowerpower/tui.py +0 -79
- flowerpower/utils/datastore.py +0 -186
- flowerpower/utils/eventbroker.py +0 -127
- flowerpower/utils/executor.py +0 -58
- flowerpower/utils/trigger.py +0 -140
- flowerpower-0.9.12.4.dist-info/METADATA +0 -575
- flowerpower-0.9.12.4.dist-info/RECORD +0 -70
- /flowerpower/{cfg/pipeline/params.py → cli/worker.py} +0 -0
- {flowerpower-0.9.12.4.dist-info → flowerpower-1.0.0b1.dist-info}/entry_points.txt +0 -0
- {flowerpower-0.9.12.4.dist-info → flowerpower-1.0.0b1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,309 @@
|
|
1
|
+
from operator import attrgetter
|
2
|
+
from typing import List
|
3
|
+
|
4
|
+
from rich.console import Console
|
5
|
+
from rich.table import Table
|
6
|
+
|
7
|
+
|
8
|
+
def humanize_crontab(minute, hour, day, month, day_of_week):
    """Translate the five crontab fields into a readable English phrase.

    Args:
        minute: Crontab minute field (e.g. "0", "*/5", "*").
        hour: Crontab hour field.
        day: Crontab day-of-month field.
        month: Crontab month field.
        day_of_week: Crontab day-of-week field (numeric or named, e.g. "mon-fri").

    Returns:
        str: A phrase such as "every day at 14:30". If anything goes wrong
        while interpreting the fields, the raw "m h d M dow" string is
        returned so the caller always gets something displayable.
    """
    days = {
        "0": "Sunday",
        "sun": "Sunday",
        "7": "Sunday",
        "1": "Monday",
        "mon": "Monday",
        "2": "Tuesday",
        "tue": "Tuesday",
        "3": "Wednesday",
        "wed": "Wednesday",
        "4": "Thursday",
        "thu": "Thursday",
        "5": "Friday",
        "fri": "Friday",
        "6": "Saturday",
        "sat": "Saturday",
        "*": "*",
    }
    months = {
        "1": "January",
        "2": "February",
        "3": "March",
        "4": "April",
        "5": "May",
        "6": "June",
        "7": "July",
        "8": "August",
        "9": "September",
        "10": "October",
        "11": "November",
        "12": "December",
        "*": "*",
    }

    def get_day_name(day_input):
        # Resolve a single day, a "start-end" range, or a comma-separated list.
        day_input = str(day_input).lower().strip()
        if "-" in day_input:
            start, end = day_input.split("-")
            return f"{days.get(start.strip(), start)}-{days.get(end.strip(), end)}"
        if "," in day_input:
            return ", ".join(
                days.get(d.strip(), d.strip()) for d in day_input.split(",")
            )
        return days.get(day_input, day_input)

    try:
        minute, hour, day, month, day_of_week = map(
            str.strip, map(str, [minute, hour, day, month, day_of_week])
        )

        # Step expressions ("*/n") take precedence over everything else.
        if "/" in minute:
            return f"every {minute.split('/')[1]} minutes"
        if "/" in hour:
            return f"every {hour.split('/')[1]} hours"

        if all(x == "*" for x in [minute, hour, day, month, day_of_week]):
            return "every minute"
        if [minute, hour, day, month, day_of_week] == ["0", "*", "*", "*", "*"]:
            return "every hour"

        if (
            minute == "0"
            and hour != "*"
            and day == "*"
            and month == "*"
            and day_of_week == "*"
        ):
            return (
                "every day at midnight"
                if hour == "0"
                else "every day at noon"
                if hour == "12"
                else f"every day at {hour}:00"
            )

        if (
            minute == "0"
            and hour == "0"
            and day == "*"
            and month == "*"
            and day_of_week != "*"
        ):
            return f"every {get_day_name(day_of_week)} at midnight"

        if (
            minute == "0"
            and hour != "*"
            and day == "*"
            and month == "*"
            and day_of_week != "*"
        ):
            return (
                # BUG FIX: this literal was missing its f-prefix and rendered
                # the placeholder "{hour}" verbatim.
                f"every weekday at {hour}:00"
                if "-" in day_of_week
                and "mon" in day_of_week.lower()
                and "fri" in day_of_week.lower()
                else f"every {get_day_name(day_of_week)} at {hour}:00"
            )

        if (
            minute != "*"
            and hour != "*"
            and day == "*"
            and month == "*"
            and day_of_week == "*"
        ):
            return f"every day at {hour}:{minute.zfill(2)}"

        if day != "*" and month != "*" and minute == "0" and hour == "0":
            return f"on day {day} of {months.get(month, month)} at midnight"

        if (
            minute != "*"
            and hour == "*"
            and day == "*"
            and month == "*"
            and day_of_week == "*"
        ):
            return f"every hour at minute {minute}"

        # Generic fallback: describe whichever fields are constrained.
        parts = []
        if minute != "*":
            parts.append(f"at minute {minute}")
        if hour != "*":
            parts.append(f"hour {hour}")
        if day != "*":
            parts.append(f"day {day}")
        if month != "*":
            parts.append(f"month {months.get(month, month)}")
        if day_of_week != "*":
            parts.append(f"on {get_day_name(day_of_week)}")

        return f"runs {' '.join(parts)}" if parts else "every minute"
    except Exception:
        # Intentionally broad: this is a display helper and must never raise.
        return f"{minute} {hour} {day} {month} {day_of_week}"
|
144
|
+
|
145
|
+
|
146
|
+
def format_trigger(trigger):
    """Return a one-line human-readable description of a scheduler trigger.

    Dispatches on the trigger's class name so no backend trigger classes need
    importing here. Unknown trigger types fall back to
    "<TypeName>: <str(trigger)>".
    """
    trigger_type = trigger.__class__.__name__

    if trigger_type == "IntervalTrigger":
        # Report the first non-zero unit, abbreviated (s/m/h/d).
        for unit in ["seconds", "minutes", "hours", "days"]:
            if value := getattr(trigger, unit, None):
                return f"Interval: Every {value}{unit[0]}"
        return "Interval"

    if trigger_type == "CronTrigger":
        try:
            # The repr looks like: CronTrigger(minute='0', hour='12', ...).
            # BUG FIX: the old str.strip("CronTrigger(") treated its argument
            # as a character *set* and could eat leading characters of the
            # first field name; removeprefix/removesuffix strip exact affixes.
            raw = str(trigger).removeprefix("CronTrigger(").removesuffix(")")
            cron_parts = dict(part.split("=") for part in raw.split(", "))
            cron_parts = {k: v.strip("'") for k, v in cron_parts.items()}
            crontab = f"{cron_parts['minute']} {cron_parts['hour']} {cron_parts['day']} {cron_parts['month']} {cron_parts['day_of_week']}"
            human_readable = humanize_crontab(**{
                k: cron_parts[k]
                for k in ["minute", "hour", "day", "month", "day_of_week"]
            })
            return f"Cron: {human_readable} ({crontab})"
        except Exception:
            # Any parsing hiccup: show the raw trigger rather than failing.
            return f"Cron: {str(trigger)}"

    if trigger_type == "DateTrigger":
        return f"Date: Once at {trigger.run_date.strftime('%Y-%m-%d %H:%M:%S')}"

    return f"{trigger_type}: {str(trigger)}"
|
175
|
+
|
176
|
+
|
177
|
+
def display_schedules(schedules: List):
    """Render schedules as a rich table on stdout, ordered by next fire time.

    Schedules without a next fire time (paused/finished) sort last instead of
    raising.

    Args:
        schedules: Schedule objects exposing id, task_id, trigger, args,
            kwargs, next_fire_time, last_fire_time and paused attributes.
    """
    console = Console()
    total_width = console.width - 10

    # Relative column widths; every column still gets at least 10 cells.
    width_ratios = {
        "id": 0.20,
        "task": 0.10,
        "trigger": 0.25,
        "name": 0.15,
        "run_args": 0.15,
        "next_fire": 0.08,
        "last_fire": 0.08,
        "paused": 0.01,
    }

    widths = {k: max(10, int(total_width * ratio)) for k, ratio in width_ratios.items()}

    table = Table(
        show_header=True,
        header_style="bold magenta",
        width=total_width,
        row_styles=["", "dim"],
        border_style="blue",
        show_lines=True,
    )

    for col, style, width in [
        ("ID", "dim", widths["id"]),
        ("Task", "cyan", widths["task"]),
        ("Trigger", "blue", widths["trigger"]),
        ("Name", "yellow", widths["name"]),
        ("Run Args", "yellow", widths["run_args"]),
        ("Next Fire Time", "green", widths["next_fire"]),
        ("Last Fire Time", "red", widths["last_fire"]),
        ("Paused", "bold", widths["paused"]),
    ]:
        table.add_column(col, style=style, width=width)

    # BUG FIX: sorting with attrgetter("next_fire_time") raised TypeError when
    # any schedule has next_fire_time=None (None doesn't order against
    # datetimes), even though the row rendering below explicitly handles None.
    # Sort those last; the 0 sentinel is only ever compared against another 0
    # because the boolean first element already separates the two groups.
    def _sort_key(s):
        return (s.next_fire_time is None, s.next_fire_time or 0)

    for schedule in sorted(schedules, key=_sort_key):
        table.add_row(
            schedule.id,
            schedule.task_id.split(":")[-1],
            format_trigger(schedule.trigger),
            (
                str(schedule.args[1])
                if schedule.args and len(schedule.args) > 1
                else "None"
            ),
            "\n".join(f"{k}: {v}" for k, v in (schedule.kwargs or {}).items())
            or "None",
            (
                schedule.next_fire_time.strftime("%Y-%m-%d %H:%M:%S")
                if schedule.next_fire_time
                else "Never"
            ),
            (
                schedule.last_fire_time.strftime("%Y-%m-%d %H:%M:%S")
                if schedule.last_fire_time
                else "Never"
            ),
            "✓" if schedule.paused else "✗",
        )

    console.print(table)
|
241
|
+
|
242
|
+
|
243
|
+
def display_tasks(tasks):
    """Render the given task definitions as a rich table on stdout."""
    out = Console()
    task_table = Table(title="Tasks")

    # (heading, style, width) per column.
    columns = (
        ("ID", "cyan", 50),
        ("Job Executor", "blue", 15),
        ("Max Running Jobs", "yellow", 15),
        ("Misfire Grace Time", "green", 20),
    )
    for heading, col_style, col_width in columns:
        task_table.add_column(heading, style=col_style, width=col_width)

    for entry in sorted(tasks, key=attrgetter("id")):
        task_table.add_row(
            entry.id,
            str(entry.job_executor),
            str(entry.max_running_jobs or "None"),
            str(entry.misfire_grace_time or "None"),
        )

    out.print(task_table)
|
266
|
+
|
267
|
+
|
268
|
+
def display_jobs(jobs):
    """Render queued and running jobs as a rich table on stdout."""
    out = Console()
    job_table = Table(title="Jobs")

    # (heading, style, width) per column.
    columns = (
        ("ID", "cyan", 10),
        ("Task ID", "blue", 40),
        ("Args", "yellow", 20),
        ("Kwargs", "yellow", 20),
        ("Schedule ID", "green", 15),
        ("Created At", "magenta", 25),
        ("Status", "red", 15),
    )
    for heading, col_style, col_width in columns:
        job_table.add_column(heading, style=col_style, width=col_width)

    for job in sorted(jobs, key=attrgetter("id")):
        kwargs_text = (
            "\n".join(f"{k}: {v}" for k, v in job.kwargs.items())
            if job.kwargs
            else "None"
        )
        job_table.add_row(
            str(job.id),
            job.task_id,
            str(job.args if job.args else "None"),
            kwargs_text,
            str(job.schedule_id or "None"),
            job.created_at.strftime("%Y-%m-%d %H:%M:%S"),
            "Running" if job.acquired_by else "Pending",
        )

    out.print(job_table)
|
@@ -0,0 +1,382 @@
|
|
1
|
+
"""
|
2
|
+
Base scheduler interface for FlowerPower.
|
3
|
+
|
4
|
+
This module defines the abstract base classes for scheduling operations
|
5
|
+
that can be implemented by different backend providers (APScheduler, RQ, etc.).
|
6
|
+
"""
|
7
|
+
|
8
|
+
import abc
|
9
|
+
import posixpath
|
10
|
+
import sys
|
11
|
+
import urllib.parse
|
12
|
+
from dataclasses import dataclass, field
|
13
|
+
from enum import Enum
|
14
|
+
from pathlib import Path
|
15
|
+
from typing import Any
|
16
|
+
|
17
|
+
from sqlalchemy.ext.asyncio import AsyncEngine
|
18
|
+
|
19
|
+
from ..cfg import ProjectConfig
|
20
|
+
from ..fs import AbstractFileSystem, get_filesystem
|
21
|
+
# from ..utils.misc import update_config_from_dict
|
22
|
+
from ..settings import BACKEND_PROPERTIES
|
23
|
+
|
24
|
+
|
25
|
+
class BackendType(str, Enum):
|
26
|
+
POSTGRESQL = "postgresql"
|
27
|
+
MYSQL = "mysql"
|
28
|
+
SQLITE = "sqlite"
|
29
|
+
MONGODB = "mongodb"
|
30
|
+
MQTT = "mqtt"
|
31
|
+
REDIS = "redis"
|
32
|
+
NATS_KV = "nats_kv"
|
33
|
+
MEMORY = "memory"
|
34
|
+
|
35
|
+
@property
|
36
|
+
def properties(self):
|
37
|
+
return BACKEND_PROPERTIES[self.value]
|
38
|
+
|
39
|
+
@property
|
40
|
+
def uri_prefix(self) -> str:
|
41
|
+
return self.properties.get("uri_prefix", "")
|
42
|
+
|
43
|
+
@property
|
44
|
+
def default_port(self):
|
45
|
+
return self.properties.get("default_port")
|
46
|
+
|
47
|
+
@property
|
48
|
+
def default_host(self) -> str:
|
49
|
+
return self.properties.get("default_host", "")
|
50
|
+
|
51
|
+
@property
|
52
|
+
def default_username(self) -> str:
|
53
|
+
return self.properties.get("default_username", "")
|
54
|
+
|
55
|
+
@property
|
56
|
+
def default_password(self) -> str:
|
57
|
+
return self.properties.get("default_password", "")
|
58
|
+
|
59
|
+
@property
|
60
|
+
def default_database(self) -> str:
|
61
|
+
return self.properties.get("default_database", "")
|
62
|
+
|
63
|
+
@property
|
64
|
+
def is_sqla_type(self) -> bool:
|
65
|
+
return self.properties.get("is_sqla_type", False)
|
66
|
+
|
67
|
+
@property
|
68
|
+
def is_mongodb_type(self) -> bool:
|
69
|
+
return self.value == "mongodb"
|
70
|
+
|
71
|
+
@property
|
72
|
+
def is_mqtt_type(self) -> bool:
|
73
|
+
return self.value == "mqtt"
|
74
|
+
|
75
|
+
@property
|
76
|
+
def is_redis_type(self) -> bool:
|
77
|
+
return self.value == "redis"
|
78
|
+
|
79
|
+
@property
|
80
|
+
def is_nats_kv_type(self) -> bool:
|
81
|
+
return self.value == "nats_kv"
|
82
|
+
|
83
|
+
@property
|
84
|
+
def is_memory_type(self) -> bool:
|
85
|
+
return self.value == "memory"
|
86
|
+
|
87
|
+
@property
|
88
|
+
def is_sqlite_type(self) -> bool:
|
89
|
+
return self.value == "sqlite"
|
90
|
+
|
91
|
+
def gen_uri(
|
92
|
+
self,
|
93
|
+
host: str | None = None,
|
94
|
+
port: int | None = None,
|
95
|
+
username: str | None = None,
|
96
|
+
password: str | None = None,
|
97
|
+
database: str | None = None,
|
98
|
+
ssl: bool = False,
|
99
|
+
ca_file: str | None = None,
|
100
|
+
cert_file: str | None = None,
|
101
|
+
key_file: str | None = None,
|
102
|
+
verify_ssl: bool = False,
|
103
|
+
) -> str:
|
104
|
+
# Handle host and port
|
105
|
+
host = host or self.default_host
|
106
|
+
port = port or self.default_port
|
107
|
+
database = database or self.default_database
|
108
|
+
username = username or self.default_username
|
109
|
+
password = password or self.default_password
|
110
|
+
|
111
|
+
# components: List[str] = []
|
112
|
+
# Get the appropriate URI prefix based on backend type and SSL setting
|
113
|
+
if self.is_redis_type:
|
114
|
+
uri_prefix = "rediss://" if ssl else "redis://"
|
115
|
+
elif self.is_nats_kv_type:
|
116
|
+
uri_prefix = "nats+tls://" if ssl else "nats://"
|
117
|
+
elif self.is_mqtt_type:
|
118
|
+
uri_prefix = "mqtts://" if ssl else "mqtt://"
|
119
|
+
if ssl and port == 1883:
|
120
|
+
port = 8883
|
121
|
+
else:
|
122
|
+
uri_prefix = self.uri_prefix
|
123
|
+
|
124
|
+
# Handle authentication
|
125
|
+
if username and password:
|
126
|
+
auth = f"{urllib.parse.quote(username)}:{urllib.parse.quote(password)}@"
|
127
|
+
elif username:
|
128
|
+
auth = f"{urllib.parse.quote(username)}@"
|
129
|
+
elif password:
|
130
|
+
auth = f":{urllib.parse.quote(password)}@"
|
131
|
+
else:
|
132
|
+
auth = ""
|
133
|
+
|
134
|
+
port_part = f":{port}" # if port is not None else self.default_port
|
135
|
+
|
136
|
+
# Special handling for SQLite and memory types
|
137
|
+
if self.is_sqlite_type or self.is_memory_type:
|
138
|
+
if self.is_sqlite_type and database:
|
139
|
+
return f"{uri_prefix}{database}"
|
140
|
+
return "memory://"
|
141
|
+
|
142
|
+
# Build path component
|
143
|
+
database = database or self.default_database
|
144
|
+
path = f"/{database}" if database else ""
|
145
|
+
|
146
|
+
# Construct base URI
|
147
|
+
base_uri = f"{uri_prefix}{auth}{host}{port_part}{path}"
|
148
|
+
|
149
|
+
# Prepare query parameters for SSL files
|
150
|
+
query_params: list[str] = []
|
151
|
+
|
152
|
+
if ssl:
|
153
|
+
# Always add ssl query parameter if ssl=True
|
154
|
+
if self.value == "postgresql":
|
155
|
+
query_params.append("ssl=verify-full" if verify_ssl else "ssl=allow")
|
156
|
+
if ca_file:
|
157
|
+
query_params.append(f"sslrootcert={urllib.parse.quote(ca_file)}")
|
158
|
+
if cert_file:
|
159
|
+
query_params.append(f"sslcert={urllib.parse.quote(cert_file)}")
|
160
|
+
if key_file:
|
161
|
+
query_params.append(f"sslkey={urllib.parse.quote(key_file)}")
|
162
|
+
elif self.value == "mysql":
|
163
|
+
query_params.append("ssl=true")
|
164
|
+
if ca_file:
|
165
|
+
query_params.append(f"ssl_ca={urllib.parse.quote(ca_file)}")
|
166
|
+
if cert_file:
|
167
|
+
query_params.append(f"ssl_cert={urllib.parse.quote(cert_file)}")
|
168
|
+
if key_file:
|
169
|
+
query_params.append(f"ssl_key={urllib.parse.quote(key_file)}")
|
170
|
+
elif self.is_mongodb_type:
|
171
|
+
query_params.append("tls=true")
|
172
|
+
if ca_file:
|
173
|
+
query_params.append(f"tlsCAFile={urllib.parse.quote(ca_file)}")
|
174
|
+
if cert_file and key_file:
|
175
|
+
query_params.append(
|
176
|
+
f"tlsCertificateKeyFile={urllib.parse.quote(cert_file)}"
|
177
|
+
)
|
178
|
+
elif self.is_redis_type:
|
179
|
+
query_params.append("ssl=true")
|
180
|
+
if ca_file:
|
181
|
+
query_params.append(f"ssl_ca_certs={urllib.parse.quote(ca_file)}")
|
182
|
+
if cert_file:
|
183
|
+
query_params.append(f"ssl_certfile={urllib.parse.quote(cert_file)}")
|
184
|
+
if key_file:
|
185
|
+
query_params.append(f"ssl_keyfile={urllib.parse.quote(key_file)}")
|
186
|
+
elif self.is_nats_kv_type:
|
187
|
+
query_params.append("tls=true")
|
188
|
+
if ca_file:
|
189
|
+
query_params.append(f"tls_ca_file={urllib.parse.quote(ca_file)}")
|
190
|
+
if cert_file:
|
191
|
+
query_params.append(
|
192
|
+
f"tls_cert_file={urllib.parse.quote(cert_file)}"
|
193
|
+
)
|
194
|
+
if key_file:
|
195
|
+
query_params.append(f"tls_key_file={urllib.parse.quote(key_file)}")
|
196
|
+
elif self.is_mqtt_type:
|
197
|
+
query_params.append("tls=true")
|
198
|
+
if ca_file:
|
199
|
+
query_params.append(f"tls_ca_file={urllib.parse.quote(ca_file)}")
|
200
|
+
if cert_file:
|
201
|
+
query_params.append(
|
202
|
+
f"tls_cert_file={urllib.parse.quote(cert_file)}"
|
203
|
+
)
|
204
|
+
if key_file:
|
205
|
+
query_params.append(f"tls_key_file={urllib.parse.quote(key_file)}")
|
206
|
+
|
207
|
+
# Compose query string if Any params exist
|
208
|
+
query_string = ""
|
209
|
+
if query_params:
|
210
|
+
query_string = "?" + "&".join(query_params)
|
211
|
+
|
212
|
+
return f"{base_uri}{query_string}"
|
213
|
+
|
214
|
+
|
215
|
+
@dataclass(slots=True)
class BaseBackend:
    """Connection settings for a job-queue backend (data store / event broker).

    When ``uri`` is not supplied, it is generated from the individual fields
    via :meth:`BackendType.gen_uri`. Backend-specific setup (engine/client
    creation) is handled by subclasses.
    """

    type: BackendType | str | None = None
    uri: str | None = None
    username: str | None = None
    password: str | None = None
    host: str | None = None
    port: int | None = None
    database: str | None = None
    ssl: bool = False
    ca_file: str | None = None
    cert_file: str | None = None
    key_file: str | None = None
    verify_ssl: bool = False
    _kwargs: dict = field(default_factory=dict)
    # SQLAlchemy async engine instance for SQL backends
    _sqla_engine: AsyncEngine | None = None
    # Native client instance for non-SQL backends
    _client: Any | None = None

    def __post_init__(self):
        if self.type is None:
            # BUG FIX: previously assigned the plain string "memory", which
            # skipped the str -> BackendType conversion in the elif branch
            # below and made the later self.type.gen_uri(...) call fail with
            # AttributeError.
            self.type = BackendType.MEMORY
        elif isinstance(self.type, str):
            try:
                self.type = BackendType[self.type.upper()]
            except KeyError as exc:
                raise ValueError(
                    f"Invalid backend type: {self.type}. Valid types: {[bt.value for bt in BackendType]}"
                ) from exc

        if not self.uri:
            self.uri = self.type.gen_uri(
                username=self.username,
                password=self.password,
                host=self.host,
                port=self.port,
                database=self.database,
                ssl=self.ssl,
                ca_file=self.ca_file,
                cert_file=self.cert_file,
                key_file=self.key_file,
                verify_ssl=self.verify_ssl,
            )

        # Setup is handled by backend-specific implementations

    @classmethod
    def from_dict(cls, d: dict) -> "BaseBackend":
        """Build a backend from a plain mapping of constructor kwargs."""
        return cls(**d)
|
266
|
+
|
267
|
+
|
268
|
+
class BaseTrigger(abc.ABC):
    """Abstract base class for schedule triggers.

    A trigger encodes the policy that decides when a scheduled job should
    run; concrete subclasses translate it into a backend-native trigger
    object.
    """

    def __init__(self, trigger_type: str):
        # e.g. "cron", "interval", "date" — interpreted by the subclass.
        self.trigger_type = trigger_type

    @abc.abstractmethod
    def get_trigger_instance(self, **kwargs) -> Any:
        """Build and return the backend-specific trigger object.

        Args:
            **kwargs: Keyword arguments specific to the trigger type.

        Returns:
            Any: The native trigger instance for the active backend.
        """
        ...
|
290
|
+
|
291
|
+
|
292
|
+
class BaseJobQueueManager:
    """
    Abstract base class for scheduler workers (APScheduler, RQ, etc.).
    Defines the required interface for all scheduler backends.

    Can be used as a context manager:

    ```python
    with RQManager(name="test") as manager:
        manager.add_job(job1)
    ```
    """

    def __enter__(self):
        """Context manager entry - returns self for use in with statement."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit - ensures workers are stopped.

        Stops whichever of the subclass-provided worker/scheduler handles
        (`_worker_process`, `_worker_pool`, `_worker`, `_scheduler`) exist
        and are non-None.

        NOTE(review): a subclass that sets both `_worker_process` and
        `_worker` gets `stop_worker()` called twice here — confirm subclasses
        make that call idempotent.
        """
        if hasattr(self, "_worker_process") and self._worker_process is not None:
            self.stop_worker()
        if hasattr(self, "_worker_pool") and self._worker_pool is not None:
            self.stop_worker_pool()
        if hasattr(self, "_worker") and self._worker is not None:
            self.stop_worker()
        if hasattr(self, "_scheduler") and self._scheduler is not None:
            self.stop_scheduler()
        return False  # Don't suppress exceptions

    def __init__(
        self,
        type: str | None = None,
        name: str | None = None,
        base_dir: str | None = None,
        backend: BaseBackend | None = None,
        storage_options: dict | None = None,
        fs: AbstractFileSystem | None = None,
        **kwargs,
    ):
        """
        Initialize the job queue manager.

        Args:
            type: Job queue backend type identifier (passed through to
                ProjectConfig.load as job_queue_type)
            name: Name of the scheduler
            base_dir: Base directory for the FlowerPower project
                (defaults to the current working directory)
            backend: Backend instance with data store and event broker
            storage_options: Storage options for filesystem access
            fs: Filesystem to use (created from base_dir when omitted)
            **kwargs: Extra options; "pipelines_dir" overrides the default
                "pipelines" module directory
        """
        self.name = name or ""
        self._base_dir = base_dir or str(Path.cwd())
        self._storage_options = storage_options or {}
        self._backend = backend
        self._type = type
        self._pipelines_dir = kwargs.get("pipelines_dir", "pipelines")
        self._conf_dir = "conf"

        if fs is None:
            fs = get_filesystem(self._base_dir, **(self._storage_options or {}))
        self._fs = fs

        # Make project modules importable, then read the project config.
        self._add_modules_path()
        self._load_config()

    def _load_config(self) -> None:
        """Load the project's job-queue configuration from the filesystem.

        Stores the `job_queue` section of the loaded ProjectConfig on
        `self.cfg`.
        """
        self.cfg = ProjectConfig.load(
            base_dir=self._base_dir, job_queue_type=self._type, fs=self._fs
        ).job_queue

    def _add_modules_path(self):
        """
        Make the project's modules importable.

        Syncs cached filesystems first, then prepends the project root and
        the pipelines directory to ``sys.path`` so pipeline modules resolve.

        Returns:
            None
        """
        if self._fs.is_cache_fs:
            self._fs.sync()

        if self._fs.path not in sys.path:
            sys.path.insert(0, self._fs.path)

        modules_path = posixpath.join(self._fs.path, self._pipelines_dir)
        if modules_path not in sys.path:
            sys.path.insert(0, modules_path)
|