FlowerPower 0.9.13.1__py3-none-any.whl → 1.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. flowerpower/__init__.py +17 -2
  2. flowerpower/cfg/__init__.py +201 -149
  3. flowerpower/cfg/base.py +122 -24
  4. flowerpower/cfg/pipeline/__init__.py +254 -0
  5. flowerpower/cfg/pipeline/adapter.py +66 -0
  6. flowerpower/cfg/pipeline/run.py +40 -11
  7. flowerpower/cfg/pipeline/schedule.py +69 -79
  8. flowerpower/cfg/project/__init__.py +149 -0
  9. flowerpower/cfg/project/adapter.py +57 -0
  10. flowerpower/cfg/project/job_queue.py +165 -0
  11. flowerpower/cli/__init__.py +92 -37
  12. flowerpower/cli/job_queue.py +878 -0
  13. flowerpower/cli/mqtt.py +32 -1
  14. flowerpower/cli/pipeline.py +559 -406
  15. flowerpower/cli/utils.py +29 -18
  16. flowerpower/flowerpower.py +12 -8
  17. flowerpower/fs/__init__.py +20 -2
  18. flowerpower/fs/base.py +350 -26
  19. flowerpower/fs/ext.py +797 -216
  20. flowerpower/fs/storage_options.py +1097 -55
  21. flowerpower/io/base.py +13 -18
  22. flowerpower/io/loader/__init__.py +28 -0
  23. flowerpower/io/loader/deltatable.py +7 -10
  24. flowerpower/io/metadata.py +1 -0
  25. flowerpower/io/saver/__init__.py +28 -0
  26. flowerpower/io/saver/deltatable.py +4 -3
  27. flowerpower/job_queue/__init__.py +252 -0
  28. flowerpower/job_queue/apscheduler/__init__.py +11 -0
  29. flowerpower/job_queue/apscheduler/_setup/datastore.py +110 -0
  30. flowerpower/job_queue/apscheduler/_setup/eventbroker.py +93 -0
  31. flowerpower/job_queue/apscheduler/manager.py +1063 -0
  32. flowerpower/job_queue/apscheduler/setup.py +524 -0
  33. flowerpower/job_queue/apscheduler/trigger.py +169 -0
  34. flowerpower/job_queue/apscheduler/utils.py +309 -0
  35. flowerpower/job_queue/base.py +382 -0
  36. flowerpower/job_queue/rq/__init__.py +10 -0
  37. flowerpower/job_queue/rq/_trigger.py +37 -0
  38. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +226 -0
  39. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +231 -0
  40. flowerpower/job_queue/rq/manager.py +1449 -0
  41. flowerpower/job_queue/rq/setup.py +150 -0
  42. flowerpower/job_queue/rq/utils.py +69 -0
  43. flowerpower/pipeline/__init__.py +5 -0
  44. flowerpower/pipeline/base.py +118 -0
  45. flowerpower/pipeline/io.py +407 -0
  46. flowerpower/pipeline/job_queue.py +505 -0
  47. flowerpower/pipeline/manager.py +1586 -0
  48. flowerpower/pipeline/registry.py +560 -0
  49. flowerpower/pipeline/runner.py +560 -0
  50. flowerpower/pipeline/visualizer.py +142 -0
  51. flowerpower/plugins/mqtt/__init__.py +12 -0
  52. flowerpower/plugins/mqtt/cfg.py +16 -0
  53. flowerpower/plugins/mqtt/manager.py +789 -0
  54. flowerpower/settings.py +110 -0
  55. flowerpower/utils/logging.py +21 -0
  56. flowerpower/utils/misc.py +57 -9
  57. flowerpower/utils/sql.py +122 -24
  58. flowerpower/utils/templates.py +2 -142
  59. flowerpower-1.0.0b1.dist-info/METADATA +324 -0
  60. flowerpower-1.0.0b1.dist-info/RECORD +94 -0
  61. flowerpower/_web/__init__.py +0 -61
  62. flowerpower/_web/routes/config.py +0 -103
  63. flowerpower/_web/routes/pipelines.py +0 -173
  64. flowerpower/_web/routes/scheduler.py +0 -136
  65. flowerpower/cfg/pipeline/tracker.py +0 -14
  66. flowerpower/cfg/project/open_telemetry.py +0 -8
  67. flowerpower/cfg/project/tracker.py +0 -11
  68. flowerpower/cfg/project/worker.py +0 -19
  69. flowerpower/cli/scheduler.py +0 -309
  70. flowerpower/cli/web.py +0 -44
  71. flowerpower/event_handler.py +0 -23
  72. flowerpower/mqtt.py +0 -609
  73. flowerpower/pipeline.py +0 -2499
  74. flowerpower/scheduler.py +0 -680
  75. flowerpower/tui.py +0 -79
  76. flowerpower/utils/datastore.py +0 -186
  77. flowerpower/utils/eventbroker.py +0 -127
  78. flowerpower/utils/executor.py +0 -58
  79. flowerpower/utils/trigger.py +0 -140
  80. flowerpower-0.9.13.1.dist-info/METADATA +0 -586
  81. flowerpower-0.9.13.1.dist-info/RECORD +0 -76
  82. /flowerpower/{cfg/pipeline/params.py → cli/worker.py} +0 -0
  83. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b1.dist-info}/WHEEL +0 -0
  84. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b1.dist-info}/entry_points.txt +0 -0
  85. {flowerpower-0.9.13.1.dist-info → flowerpower-1.0.0b1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,524 @@
1
+ # Standard library imports
2
+ from dataclasses import dataclass, field
3
+
4
+ # Third-party imports
5
+ from apscheduler.datastores.base import BaseDataStore
6
+ from apscheduler.eventbrokers.base import BaseEventBroker
7
+ from loguru import logger
8
+ from sqlalchemy import text
9
+ from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
10
+
11
+ # Local imports
12
+ from ...utils.logging import setup_logging
13
+ from ..base import BaseBackend
14
+
15
setup_logging()


@dataclass  # (slots=True)
class APSDataStore(BaseBackend):
    """APScheduler data store supporting SQL, MongoDB, and in-memory backends.

    Provides a flexible data store interface for APScheduler, backed by
    SQLAlchemy-compatible databases (PostgreSQL, MySQL, SQLite), MongoDB,
    or in-memory storage.

    Args:
        schema: Database schema name. Defaults to "flowerpower".
            Ignored for SQLite databases, which do not support schemas.

    Attributes:
        type (BackendType): Backend storage type (inherited from BaseBackend).
        uri (str): Connection URI for the backend (inherited from BaseBackend).
        _client (BaseDataStore): Lazily created APScheduler data store.
        _sqla_engine (AsyncEngine): SQLAlchemy async engine for SQL backends.

    Raises:
        ValueError: If an invalid backend type is specified.

    Example:
        ```python
        # Create PostgreSQL data store
        data_store = APSDataStore(
            type="postgresql",
            uri="postgresql+asyncpg://user:pass@localhost/db",
            schema="scheduler",
        )
        data_store.setup()

        # Create in-memory data store
        memory_store = APSDataStore(type="memory")
        memory_store.setup()

        # Create MongoDB data store
        mongo_store = APSDataStore(
            type="mongodb",
            uri="mongodb://localhost:27017",
            schema="scheduler",
        )
        mongo_store.setup()
        ```
    """

    schema: str | None = "flowerpower"

    def __post_init__(self):
        """Validate the configuration after dataclass initialization.

        Defaults the backend type to "memory", runs the parent validation,
        rejects unsupported backend types, and warns when a schema is set
        for SQLite (which ignores schemas).

        Raises:
            ValueError: If an invalid backend type is specified.
        """
        if self.type is None:
            self.type = "memory"
        super().__post_init__()

        if (
            not self.type.is_memory_type
            and not self.type.is_mongodb_type
            and not self.type.is_sqla_type
        ):
            # Build the list outside the f-string: a multi-line expression
            # inside f-string braces requires Python 3.12+, while the rest of
            # this file only needs 3.10 (`str | None`).
            valid_types = [
                self.type.POSTGRESQL,
                self.type.MYSQL,
                self.type.SQLITE,
                self.type.MONGODB,
                self.type.MEMORY,
            ]
            raise ValueError(
                f"Invalid backend type: {self.type}. Valid types: {valid_types}"
            )
        if self.type.is_sqlite_type and self.schema is not None:
            # BUG FIX: the original passed the extra sentences as positional
            # arguments to logger.warning(); loguru treats positional args as
            # str.format() arguments and, since the message has no
            # placeholders, silently dropped them. Emit one full message.
            logger.warning(
                "SQLite does not support schema. When using SQLite, the "
                "schema will be ignored. When you need to use schemas, you "
                "can use several SQLite databases, one for each schema. Or "
                "use PostgreSQL or MySQL."
            )

    async def _setup_db(self) -> None:
        """Create the database and schema for SQL backends if missing.

        Tries to create the schema directly; if that fails (e.g. the target
        database does not exist yet), falls back to creating the database
        first and then the schema.

        Raises:
            Exception: If database/schema creation fails.
        """
        sqla_engine = create_async_engine(self.uri)

        try:
            await self._create_schema(sqla_engine)
        except Exception:
            await self._create_database_and_schema(sqla_engine)

    async def _create_schema(self, engine: AsyncEngine) -> None:
        """Create the configured schema if it does not already exist.

        Args:
            engine: SQLAlchemy async engine connected to the database.
        """
        if not self.schema:
            return

        async with engine.begin() as conn:
            await conn.execute(text(f"CREATE SCHEMA IF NOT EXISTS {self.schema}"))
            await conn.commit()

    async def _create_database_and_schema(self, engine: AsyncEngine) -> None:
        """Create both the database and the schema if they don't exist.

        Opens a temporary connection to the PostgreSQL maintenance database
        ``template1`` to issue CREATE DATABASE, then creates the schema in
        the new database.

        Args:
            engine: SQLAlchemy async engine pointing at the target database.
        """
        database_name = self.uri.split("/")[-1].split("?")[0]
        temp_uri = self.uri.replace(f"/{database_name}", "/template1")
        temp_engine = create_async_engine(temp_uri)

        async with temp_engine.begin() as conn:
            # CREATE DATABASE cannot run inside a transaction block.
            await conn.execute(text("COMMIT"))
            try:
                await conn.execute(text(f"CREATE DATABASE {database_name}"))
            finally:
                await conn.execute(text("COMMIT"))

        if self.schema:
            await self._create_schema(engine)

    def setup_db(self) -> None:
        """Initialize the database synchronously.

        Blocking wrapper around the async :meth:`_setup_db`, using an anyio
        blocking portal to run async code from a synchronous context.
        """
        from anyio.from_thread import start_blocking_portal

        with start_blocking_portal() as portal:
            portal.call(self._setup_db)

    def _setup_sqlalchemy(self) -> None:
        """Initialize a SQLAlchemy data store (PostgreSQL, MySQL, SQLite).

        Creates the database and schema first for non-SQLite backends.
        """
        from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore

        if not self.type.is_sqlite_type:
            self.setup_db()
        self._sqla_engine = create_async_engine(self.uri)
        self._client = SQLAlchemyDataStore(self._sqla_engine, schema=self.schema)

    def _setup_mongodb(self) -> None:
        """Initialize a MongoDB data store (schema is the database name)."""
        from apscheduler.datastores.mongodb import MongoDBDataStore

        self._client = MongoDBDataStore(self.uri, database=self.schema)

    def _setup_memory(self) -> None:
        """Initialize an in-memory data store for temporary storage."""
        from apscheduler.datastores.memory import MemoryDataStore

        self._client = MemoryDataStore()

    def setup(self) -> None:
        """Initialize the appropriate data store for the backend type.

        Main setup entry point; delegates to the backend-specific setup
        method based on ``self.type``.
        """
        if self.type.is_sqla_type:
            self._setup_sqlalchemy()
        elif self.type.is_mongodb_type:
            self._setup_mongodb()
        else:
            self._setup_memory()

    @property
    def client(self) -> BaseDataStore:
        """The APScheduler data store, initializing it on first access."""
        if self._client is None:
            self.setup()
        return self._client

    @property
    def sqla_engine(self) -> AsyncEngine | None:
        """The async SQLAlchemy engine (None for non-SQL backends)."""
        if self._sqla_engine is None:
            self.setup()
        return self._sqla_engine
234
+
235
@dataclass  # (slots=True)
class APSEventBroker(BaseBackend):
    """APScheduler event broker supporting multiple messaging backends.

    Supported backends: PostgreSQL NOTIFY/LISTEN, MQTT, Redis pub/sub, and
    in-process (memory) event handling.

    Attributes:
        type (BackendType): Backend messaging type (inherited from BaseBackend).
        uri (str): Connection URI for the backend (inherited from BaseBackend).
        _client (BaseEventBroker): Lazily created APScheduler event broker.
        _sqla_engine (AsyncEngine): Async engine for PostgreSQL NOTIFY/LISTEN.

    Raises:
        ValueError: If an invalid backend type is specified, or if the engine
            passed to :meth:`from_ds_sqla` is not PostgreSQL with asyncpg.

    Example:
        ```python
        # Redis event broker
        redis_broker = APSEventBroker(type="redis", uri="redis://localhost:6379/0")
        redis_broker.setup()

        # MQTT event broker
        mqtt_broker = APSEventBroker(type="mqtt", uri="mqtt://user:pass@localhost:1883")
        mqtt_broker.setup()

        # PostgreSQL broker sharing a data store's SQLAlchemy engine
        pg_broker = APSEventBroker.from_ds_sqla(pg_engine)

        # In-memory event broker
        memory_broker = APSEventBroker(type="memory")
        memory_broker.setup()
        ```
    """

    def __post_init__(self):
        """Validate the configuration after dataclass initialization.

        Defaults the backend type to "memory", runs the parent validation,
        and rejects unsupported backend types.

        Raises:
            ValueError: If an invalid backend type is specified.
        """
        if self.type is None:
            self.type = "memory"
        super().__post_init__()

        if (
            not self.type.is_redis_type
            and not self.type.is_memory_type
            # BUG FIX: MQTT was missing from this check, so type="mqtt" was
            # rejected here even though setup() dispatches on is_mqtt_type
            # and the class documents MQTT as a supported backend.
            and not self.type.is_mqtt_type
            and not self.type.is_mongodb_type
            and not self.type.is_sqla_type
        ):
            # NOTE(review): is_mongodb_type passes validation but setup()
            # has no MongoDB branch (falls through to the local broker) —
            # confirm whether MongoDB should be accepted here at all.
            valid_types = [
                self.type.POSTGRESQL,
                self.type.MQTT,
                self.type.REDIS,
                self.type.MEMORY,
            ]
            raise ValueError(
                f"Invalid backend type: {self.type}. Valid types: {valid_types}"
            )

    def _setup_asyncpg_event_broker(self):
        """Initialize the PostgreSQL (NOTIFY/LISTEN) event broker.

        Uses an existing SQLAlchemy engine when one was provided, otherwise
        connects via the DSN in ``self.uri``.
        """
        from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker

        if self._sqla_engine is None:
            self._client = AsyncpgEventBroker.from_dsn(dsn=self.uri)
        else:
            self._client = AsyncpgEventBroker.from_async_sqla_engine(
                engine=self._sqla_engine
            )

    def _setup_mqtt_event_broker(self):
        """Initialize the MQTT event broker from the connection URI.

        Parses host, port, credentials, and SSL flag (scheme "mqtts") from
        ``self.uri``.
        """
        import urllib.parse

        from apscheduler.eventbrokers.mqtt import MQTTEventBroker

        # Parse the URI
        parsed = urllib.parse.urlparse(self.uri)

        hostname = parsed.hostname
        port = parsed.port
        username = parsed.username
        password = parsed.password
        use_ssl = parsed.scheme == "mqtts"

        self._client = MQTTEventBroker(
            host=hostname, port=port, ssl=use_ssl, topic="flowerpower/worker"
        )
        # BUG FIX: the original gated on self.username/self.password but then
        # passed the URI-parsed credentials, so credentials embedded in the
        # URI were silently dropped whenever the instance attributes were
        # unset. Gate on the parsed values that are actually used.
        if (username is not None) and (password is not None):
            self._client._client.username_pw_set(
                username,
                password,
            )

    def _setup_redis_event_broker(self):
        """Initialize the Redis pub/sub event broker."""
        from apscheduler.eventbrokers.redis import RedisEventBroker

        self._client = RedisEventBroker(self.uri)

    def _setup_local_event_broker(self):
        """Initialize the in-process (local) event broker."""
        from apscheduler.eventbrokers.local import LocalEventBroker

        self._client = LocalEventBroker()

    def setup(self):
        """Initialize the appropriate event broker for the backend type.

        Main setup entry point; delegates to the backend-specific setup
        method based on ``self.type``.
        """
        if self.type.is_sqla_type:
            self._setup_asyncpg_event_broker()
        elif self.type.is_mqtt_type:
            self._setup_mqtt_event_broker()
        elif self.type.is_redis_type:
            self._setup_redis_event_broker()
        else:
            self._setup_local_event_broker()

    @property
    def client(self) -> BaseEventBroker:
        """The APScheduler event broker, initializing it on first access."""
        if self._client is None:
            self.setup()
        return self._client

    @property
    def sqla_engine(self) -> AsyncEngine | None:
        """The async SQLAlchemy engine (None for non-PostgreSQL backends)."""
        if self._sqla_engine is None:
            self.setup()
        return self._sqla_engine

    @classmethod
    def from_ds_sqla(cls, sqla_engine: AsyncEngine) -> "APSEventBroker":
        """Create a PostgreSQL event broker sharing a data store's engine.

        Args:
            sqla_engine: Async SQLAlchemy engine; must use the
                postgresql+asyncpg driver.

        Returns:
            APSEventBroker: New broker instance bound to the provided engine.

        Raises:
            ValueError: If the engine is not postgresql+asyncpg.

        Example:
            ```python
            data_store = APSDataStore(
                type="postgresql",
                uri="postgresql+asyncpg://user:pass@localhost/db",
            )
            data_store.setup()
            event_broker = APSEventBroker.from_ds_sqla(data_store.sqla_engine)
            ```
        """
        if sqla_engine.url.drivername != "postgresql+asyncpg":
            raise ValueError(
                f"sqla_engine must be a PostgreSQL engine ('postgresql+asyncpg://'), got '{sqla_engine.url.drivername}'"
            )
        # NOTE(review): assumes BaseBackend declares a `_sqla_engine` field
        # accepted by the dataclass constructor — confirm in ..base.
        return cls(
            type="postgresql",
            _sqla_engine=sqla_engine,
        )
450
@dataclass(slots=True)
class APSBackend:
    """Container combining an APScheduler data store and event broker.

    Handles initialization and setup of both components, accepting either
    ready-made instances or configuration dictionaries.

    Args:
        data_store (APSDataStore | dict | None): Data store configuration,
            either an APSDataStore instance or a config dict. Defaults to a
            new APSDataStore instance (in-memory).
        event_broker (APSEventBroker | dict | None): Event broker
            configuration, either an APSEventBroker instance or a config
            dict. Defaults to a new APSEventBroker instance (in-memory).

    Example:
        ```python
        # Default in-memory backend
        backend = APSBackend()

        # PostgreSQL data store + Redis event broker
        backend = APSBackend(
            data_store={
                "type": "postgresql",
                "uri": "postgresql+asyncpg://user:pass@localhost/db",
                "schema": "scheduler",
            },
            event_broker={"type": "redis", "uri": "redis://localhost:6379/0"},
        )

        # PostgreSQL for both, sharing the data store's connection
        backend = APSBackend(
            data_store={
                "type": "postgresql",
                "uri": "postgresql+asyncpg://user:pass@localhost/db",
            },
            event_broker={"from_ds_sqla": True},
        )
        ```
    """

    data_store: APSDataStore | dict | None = field(default_factory=APSDataStore)
    event_broker: APSEventBroker | dict | None = field(default_factory=APSEventBroker)

    def __post_init__(self):
        """Initialize and set up the data store and event broker.

        Converts dict configurations to instances, then runs setup() on
        each component. An event broker dict containing the key
        "from_ds_sqla" reuses the data store's SQLAlchemy engine for
        PostgreSQL NOTIFY/LISTEN events.
        """
        if self.data_store is not None:
            if isinstance(self.data_store, dict):
                self.data_store = APSDataStore.from_dict(self.data_store)
            self.data_store.setup()
        if self.event_broker is not None:
            if isinstance(self.event_broker, dict):
                if "from_ds_sqla" in self.event_broker:
                    # NOTE(review): assumes data_store is set when
                    # from_ds_sqla is requested — confirm callers.
                    self.event_broker = APSEventBroker.from_ds_sqla(
                        self.data_store.sqla_engine
                    )
                else:
                    self.event_broker = APSEventBroker.from_dict(self.event_broker)
            # BUG FIX: the original called event_broker.setup() twice for
            # dict configs (once inside the dict branch and once more
            # unconditionally afterwards); one call suffices.
            self.event_broker.setup()
@@ -0,0 +1,169 @@
1
+ import datetime as dt
2
+ from enum import Enum
3
+ from typing import Any, Dict, Type
4
+
5
+ from apscheduler.triggers.calendarinterval import CalendarIntervalTrigger
6
+ from apscheduler.triggers.cron import CronTrigger
7
+ from apscheduler.triggers.date import DateTrigger
8
+ from apscheduler.triggers.interval import IntervalTrigger
9
+
10
+ from ..base import BaseTrigger
11
+
12
+
13
class TriggerType(Enum):
    """Closed set of trigger kinds supported by the APScheduler backend."""

    CRON = "cron"
    INTERVAL = "interval"
    CALENDARINTERVAL = "calendarinterval"
    DATE = "date"
18
+
19
+
20
# Mapping of trigger type -> implementing class and the keyword arguments
# that class accepts. Consulted by APSTrigger for validation and dispatch.
TRIGGER_CONFIG: Dict[TriggerType, Dict[str, Any]] = {
    TriggerType.CRON: {
        "class": CronTrigger,
        "kwargs": [
            "crontab", "year", "month", "week", "day", "day_of_week",
            "hour", "minute", "second", "start_time", "end_time", "timezone",
        ],
    },
    TriggerType.INTERVAL: {
        "class": IntervalTrigger,
        "kwargs": [
            "weeks", "days", "hours", "minutes", "seconds", "microseconds",
            "start_time", "end_time",
        ],
    },
    TriggerType.CALENDARINTERVAL: {
        "class": CalendarIntervalTrigger,
        "kwargs": [
            "years", "months", "weeks", "days", "hour", "minute", "second",
            "start_date", "end_date", "timezone",
        ],
    },
    TriggerType.DATE: {
        "class": DateTrigger,
        "kwargs": ["run_time"],
    },
}
74
+
75
+
76
class APSTrigger(BaseTrigger):
    """APScheduler-backed implementation of :class:`BaseTrigger`.

    Acts as a factory for APScheduler trigger instances, validating and
    filtering keyword arguments against the allowed set for each trigger
    type (see ``TRIGGER_CONFIG``).
    """

    trigger_type: TriggerType

    def __init__(self, trigger_type: str):
        """Resolve a trigger-type string to a :class:`TriggerType` member.

        Args:
            trigger_type (str): One of "cron", "interval",
                "calendarinterval", or "date" (case-insensitive).

        Raises:
            ValueError: If ``trigger_type`` is not a known type.
        """
        try:
            self.trigger_type = TriggerType(trigger_type.lower())
        except ValueError:
            valid_types = [member.value for member in TriggerType]
            raise ValueError(
                f"Invalid trigger type '{trigger_type}'. Valid types are: {valid_types}"
            )

    def _get_allowed_kwargs(self) -> set:
        """Return the set of kwargs permitted for the current trigger type."""
        return set(TRIGGER_CONFIG[self.trigger_type]["kwargs"])

    def _check_kwargs(self, **kwargs) -> None:
        """Reject any kwarg not permitted for the current trigger type.

        Raises:
            ValueError: If one or more kwargs are not allowed.
        """
        allowed = self._get_allowed_kwargs()
        invalid = [name for name in kwargs if name not in allowed]
        if invalid:
            raise ValueError(
                f"Invalid argument(s) for trigger type '{self.trigger_type.value}': {invalid}. "
                f"Allowed arguments are: {sorted(allowed)}"
            )

    def _filter_kwargs(self, **kwargs) -> Dict[str, Any]:
        """Keep only kwargs that are permitted and not None.

        Returns:
            Dict[str, Any]: The filtered keyword arguments.
        """
        allowed = self._get_allowed_kwargs()
        return {
            name: value
            for name, value in kwargs.items()
            if name in allowed and value is not None
        }

    def get_trigger_instance(self, **kwargs) -> Any:
        """Build an APScheduler trigger instance for this trigger type.

        Args:
            **kwargs: Trigger keyword arguments; validated against the
                allowed set for the type, with None values dropped.

        Returns:
            Any: A configured APScheduler trigger instance.

        Raises:
            ValueError: If invalid arguments are provided or the trigger
                type is unknown.
        """
        self._check_kwargs(**kwargs)
        params = self._filter_kwargs(**kwargs)
        trigger_cls: Type = TRIGGER_CONFIG[self.trigger_type]["class"]

        if self.trigger_type is TriggerType.CRON:
            # A crontab expression takes precedence over field-wise kwargs.
            crontab = params.pop("crontab", None)
            if crontab:
                return trigger_cls.from_crontab(crontab)
            return trigger_cls(**params)

        if self.trigger_type is TriggerType.DATE:
            # Default to "now" when no run_time was given.
            # NOTE(review): dt.datetime.now() is naive — confirm the target
            # APScheduler version accepts naive datetimes for DateTrigger.
            params.setdefault("run_time", dt.datetime.now())
            return trigger_cls(**params)

        if self.trigger_type in (TriggerType.INTERVAL, TriggerType.CALENDARINTERVAL):
            return trigger_cls(**params)

        # Unreachable: __init__ already validated the type via the Enum.
        raise ValueError(f"Unknown trigger type: {self.trigger_type.value}")
167
+
168
+
169
+ # End of file