FlowerPower 0.20.0-py3-none-any.whl → 0.30.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flowerpower/__init__.py +2 -6
- flowerpower/cfg/__init__.py +4 -11
- flowerpower/cfg/base.py +29 -25
- flowerpower/cfg/pipeline/__init__.py +3 -3
- flowerpower/cfg/pipeline/_schedule.py +32 -0
- flowerpower/cfg/pipeline/adapter.py +0 -5
- flowerpower/cfg/pipeline/builder.py +377 -0
- flowerpower/cfg/pipeline/run.py +89 -0
- flowerpower/cfg/project/__init__.py +8 -21
- flowerpower/cfg/project/adapter.py +0 -12
- flowerpower/cli/__init__.py +2 -28
- flowerpower/cli/pipeline.py +10 -4
- flowerpower/flowerpower.py +275 -585
- flowerpower/pipeline/base.py +19 -10
- flowerpower/pipeline/io.py +52 -46
- flowerpower/pipeline/manager.py +149 -91
- flowerpower/pipeline/pipeline.py +159 -87
- flowerpower/pipeline/registry.py +68 -33
- flowerpower/pipeline/visualizer.py +4 -4
- flowerpower/plugins/{_io → io}/__init__.py +1 -1
- flowerpower/settings/__init__.py +0 -2
- flowerpower/settings/{backend.py → _backend.py} +0 -19
- flowerpower/settings/logging.py +1 -1
- flowerpower/utils/logging.py +24 -12
- flowerpower/utils/misc.py +17 -0
- flowerpower-0.30.0.dist-info/METADATA +451 -0
- flowerpower-0.30.0.dist-info/RECORD +42 -0
- flowerpower/cfg/pipeline/schedule.py +0 -74
- flowerpower/cfg/project/job_queue.py +0 -111
- flowerpower/cli/job_queue.py +0 -1329
- flowerpower/cli/mqtt.py +0 -174
- flowerpower/job_queue/__init__.py +0 -205
- flowerpower/job_queue/base.py +0 -611
- flowerpower/job_queue/rq/__init__.py +0 -10
- flowerpower/job_queue/rq/_trigger.py +0 -37
- flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +0 -226
- flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +0 -228
- flowerpower/job_queue/rq/manager.py +0 -1893
- flowerpower/job_queue/rq/setup.py +0 -154
- flowerpower/job_queue/rq/utils.py +0 -69
- flowerpower/mqtt.py +0 -12
- flowerpower/plugins/mqtt/__init__.py +0 -12
- flowerpower/plugins/mqtt/cfg.py +0 -17
- flowerpower/plugins/mqtt/manager.py +0 -962
- flowerpower/settings/job_queue.py +0 -31
- flowerpower-0.20.0.dist-info/METADATA +0 -693
- flowerpower-0.20.0.dist-info/RECORD +0 -58
- {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/WHEEL +0 -0
- {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/entry_points.txt +0 -0
- {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/licenses/LICENSE +0 -0
- {flowerpower-0.20.0.dist-info → flowerpower-0.30.0.dist-info}/top_level.txt +0 -0
flowerpower/job_queue/base.py
DELETED
@@ -1,611 +0,0 @@
"""
Base scheduler interface for FlowerPower.

This module defines the abstract base classes for scheduling operations
that can be implemented by different backend providers (APScheduler, RQ, etc.).
"""

import abc
import importlib
import os
import posixpath
import sys
import urllib.parse
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path
from typing import TYPE_CHECKING, Any, TypeVar

from loguru import logger

if importlib.util.find_spec("sqlalchemy"):
    from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
else:
    create_async_engine = None
    AsyncEngine = TypeVar("AsyncEngine")

# Import PipelineRegistry with TYPE_CHECKING to avoid circular imports
if TYPE_CHECKING:
    from ..pipeline.registry import PipelineRegistry

from fsspec_utils import AbstractFileSystem, filesystem

from ..cfg import ProjectConfig
# from ..utils.misc import update_config_from_dict
from ..settings import BACKEND_PROPERTIES, CACHE_DIR, CONFIG_DIR, PIPELINES_DIR


class BackendType(str, Enum):
    POSTGRESQL = "postgresql"
    MYSQL = "mysql"
    SQLITE = "sqlite"
    MONGODB = "mongodb"
    MQTT = "mqtt"
    REDIS = "redis"
    NATS_KV = "nats_kv"
    MEMORY = "memory"

    @property
    def properties(self):
        return BACKEND_PROPERTIES[self.value]

    @property
    def uri_prefix(self) -> str:
        return self.properties.get("uri_prefix", "")

    @property
    def default_port(self):
        return self.properties.get("default_port")

    @property
    def default_host(self) -> str:
        return self.properties.get("default_host", "")

    @property
    def default_username(self) -> str:
        return self.properties.get("default_username", "")

    @property
    def default_password(self) -> str:
        return self.properties.get("default_password", "")

    @property
    def default_database(self) -> str:
        return self.properties.get("default_database", "")

    @property
    def is_sqla_type(self) -> bool:
        return self.properties.get("is_sqla_type", False)

    @property
    def is_mongodb_type(self) -> bool:
        return self.value == "mongodb"

    @property
    def is_mqtt_type(self) -> bool:
        return self.value == "mqtt"

    @property
    def is_redis_type(self) -> bool:
        return self.value == "redis"

    @property
    def is_nats_kv_type(self) -> bool:
        return self.value == "nats_kv"

    @property
    def is_memory_type(self) -> bool:
        return self.value == "memory"

    @property
    def is_sqlite_type(self) -> bool:
        return self.value == "sqlite"

    def gen_uri(
        self,
        host: str | None = None,
        port: int | None = None,
        username: str | None = None,
        password: str | None = None,
        database: str | None = None,
        ssl: bool = False,
        ca_file: str | None = None,
        cert_file: str | None = None,
        key_file: str | None = None,
        verify_ssl: bool = False,
    ) -> str:
        # Handle host and port
        host = host or self.default_host
        port = port or self.default_port
        database = database or self.default_database
        username = username or self.default_username
        password = password or self.default_password

        # components: List[str] = []
        # Get the appropriate URI prefix based on backend type and SSL setting
        if self.is_redis_type:
            uri_prefix = "rediss://" if ssl else "redis://"
        elif self.is_nats_kv_type:
            uri_prefix = "nats+tls://" if ssl else "nats://"
        elif self.is_mqtt_type:
            uri_prefix = "mqtts://" if ssl else "mqtt://"
            if ssl and port == 1883:
                port = 8883
        else:
            uri_prefix = self.uri_prefix

        # Handle authentication
        if username and password:
            auth = f"{urllib.parse.quote(username)}:{urllib.parse.quote(password)}@"
        elif username:
            auth = f"{urllib.parse.quote(username)}@"
        elif password:
            auth = f":{urllib.parse.quote(password)}@"
        else:
            auth = ""

        port_part = f":{port}"  # if port is not None else self.default_port

        # Special handling for SQLite and memory types
        if self.is_sqlite_type or self.is_memory_type:
            if self.is_sqlite_type:
                if database:
                    return f"{uri_prefix}{database}"
                else:
                    return f"{uri_prefix}"
            return "memory://"

        # Build path component
        database = database or self.default_database
        path = f"/{database}" if database else ""

        # Construct base URI
        base_uri = f"{uri_prefix}{auth}{host}{port_part}{path}"

        # Prepare query parameters for SSL files
        query_params: list[str] = []

        if ssl:
            # Always add ssl query parameter if ssl=True
            if self.value == "postgresql":
                query_params.append("ssl=verify-full" if verify_ssl else "ssl=allow")
                if ca_file:
                    query_params.append(f"sslrootcert={urllib.parse.quote(ca_file)}")
                if cert_file:
                    query_params.append(f"sslcert={urllib.parse.quote(cert_file)}")
                if key_file:
                    query_params.append(f"sslkey={urllib.parse.quote(key_file)}")
            elif self.value == "mysql":
                query_params.append("ssl=true")
                if ca_file:
                    query_params.append(f"ssl_ca={urllib.parse.quote(ca_file)}")
                if cert_file:
                    query_params.append(f"ssl_cert={urllib.parse.quote(cert_file)}")
                if key_file:
                    query_params.append(f"ssl_key={urllib.parse.quote(key_file)}")
            elif self.is_mongodb_type:
                query_params.append("tls=true")
                if ca_file:
                    query_params.append(f"tlsCAFile={urllib.parse.quote(ca_file)}")
                if cert_file and key_file:
                    query_params.append(
                        f"tlsCertificateKeyFile={urllib.parse.quote(cert_file)}"
                    )
            elif self.is_redis_type:
                if not verify_ssl:
                    query_params.append("ssl_cert_reqs=none")
                if ca_file:
                    query_params.append(f"ssl_ca_certs={urllib.parse.quote(ca_file)}")
                if cert_file:
                    query_params.append(f"ssl_certfile={urllib.parse.quote(cert_file)}")
                if key_file:
                    query_params.append(f"ssl_keyfile={urllib.parse.quote(key_file)}")
            elif self.is_nats_kv_type:
                query_params.append("tls=true")
                if ca_file:
                    query_params.append(f"tls_ca_file={urllib.parse.quote(ca_file)}")
                if cert_file:
                    query_params.append(
                        f"tls_cert_file={urllib.parse.quote(cert_file)}"
                    )
                if key_file:
                    query_params.append(f"tls_key_file={urllib.parse.quote(key_file)}")
            elif self.is_mqtt_type:
                query_params.append("tls=true")
                if ca_file:
                    query_params.append(f"tls_ca_file={urllib.parse.quote(ca_file)}")
                if cert_file:
                    query_params.append(
                        f"tls_cert_file={urllib.parse.quote(cert_file)}"
                    )
                if key_file:
                    query_params.append(f"tls_key_file={urllib.parse.quote(key_file)}")

        # Compose query string if any params exist
        query_string = ""
        if query_params:
            query_string = "?" + "&".join(query_params)

        return f"{base_uri}{query_string}"


@dataclass(slots=True)
class BaseBackend:
    type: BackendType | str | None = None
    uri: str | None = None
    username: str | None = None
    password: str | None = None
    host: str | None = None
    port: int | None = None
    database: str | None = None
    ssl: bool = False
    ca_file: str | None = None
    cert_file: str | None = None
    key_file: str | None = None
    verify_ssl: bool = False
    _kwargs: dict = field(default_factory=dict)
    _sqla_engine: AsyncEngine | None = (
        None  # SQLAlchemy async engine instance for SQL backends
    )
    _client: Any | None = None  # Native client instance for non-SQL backends

    def __post_init__(self):
        if self.type is None:
            self.type = "memory"

        elif isinstance(self.type, str):
            try:
                self.type = BackendType[self.type.upper()]
            except KeyError:
                raise ValueError(
                    f"Invalid backend type: {self.type}. Valid types: {[bt.value for bt in BackendType]}"
                )

        if not self.uri:
            self.uri = self.type.gen_uri(
                username=self.username,
                password=self.password,
                host=self.host,
                port=self.port,
                database=self.database,
                ssl=self.ssl,
                ca_file=self.ca_file,
                cert_file=self.cert_file,
                key_file=self.key_file,
                verify_ssl=self.verify_ssl,
            )

        # Setup is handled by backend-specific implementations

    @classmethod
    def from_dict(cls, d: dict) -> "BaseBackend":
        return cls(**d)


class BaseTrigger(abc.ABC):
    """
    Abstract base class for schedule triggers.

    A trigger determines when a scheduled job should be executed.
    """

    def __init__(self, trigger_type: str):
        self.trigger_type = trigger_type

    @abc.abstractmethod
    def get_trigger_instance(self, **kwargs) -> Any:
        """
        Get the backend-specific trigger instance.

        Args:
            **kwargs: Keyword arguments specific to the trigger type

        Returns:
            Any: A backend-specific trigger instance
        """
        pass


class BaseJobQueueManager:
    """
    Abstract base class for scheduler workers (APScheduler, RQ, etc.).
    Defines the required interface for all scheduler backends.

    Can be used as a context manager:

    ```python
    with RQManager(name="test") as manager:
        manager.add_job(job1)
    ```
    """

    def __enter__(self):
        """Context manager entry - returns self for use in with statement."""
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit - ensures workers are stopped."""
        if hasattr(self, "_worker_process") and self._worker_process is not None:
            self.stop_worker()
        if hasattr(self, "_worker_pool") and self._worker_pool is not None:
            self.stop_worker_pool()
        if hasattr(self, "_worker") and self._worker is not None:
            self.stop_worker()
        if hasattr(self, "_scheduler") and self._scheduler is not None:
            self.stop_scheduler()
        return False  # Don't suppress exceptions

    def __init__(
        self,
        type: str | None = None,
        name: str | None = None,
        base_dir: str | None = None,
        backend: BaseBackend | None = None,
        storage_options: dict = None,
        fs: AbstractFileSystem | None = None,
        **kwargs,
    ):
        """
        Initialize the APScheduler backend.

        Args:
            name: Name of the scheduler
            base_dir: Base directory for the FlowerPower project
            backend: APSBackend instance with data store and event broker
            storage_options: Storage options for filesystem access
            fs: Filesystem to use
            cfg_override: Configuration overrides for the worker
        """
        self.name = name or ""
        self._base_dir = base_dir or str(Path.cwd())
        # self._storage_options = storage_options or {}
        self._backend = backend
        self._type = type
        self._pipelines_dir = kwargs.get("pipelines_dir", PIPELINES_DIR)
        self._cfg_dir = CONFIG_DIR

        # Initialize pipeline registry (will be injected by FlowerPowerProject)
        self._pipeline_registry = None

        if storage_options is not None:
            cached = True
            cache_storage = posixpath.join(
                posixpath.expanduser(CACHE_DIR), self._base_dir.split("://")[-1]
            )
            os.makedirs(cache_storage, exist_ok=True)
        else:
            cached = False
            cache_storage = None
        if not fs:
            fs = filesystem(
                self._base_dir,
                storage_options=storage_options,
                cached=cached,
                cache_storage=cache_storage,
            )
        self._fs = fs
        self._storage_options = storage_options or fs.storage_options

        self._add_modules_path()
        self._load_config()

    def _load_config(self) -> None:
        """Load the configuration.

        Args:
            cfg_updates: Configuration updates to apply
        """
        self.cfg = ProjectConfig.load(
            base_dir=self._base_dir, job_queue_type=self._type, fs=self._fs
        ).job_queue

    def _add_modules_path(self):
        """
        Sync the filesystem.

        Returns:
            None
        """
        if self._fs.is_cache_fs:
            self._fs.sync_cache()
            project_path = self._fs._mapper.directory
            modules_path = posixpath.join(project_path, self._pipelines_dir)

        else:
            # Use the base directory directly if not using cache
            project_path = self._fs.path
            modules_path = posixpath.join(project_path, self._pipelines_dir)

        if project_path not in sys.path:
            sys.path.insert(0, project_path)

        if modules_path not in sys.path:
            sys.path.insert(0, modules_path)

    @property
    def pipeline_registry(self) -> "PipelineRegistry":
        """Get or create a PipelineRegistry instance for this job queue manager.

        This property lazily creates a PipelineRegistry using the job queue manager's
        filesystem and directory configuration. The registry is cached after first access.

        Returns:
            PipelineRegistry: A registry instance configured with this manager's settings

        Raises:
            RuntimeError: If PipelineRegistry creation fails

        Example:
            ```python
            manager = RQManager(base_dir="/path/to/project")
            registry = manager.pipeline_registry  # Creates registry on first access
            pipeline = registry.get_pipeline("my_pipeline")
            ```
        """
        if self._pipeline_registry is None:
            try:
                # Import here to avoid circular import issues
                from ..pipeline.registry import PipelineRegistry

                # Create registry using the from_filesystem factory method
                self._pipeline_registry = PipelineRegistry.from_filesystem(
                    base_dir=self._base_dir,
                    fs=self._fs,
                    storage_options=self._storage_options,
                )

                logger.debug(
                    f"Created PipelineRegistry for JobQueueManager with base_dir: {self._base_dir}"
                )

            except Exception as e:
                error_msg = f"Failed to create PipelineRegistry: {e}"
                logger.error(error_msg)
                raise RuntimeError(error_msg) from e

        return self._pipeline_registry

    # --- Pipeline-specific high-level methods ---

    def schedule_pipeline(self, name: str, *args, **kwargs):
        """Schedule a pipeline for execution using its name.

        This high-level method loads the pipeline from the internal registry and schedules
        its execution with the job queue.

        Args:
            name: Name of the pipeline to schedule
            *args: Additional positional arguments for scheduling
            **kwargs: Additional keyword arguments for scheduling

        Returns:
            Schedule ID or job ID depending on implementation

        Raises:
            NotImplementedError: Must be implemented by subclasses
        """
        raise NotImplementedError("Subclasses must implement schedule_pipeline()")

    def enqueue_pipeline(self, name: str, *args, **kwargs):
        """Enqueue a pipeline for immediate execution using its name.

        This high-level method loads the pipeline from the internal registry and enqueues
        it for immediate execution in the job queue.

        Args:
            name: Name of the pipeline to enqueue
            *args: Additional positional arguments for job execution
            **kwargs: Additional keyword arguments for job execution

        Returns:
            Job ID or result depending on implementation

        Raises:
            NotImplementedError: Must be implemented by subclasses
        """
        raise NotImplementedError("Subclasses must implement enqueue_pipeline()")

    # --- Core job queue methods ---

    def enqueue(self, func, *args, **kwargs):
        """Enqueue a job for execution (immediate, delayed, or scheduled).

        This is the main method for adding jobs to the queue. It supports:
        - Immediate execution (no run_at or run_in parameters)
        - Delayed execution (run_in parameter)
        - Scheduled execution (run_at parameter)

        Args:
            func: Function to execute. Must be importable from the worker process.
            *args: Positional arguments for the function
            **kwargs: Keyword arguments including:
                - run_in: Schedule the job to run after a delay (timedelta, int seconds, or string)
                - run_at: Schedule the job to run at a specific datetime
                - Other job queue specific parameters (timeout, retry, etc.)

        Returns:
            Job object or job ID depending on implementation

        Raises:
            NotImplementedError: Must be implemented by subclasses

        Example:
            ```python
            # Immediate execution
            manager.enqueue(my_func, arg1, arg2, kwarg1="value")

            # Delayed execution
            manager.enqueue(my_func, arg1, run_in=300)  # 5 minutes
            manager.enqueue(my_func, arg1, run_in=timedelta(hours=1))

            # Scheduled execution
            manager.enqueue(my_func, arg1, run_at=datetime(2025, 1, 1, 9, 0))
            ```
        """
        raise NotImplementedError("Subclasses must implement enqueue()")

    def enqueue_in(self, delay, func, *args, **kwargs):
        """Enqueue a job to run after a specified delay.

        This is a convenience method for delayed execution. It's equivalent to
        calling enqueue() with the run_in parameter.

        Args:
            delay: Time to wait before execution (timedelta, int seconds, or string)
            func: Function to execute
            *args: Positional arguments for the function
            **kwargs: Keyword arguments for the function and job options

        Returns:
            Job object or job ID depending on implementation

        Raises:
            NotImplementedError: Must be implemented by subclasses

        Example:
            ```python
            # Run in 5 minutes
            manager.enqueue_in(300, my_func, arg1, arg2)

            # Run in 1 hour
            manager.enqueue_in(timedelta(hours=1), my_func, arg1, kwarg1="value")

            # Run in 30 seconds (string format)
            manager.enqueue_in("30s", my_func, arg1)
            ```
        """
        raise NotImplementedError("Subclasses must implement enqueue_in()")

    def enqueue_at(self, datetime, func, *args, **kwargs):
        """Enqueue a job to run at a specific datetime.

        This is a convenience method for scheduled execution. It's equivalent to
        calling enqueue() with the run_at parameter.

        Args:
            datetime: When to execute the job (datetime object or ISO string)
            func: Function to execute
            *args: Positional arguments for the function
            **kwargs: Keyword arguments for the function and job options

        Returns:
            Job object or job ID depending on implementation

        Raises:
            NotImplementedError: Must be implemented by subclasses

        Example:
            ```python
            # Run at specific time
            manager.enqueue_at(datetime(2025, 1, 1, 9, 0), my_func, arg1, arg2)

            # Run tomorrow at 9 AM
            tomorrow_9am = datetime.now() + timedelta(days=1)
            tomorrow_9am = tomorrow_9am.replace(hour=9, minute=0, second=0)
            manager.enqueue_at(tomorrow_9am, my_func, arg1, kwarg1="value")

            # Run using ISO string
            manager.enqueue_at("2025-01-01T09:00:00", my_func, arg1)
            ```
        """
        raise NotImplementedError("Subclasses must implement enqueue_at()")
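For reference, a minimal sketch of how the removed `BackendType.gen_uri` helper composed connection URIs. Host and port fall back to `BACKEND_PROPERTIES` defaults, which this diff does not show; the `localhost:6379` values below are assumptions for illustration only:

```python
# Hypothetical call against the deleted API above.
uri = BackendType.REDIS.gen_uri(
    username="user",
    password="secret",
    database="0",
    ssl=True,          # switches the scheme from redis:// to rediss://
    verify_ssl=False,  # for Redis backends this appends ssl_cert_reqs=none
)
# Assuming BACKEND_PROPERTIES defaults of localhost:6379, this would yield:
# "rediss://user:secret@localhost:6379/0?ssl_cert_reqs=none"
```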
flowerpower/job_queue/rq/_trigger.py
DELETED
@@ -1,37 +0,0 @@
from typing import Any, Dict

from ..base import BaseTrigger


class RQTrigger(BaseTrigger):
    """
    RQTrigger adapts trigger logic for the RQ worker backend.

    Inherits from BaseTrigger and provides a trigger instance
    in dictionary format suitable for RQ scheduling.
    """

    def __init__(self, trigger_type: str):
        super().__init__(trigger_type)

    def get_trigger_instance(self, **kwargs) -> Dict[str, Any]:
        """
        Get trigger parameters for RQ Scheduler.

        Args:
            **kwargs: Keyword arguments for the trigger

        Returns:
            Dict[str, Any]: A dictionary with trigger configuration
        """
        # RQ doesn't have specific trigger classes like APScheduler.
        # Instead, we'll return a dictionary with parameters that can
        # be used by RQSchedulerBackend to schedule jobs appropriately.

        result = {"type": self.trigger_type, **kwargs}

        # For cron triggers, handle crontab string specifically
        if self.trigger_type == "cron" and "crontab" in kwargs:
            result["crontab"] = kwargs["crontab"]

        return result
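And a minimal usage sketch of the removed `RQTrigger`; the cron expression and `timezone` keyword are illustrative inputs, not values mandated by the class:

```python
# Hypothetical inputs; get_trigger_instance simply merges the trigger
# type with all keyword arguments into a plain dict for the RQ backend.
trigger = RQTrigger("cron")
params = trigger.get_trigger_instance(crontab="0 9 * * *", timezone="UTC")
# params == {"type": "cron", "crontab": "0 9 * * *", "timezone": "UTC"}
```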