FlowerPower 0.11.6.20__py3-none-any.whl → 0.21.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (101)
  1. flowerpower/__init__.py +2 -6
  2. flowerpower/cfg/__init__.py +7 -14
  3. flowerpower/cfg/base.py +29 -25
  4. flowerpower/cfg/pipeline/__init__.py +8 -6
  5. flowerpower/cfg/pipeline/_schedule.py +32 -0
  6. flowerpower/cfg/pipeline/adapter.py +0 -5
  7. flowerpower/cfg/pipeline/builder.py +377 -0
  8. flowerpower/cfg/pipeline/run.py +36 -0
  9. flowerpower/cfg/project/__init__.py +11 -24
  10. flowerpower/cfg/project/adapter.py +0 -12
  11. flowerpower/cli/__init__.py +2 -21
  12. flowerpower/cli/cfg.py +0 -3
  13. flowerpower/cli/mqtt.py +0 -6
  14. flowerpower/cli/pipeline.py +22 -415
  15. flowerpower/cli/utils.py +0 -1
  16. flowerpower/flowerpower.py +345 -146
  17. flowerpower/pipeline/__init__.py +2 -0
  18. flowerpower/pipeline/base.py +21 -12
  19. flowerpower/pipeline/io.py +58 -54
  20. flowerpower/pipeline/manager.py +165 -726
  21. flowerpower/pipeline/pipeline.py +643 -0
  22. flowerpower/pipeline/registry.py +285 -18
  23. flowerpower/pipeline/visualizer.py +5 -6
  24. flowerpower/plugins/io/__init__.py +8 -0
  25. flowerpower/plugins/mqtt/__init__.py +7 -11
  26. flowerpower/settings/__init__.py +0 -2
  27. flowerpower/settings/{backend.py → _backend.py} +0 -21
  28. flowerpower/settings/logging.py +1 -1
  29. flowerpower/utils/logging.py +24 -12
  30. flowerpower/utils/misc.py +17 -256
  31. flowerpower/utils/monkey.py +1 -83
  32. flowerpower-0.21.0.dist-info/METADATA +463 -0
  33. flowerpower-0.21.0.dist-info/RECORD +44 -0
  34. flowerpower/cfg/pipeline/schedule.py +0 -74
  35. flowerpower/cfg/project/job_queue.py +0 -238
  36. flowerpower/cli/job_queue.py +0 -1061
  37. flowerpower/fs/__init__.py +0 -29
  38. flowerpower/fs/base.py +0 -662
  39. flowerpower/fs/ext.py +0 -2143
  40. flowerpower/fs/storage_options.py +0 -1420
  41. flowerpower/job_queue/__init__.py +0 -294
  42. flowerpower/job_queue/apscheduler/__init__.py +0 -11
  43. flowerpower/job_queue/apscheduler/_setup/datastore.py +0 -110
  44. flowerpower/job_queue/apscheduler/_setup/eventbroker.py +0 -93
  45. flowerpower/job_queue/apscheduler/manager.py +0 -1051
  46. flowerpower/job_queue/apscheduler/setup.py +0 -554
  47. flowerpower/job_queue/apscheduler/trigger.py +0 -169
  48. flowerpower/job_queue/apscheduler/utils.py +0 -311
  49. flowerpower/job_queue/base.py +0 -413
  50. flowerpower/job_queue/rq/__init__.py +0 -10
  51. flowerpower/job_queue/rq/_trigger.py +0 -37
  52. flowerpower/job_queue/rq/concurrent_workers/gevent_worker.py +0 -226
  53. flowerpower/job_queue/rq/concurrent_workers/thread_worker.py +0 -231
  54. flowerpower/job_queue/rq/manager.py +0 -1582
  55. flowerpower/job_queue/rq/setup.py +0 -154
  56. flowerpower/job_queue/rq/utils.py +0 -69
  57. flowerpower/mqtt.py +0 -12
  58. flowerpower/pipeline/job_queue.py +0 -583
  59. flowerpower/pipeline/runner.py +0 -603
  60. flowerpower/plugins/io/base.py +0 -2520
  61. flowerpower/plugins/io/helpers/datetime.py +0 -298
  62. flowerpower/plugins/io/helpers/polars.py +0 -875
  63. flowerpower/plugins/io/helpers/pyarrow.py +0 -570
  64. flowerpower/plugins/io/helpers/sql.py +0 -202
  65. flowerpower/plugins/io/loader/__init__.py +0 -28
  66. flowerpower/plugins/io/loader/csv.py +0 -37
  67. flowerpower/plugins/io/loader/deltatable.py +0 -190
  68. flowerpower/plugins/io/loader/duckdb.py +0 -19
  69. flowerpower/plugins/io/loader/json.py +0 -37
  70. flowerpower/plugins/io/loader/mqtt.py +0 -159
  71. flowerpower/plugins/io/loader/mssql.py +0 -26
  72. flowerpower/plugins/io/loader/mysql.py +0 -26
  73. flowerpower/plugins/io/loader/oracle.py +0 -26
  74. flowerpower/plugins/io/loader/parquet.py +0 -35
  75. flowerpower/plugins/io/loader/postgres.py +0 -26
  76. flowerpower/plugins/io/loader/pydala.py +0 -19
  77. flowerpower/plugins/io/loader/sqlite.py +0 -23
  78. flowerpower/plugins/io/metadata.py +0 -244
  79. flowerpower/plugins/io/saver/__init__.py +0 -28
  80. flowerpower/plugins/io/saver/csv.py +0 -36
  81. flowerpower/plugins/io/saver/deltatable.py +0 -186
  82. flowerpower/plugins/io/saver/duckdb.py +0 -19
  83. flowerpower/plugins/io/saver/json.py +0 -36
  84. flowerpower/plugins/io/saver/mqtt.py +0 -28
  85. flowerpower/plugins/io/saver/mssql.py +0 -26
  86. flowerpower/plugins/io/saver/mysql.py +0 -26
  87. flowerpower/plugins/io/saver/oracle.py +0 -26
  88. flowerpower/plugins/io/saver/parquet.py +0 -36
  89. flowerpower/plugins/io/saver/postgres.py +0 -26
  90. flowerpower/plugins/io/saver/pydala.py +0 -20
  91. flowerpower/plugins/io/saver/sqlite.py +0 -24
  92. flowerpower/plugins/mqtt/cfg.py +0 -17
  93. flowerpower/plugins/mqtt/manager.py +0 -962
  94. flowerpower/settings/job_queue.py +0 -87
  95. flowerpower/utils/scheduler.py +0 -311
  96. flowerpower-0.11.6.20.dist-info/METADATA +0 -537
  97. flowerpower-0.11.6.20.dist-info/RECORD +0 -102
  98. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/WHEEL +0 -0
  99. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/entry_points.txt +0 -0
  100. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/licenses/LICENSE +0 -0
  101. {flowerpower-0.11.6.20.dist-info → flowerpower-0.21.0.dist-info}/top_level.txt +0 -0
flowerpower/job_queue/__init__.py
@@ -1,294 +0,0 @@
- import importlib
- from typing import Any, Optional
-
- from loguru import logger
-
- from ..cfg.project import ProjectConfig
- from ..fs import AbstractFileSystem
- from ..utils.logging import setup_logging
-
- if importlib.util.find_spec("apscheduler"):
-     from .apscheduler import APSBackend, APSManager
- else:
-     APSBackend = None
-     APSManager = None
- if importlib.util.find_spec("rq"):
-     from .rq import RQBackend, RQManager
- else:
-     RQBackend = None
-     RQManager = None
- from .base import BaseBackend, BaseJobQueueManager
-
- setup_logging()
-
-
- class JobQueueBackend:
-     """A factory class for creating backend instances for different job queue types.
-
-     This class provides a unified interface for creating backend instances that handle
-     the storage, queuing, and event management for different job queue types. Each backend
-     type provides specific implementations for:
-     - Job storage and persistence
-     - Queue management
-     - Event handling and communication
-     - Result storage
-
-     Example:
-         ```python
-         # Create RQ backend with Redis
-         rq_backend = JobQueueBackend(
-             job_queue_type="rq",
-             uri="redis://localhost:6379/0",
-             queues=["high", "default", "low"]
-         )
-
-         # Create APScheduler backend with PostgreSQL and Redis
-         aps_backend = JobQueueBackend(
-             job_queue_type="apscheduler",
-             data_store={
-                 "type": "postgresql",
-                 "uri": "postgresql+asyncpg://user:pass@localhost/db"
-             },
-             event_broker={
-                 "type": "redis",
-                 "uri": "redis://localhost:6379/0"
-             }
-         )
-         ```
-     """
-
-     def __new__(
-         cls,
-         job_queue_type: str,
-         **kwargs,
-     ) -> BaseBackend:
-         """Create a new backend instance based on the specified job queue type.
-
-         Args:
-             job_queue_type: The type of backend to create. Valid values are:
-                 - "rq": Redis Queue backend using Redis
-                 - "apscheduler": APScheduler backend supporting various databases
-                   and event brokers
-             **kwargs: Backend-specific configuration options:
-                 For RQ:
-                     - uri (str): Redis connection URI
-                     - queues (list[str]): List of queue names
-                     - result_ttl (int): Time to live for results in seconds
-                 For APScheduler:
-                     - data_store (dict): Data store configuration
-                     - event_broker (dict): Event broker configuration
-                     - cleanup_interval (int): Cleanup interval in seconds
-                     - max_concurrent_jobs (int): Maximum concurrent jobs
-
-         Returns:
-             BaseBackend: An instance of RQBackend or APSBackend depending on
-                 the specified job queue type.
-
-         Raises:
-             ValueError: If an invalid job queue type is specified.
-             RuntimeError: If backend initialization fails due to configuration
-                 or connection issues.
-
-         Example:
-             ```python
-             # Create RQ backend
-             rq_backend = Backend(
-                 job_queue_type="rq",
-                 uri="redis://localhost:6379/0",
-                 queues=["high", "default", "low"],
-                 result_ttl=3600
-             )
-
-             # Create APScheduler backend with PostgreSQL and Redis
-             aps_backend = Backend(
-                 job_queue_type="apscheduler",
-                 data_store={
-                     "type": "postgresql",
-                     "uri": "postgresql+asyncpg://user:pass@localhost/db",
-                     "schema": "scheduler"
-                 },
-                 event_broker={
-                     "type": "redis",
-                     "uri": "redis://localhost:6379/0"
-                 },
-                 cleanup_interval=300,
-                 max_concurrent_jobs=10
-             )
-             ```
-         """
-         if job_queue_type == "rq" and RQBackend is not None:
-             return RQBackend(**kwargs)
-         elif job_queue_type == "apscheduler" and APSBackend is not None:
-             return APSBackend(**kwargs)
-         else:
-             if job_queue_type == "rq" and RQBackend is None:
-                 logger.warning(
-                     "RQ is not installed. `JobQueueBackend` is not initialized and using the job queue is disabled. "
-                     "Install rq to use RQ. `uv pip install flowerpower[rq]` or `uv add flowerpower[rq]`"
-                 )
-                 return None
-             elif job_queue_type == "apscheduler" and APSBackend is None:
-                 logger.warning(
-                     "APScheduler is not installed. `JobQueueBackend` is not initialized and using the job queue is disabled. "
-                     "Install apscheduler to use APScheduler. `uv pip install flowerpower[apscheduler]` or `uv add flowerpower[apscheduler]`"
-                 )
-                 return None
-             else:
-                 raise ValueError(
-                     f"Invalid job queue type: {job_queue_type}. Valid types: ['rq', 'apscheduler']"
-                 )
-
-
- class JobQueueManager:
-     """A factory class for creating job queue instances for job scheduling and execution.
-
-     This class provides a unified interface for creating different types of job queue instances
-     (RQ, APScheduler, Huey) based on the specified backend type. Each job queue type provides
-     different capabilities for job scheduling and execution.
-
-     The job queue instances handle:
-     - Job scheduling and execution
-     - Background task processing
-     - Job queue management
-     - Result storage and retrieval
-
-     Example:
-         ```python
-         # Create an RQ job queue
-         rq_worker = JobQueueManager(
-             type="rq",
-             name="my_worker",
-             log_level="DEBUG"
-         )
-
-         # Create an APScheduler job queue with custom backend
-         from flowerpower.job_queue.apscheduler import APSBackend
-         backend_config = APSBackend(
-             data_store={"type": "postgresql", "uri": "postgresql+asyncpg://user:pass@localhost/db"},
-             event_broker={"type": "redis", "uri": "redis://localhost:6379/0"}
-         )
-         aps_worker = JobQueueManager(
-             type="apscheduler",
-             name="scheduler",
-             backend=backend_config
-         )
-
-         ```
-     """
-
-     def __new__(
-         cls,
-         type: str | None = None,
-         name: str | None = None,
-         base_dir: str | None = ".",
-         backend: JobQueueBackend | None = None,
-         storage_options: Optional[dict[str, Any]] = None,
-         fs: AbstractFileSystem | None = None,
-         log_level: str | None = None,
-         **kwargs,
-     ) -> BaseJobQueueManager:
-         """Create a new job queue instance based on the specified backend type.
-
-         Args:
-             type: The type of job queue to create. Valid values are:
-                 - "rq": Redis Queue job queue for Redis-based job queuing
-                 - "apscheduler": APScheduler job queue for advanced job scheduling
-             name: Name of the job queue instance. Used for identification in logs
-                 and monitoring.
-             base_dir: Base directory for job queue files and configuration. Defaults
-                 to current working directory if not specified.
-             backend: Pre-configured backend instance. If not provided, one will
-                 be created based on configuration settings.
-             storage_options: Options for configuring filesystem storage access.
-                 Example: {"mode": "async", "root": "/tmp", "protocol": "s3"}
-             fs: Custom filesystem implementation for storage operations.
-                 Example: S3FileSystem, LocalFileSystem, etc.
-             log_level: Logging level for the job queue. Valid values are:
-                 "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"
-             **kwargs: Additional configuration options passed to the specific
-                 job queue implementation.
-
-         Returns:
-             BaseJobQueueManager: An instance of the specified job queue type (RQManager,
-                 APSManager).
-
-         Raises:
-             ValueError: If an invalid job queue type is specified.
-             ImportError: If required dependencies for the chosen job queue type
-                 are not installed.
-             RuntimeError: If job queue initialization fails due to configuration
-                 or connection issues.
-
-         Example:
-             ```python
-             # Basic RQ job queue
-             worker = JobQueueManager(type="rq", name="basic_worker")
-
-             # APScheduler with custom logging and storage
-             worker = JobQueueManager(
-                 type="apscheduler",
-                 name="scheduler",
-                 base_dir="/app/data",
-                 storage_options={"mode": "async"},
-                 log_level="DEBUG"
-             )
-
-             ```
-         """
-         if type is None:
-             type = ProjectConfig.load(
-                 base_dir=base_dir,
-                 name=name,
-                 fs=fs,
-                 storage_options=storage_options or {},
-             ).job_queue.type
-
-         if type == "rq":
-             if RQManager is not None:
-                 return RQManager(
-                     name=name,
-                     base_dir=base_dir,
-                     backend=backend,
-                     storage_options=storage_options,
-                     fs=fs,
-                     log_level=log_level,
-                     **kwargs,
-                 )
-             else:
-                 logger.warning(
-                     "`JobQueueManager` can not be initialized. This might be due to missing dependencies (RQ), invalid configuration or backend not being available."
-                 )
-                 return None
-
-         elif type == "apscheduler":
-             if APSManager is not None:
-                 return APSManager(
-                     name=name,
-                     base_dir=base_dir,
-                     backend=backend,
-                     storage_options=storage_options,
-                     fs=fs,
-                     log_level=log_level,
-                     **kwargs,
-                 )
-             else:
-                 logger.warning(
-                     "`JobQueueManager` can not be initialized. This might be due to missing dependencies (APScheduler), invalid configuration or backend not being available."
-                 )
-                 return None
-
-         else:
-             raise ImportError(
-                 f"Invalid job queue type: {type}. Valid types: ['rq', 'apscheduler']"
-             )
-
-
- __all__ = [
-     "JobQueueManager",
-     "RQManager",
-     "APSManager",
-     # "HueyWorker",
-     "JobQueueBackend",
-     "RQBackend",
-     "APSBackend",
- ]
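The hunk above removes the `JobQueueBackend` and `JobQueueManager` factory classes entirely. For orientation only, here is a minimal sketch of how those factories were used in 0.11.x, assembled from the docstrings of the deleted module itself; it assumes a 0.11.x install with the `rq` extra and says nothing about the 0.21.0 API.

```python
# Sketch only: the factory pattern of the removed flowerpower.job_queue module,
# reconstructed from the docstrings above. Requires flowerpower 0.11.x with rq.
from flowerpower.job_queue import JobQueueBackend, JobQueueManager

# JobQueueBackend dispatched on `job_queue_type` and returned an RQBackend or
# APSBackend instance (or None with a warning if the optional extra was missing).
backend = JobQueueBackend(
    job_queue_type="rq",
    uri="redis://localhost:6379/0",
    queues=["high", "default", "low"],
    result_ttl=3600,
)

# JobQueueManager resolved `type` from ProjectConfig when it was None;
# here it is passed explicitly and an RQManager instance is returned.
worker = JobQueueManager(
    type="rq",
    name="my_worker",
    backend=backend,
    log_level="DEBUG",
)
```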
flowerpower/job_queue/apscheduler/__init__.py
@@ -1,11 +0,0 @@
- from .manager import APSManager
- from .setup import APSBackend, APSDataStore, APSEventBroker
- from .trigger import APSTrigger
-
- __all__ = [
-     "APSManager",
-     "APSTrigger",
-     "APSBackend",
-     "APSDataStore",
-     "APSEventBroker",
- ]
flowerpower/job_queue/apscheduler/_setup/datastore.py
@@ -1,110 +0,0 @@
- from apscheduler.datastores.base import BaseDataStore
- from sqlalchemy import text
- from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine
-
- from ...base import BackendType, BaseBackend
-
-
- class APSDataStoreType(BackendType):
-     POSTGRESQL = "postgresql"
-     SQLITE = "sqlite"
-     MYSQL = "mysql"
-     MONGODB = "mongodb"
-     MEMORY = "memory"
-
-
- class APSDataStore(BaseBackend):
-     """Data store for APScheduler."""
-
-     def __post_init__(self):
-         super().__post_init__(backend_type=APSDataStoreType)
-         self._validate_inputs()
-
-     @classmethod
-     def from_dict(cls, d: dict[str, any]) -> "APSDataStore":
-         return cls(**d)
-
-     def _validate_inputs(self) -> None:
-         if self.type.value not in [ds.value for ds in APSDataStoreType]:
-             raise ValueError(
-                 f"Invalid data store type: {self.type}. Valid types: {[ds.value for ds in APSDataStoreType]}"
-             )
-
-     async def _setup_db(self) -> None:
-         sqla_engine = create_async_engine(self.uri)
-
-         try:
-             await self._create_schema(sqla_engine)
-         except Exception:
-             await self._create_database_and_schema(sqla_engine)
-
-     async def _create_schema(self, engine: AsyncEngine) -> None:
-         if not self.schema_or_queue:
-             return
-
-         async with engine.begin() as conn:
-             await conn.execute(
-                 text(f"CREATE SCHEMA IF NOT EXISTS {self.schema_or_queue}")
-             )
-             await conn.commit()
-
-     async def _create_database_and_schema(self, engine: AsyncEngine) -> None:
-         database_name = self.uri.split("/")[-1].split("?")[0]
-         temp_uri = self.uri.replace(f"/{database_name}", "/template1")
-         temp_engine = create_async_engine(temp_uri)
-
-         async with temp_engine.begin() as conn:
-             await conn.execute(text("COMMIT"))
-             try:
-                 await conn.execute(text(f"CREATE DATABASE {database_name}"))
-             finally:
-                 await conn.execute(text("COMMIT"))
-
-         if self.schema_or_queue:
-             await self._create_schema(engine)
-
-     def setup_db(self) -> None:
-         from anyio.from_thread import start_blocking_portal
-
-         with start_blocking_portal() as portal:
-             portal.call(self._setup_db)
-
-     def _setup_sqlalchemy(self) -> None:
-         from apscheduler.datastores.sqlalchemy import SQLAlchemyDataStore
-
-         if not self.type.is_sqlite_type:
-             self.setup_db()
-         self._sqla_engine = create_async_engine(self.uri)
-         self._client = SQLAlchemyDataStore(
-             self._sqla_engine, schema=self.schema_or_queue
-         )
-
-     def _setup_mongodb(self) -> None:
-         from apscheduler.datastores.mongodb import MongoDBDataStore
-
-         self._client = MongoDBDataStore(self.uri, database=self.schema_or_queue)
-
-     def _setup_memory(self) -> None:
-         from apscheduler.datastores.memory import MemoryDataStore
-
-         self._client = MemoryDataStore()
-
-     def setup(self) -> None:
-         if self.type.is_sqla_type:
-             self._setup_sqlalchemy()
-         elif self.type.is_mongodb_type:
-             self._setup_mongodb()
-         else:
-             self._setup_memory()
-
-     @property
-     def client(self) -> BaseDataStore:
-         if self._client is None:
-             self.setup()
-         return self._client
-
-     @property
-     def sqla_engine(self) -> AsyncEngine | None:
-         if self._sqla_engine is None:
-             self.setup()
-         return self._sqla_engine
flowerpower/job_queue/apscheduler/_setup/eventbroker.py
@@ -1,93 +0,0 @@
- from apscheduler.eventbrokers.base import BaseEventBroker
- from sqlalchemy.ext.asyncio import AsyncEngine
-
- from ...base import BackendType, BaseBackend
-
-
- class APSEventBrokerType(BackendType):
-     POSTGRESQL = "postgresql"
-     MEMORY = "memory"
-     REDIS = "redis"
-     MQTT = "mqtt"
-
-
- class APSEventBroker(BaseBackend):
-     """Data store for APScheduler."""
-
-     def __post_init__(self):
-         super().__post_init__(backend_type=APSEventBrokerType)
-
-     @classmethod
-     def from_dict(cls, d: dict[str, any]) -> "APSEventBroker":
-         return cls(**d)
-
-     def _validate_inputs(self) -> None:
-         if self.type.value not in [ds.value for ds in APSEventBrokerType]:
-             raise ValueError(
-                 f"Invalid data store type: {self.type}. Valid types: {[ds.value for ds in APSEventBrokerType]}"
-             )
-
-     def _setup_asyncpg_event_broker(self):
-         from apscheduler.eventbrokers.asyncpg import AsyncpgEventBroker
-
-         if self._sqla_engine is None:
-             self._event_broker = AsyncpgEventBroker.from_dsn(dsn=self.uri)
-         else:
-             self._event_broker = AsyncpgEventBroker.from_async_sqla_engine(
-                 engine=self._sqla_engine
-             )
-
-     def _setup_mqtt_event_broker(self):
-         import urllib.parse
-
-         from apscheduler.eventbrokers.mqtt import MQTTEventBroker
-
-         # Parse the URI
-         parsed = urllib.parse.urlparse(self.uri)
-
-         hostname = parsed.hostname
-         port = parsed.port
-         username = parsed.username
-         password = parsed.password
-         use_ssl = parsed.scheme == "mqtts"
-
-         self._event_broker = MQTTEventBroker(
-             host=hostname, port=port, ssl=use_ssl, topic="flowerpower/scheduler"
-         )
-         if (self.username is not None) and (self.password is not None):
-             self._event_broker._client.username_pw_set(
-                 username,
-                 password,
-             )
-
-     def _setup_redis_event_broker(self):
-         from apscheduler.eventbrokers.redis import RedisEventBroker
-
-         self._event_broker = RedisEventBroker(self.uri)
-
-     def _setup_local_event_broker(self):
-         from apscheduler.eventbrokers.local import LocalEventBroker
-
-         self._event_broker = LocalEventBroker()
-
-     def setup(self):
-         if self.is_sqla_type:
-             self._setup_asyncpg_event_broker()
-         elif self.is_mqtt_type:
-             self._setup_mqtt_event_broker()
-         elif self.is_redis_type:
-             self._setup_redis_event_broker()
-         else:
-             self._setup_local_event_broker()
-
-     @property
-     def client(self) -> BaseEventBroker:
-         if self._event_broker is None:
-             self.setup()
-         return self._event_broker
-
-     @property
-     def sqla_engine(self) -> AsyncEngine | None:
-         if self._sqla_engine is None:
-             self.setup()
-         return self._sqla_engine