dbos 0.26.0a8.tar.gz → 0.26.0a10.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-0.26.0a8 → dbos-0.26.0a10}/PKG-INFO +1 -1
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_client.py +123 -2
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_dbos_config.py +4 -54
- dbos-0.26.0a10/dbos/_docker_pg_helper.py +191 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/cli/cli.py +17 -1
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/dbos-config.schema.json +0 -4
- {dbos-0.26.0a8 → dbos-0.26.0a10}/pyproject.toml +2 -1
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_config.py +6 -117
- dbos-0.26.0a8/dbos/_cloudutils/authentication.py +0 -163
- dbos-0.26.0a8/dbos/_cloudutils/cloudutils.py +0 -254
- dbos-0.26.0a8/dbos/_cloudutils/databases.py +0 -241
- dbos-0.26.0a8/dbos/_db_wizard.py +0 -220
- dbos-0.26.0a8/tests/test_dbwizard.py +0 -84
- {dbos-0.26.0a8 → dbos-0.26.0a10}/LICENSE +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/README.md +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/__init__.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/__main__.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_admin_server.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_app_db.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_classproperty.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_conductor/conductor.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_conductor/protocol.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_context.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_core.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_croniter.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_dbos.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_debug.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_error.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_fastapi.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_flask.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_kafka.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_kafka_message.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_logger.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/env.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/script.py.mako +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_outcome.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_queue.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_recovery.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_registrations.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_request.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_roles.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_scheduler.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_schemas/__init__.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_schemas/application_database.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_schemas/system_database.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_serialization.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_sys_db.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/__package/main.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_tracer.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_utils.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_workflow_commands.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/cli/_github_init.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/cli/_template_init.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/py.typed +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/__init__.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/atexit_no_launch.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/classdefs.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/client_collateral.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/client_worker.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/conftest.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/more_classdefs.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/queuedworkflow.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_admin_server.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_async.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_classdecorators.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_client.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_concurrency.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_croniter.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_dbos.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_debug.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_docker_secrets.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_failures.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_fastapi.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_fastapi_roles.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_flask.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_kafka.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_outcome.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_package.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_queue.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_scheduler.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_schema_migration.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_singleton.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_spans.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_sqlalchemy.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_workflow_introspection.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/tests/test_workflow_management.py +0 -0
- {dbos-0.26.0a8 → dbos-0.26.0a10}/version/__init__.py +0 -0
{dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_client.py

@@ -1,7 +1,7 @@
 import asyncio
 import sys
 import uuid
-from typing import Any, Generic, Optional, TypedDict, TypeVar
+from typing import Any, Generic, List, Optional, TypedDict, TypeVar
 
 if sys.version_info < (3, 11):
     from typing_extensions import NotRequired
@@ -15,7 +15,12 @@ from dbos._error import DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
 from dbos._serialization import WorkflowInputs
 from dbos._sys_db import SystemDatabase, WorkflowStatusInternal, WorkflowStatusString
-from dbos._workflow_commands import
+from dbos._workflow_commands import (
+    WorkflowStatus,
+    get_workflow,
+    list_queued_workflows,
+    list_workflows,
+)
 
 R = TypeVar("R", covariant=True)  # A generic type for workflow return values
 
@@ -202,3 +207,119 @@ class DBOSClient:
         return await asyncio.to_thread(
             self.get_event, workflow_id, key, timeout_seconds
         )
+
+    def cancel_workflow(self, workflow_id: str) -> None:
+        self._sys_db.cancel_workflow(workflow_id)
+
+    async def cancel_workflow_async(self, workflow_id: str) -> None:
+        await asyncio.to_thread(self.cancel_workflow, workflow_id)
+
+    def resume_workflow(self, workflow_id: str) -> None:
+        self._sys_db.resume_workflow(workflow_id)
+
+    async def resume_workflow_async(self, workflow_id: str) -> None:
+        await asyncio.to_thread(self.resume_workflow, workflow_id)
+
+    def list_workflows(
+        self,
+        *,
+        workflow_ids: Optional[List[str]] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        app_version: Optional[str] = None,
+        user: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+    ) -> List[WorkflowStatus]:
+        return list_workflows(
+            self._sys_db,
+            workflow_ids=workflow_ids,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            app_version=app_version,
+            user=user,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+        )
+
+    async def list_workflows_async(
+        self,
+        *,
+        workflow_ids: Optional[List[str]] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        app_version: Optional[str] = None,
+        user: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+    ) -> List[WorkflowStatus]:
+        return await asyncio.to_thread(
+            self.list_workflows,
+            workflow_ids=workflow_ids,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            app_version=app_version,
+            user=user,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+        )
+
+    def list_queued_workflows(
+        self,
+        *,
+        queue_name: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+    ) -> List[WorkflowStatus]:
+        return list_queued_workflows(
+            self._sys_db,
+            queue_name=queue_name,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+        )
+
+    async def list_queued_workflows_async(
+        self,
+        *,
+        queue_name: Optional[str] = None,
+        status: Optional[str] = None,
+        start_time: Optional[str] = None,
+        end_time: Optional[str] = None,
+        name: Optional[str] = None,
+        limit: Optional[int] = None,
+        offset: Optional[int] = None,
+        sort_desc: bool = False,
+    ) -> List[WorkflowStatus]:
+        return await asyncio.to_thread(
+            self.list_queued_workflows,
+            queue_name=queue_name,
+            status=status,
+            start_time=start_time,
+            end_time=end_time,
+            name=name,
+            limit=limit,
+            offset=offset,
+            sort_desc=sort_desc,
+        )
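Below is a minimal, hypothetical sketch of calling the workflow-management methods this release adds to `DBOSClient`. The constructor argument and the `WorkflowStatus` attribute names (`workflow_id`, `status`) are assumptions carried over from earlier releases; they are not part of this diff.

```python
from dbos._client import DBOSClient

# Assumption: the client is built from a Postgres connection URL, as in prior versions.
client = DBOSClient("postgresql://postgres:dbos@localhost:5432/example_app")

# List the ten most recent workflows, newest first.
for status in client.list_workflows(limit=10, sort_desc=True):
    print(status.workflow_id, status.status)  # attribute names assumed

# Cancel a workflow by ID, then resume it later.
client.cancel_workflow("example-workflow-id")
client.resume_workflow("example-workflow-id")
```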
{dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/_dbos_config.py

@@ -15,7 +15,6 @@ from jsonschema import ValidationError, validate
 from rich import print
 from sqlalchemy import URL, make_url
 
-from ._db_wizard import db_wizard, load_db_connection
 from ._error import DBOSInitializationError
 from ._logger import dbos_logger
 
@@ -70,7 +69,6 @@ class DatabaseConfig(TypedDict, total=False):
     sys_db_pool_size: Optional[int]
     ssl: Optional[bool]
     ssl_ca: Optional[str]
-    local_suffix: Optional[bool]
     migrate: Optional[List[str]]
     rollback: Optional[List[str]]
 
@@ -288,7 +286,6 @@ def load_config(
     config_file_path: str = DBOS_CONFIG_PATH,
     *,
     run_process_config: bool = True,
-    use_db_wizard: bool = True,
     silent: bool = False,
 ) -> ConfigFile:
     """
@@ -339,13 +336,12 @@ def load_config(
 
     data = cast(ConfigFile, data)
     if run_process_config:
-        data = process_config(data=data,
+        data = process_config(data=data, silent=silent)
     return data  # type: ignore
 
 
 def process_config(
     *,
-    use_db_wizard: bool = True,
     data: ConfigFile,
     silent: bool = False,
 ) -> ConfigFile:
@@ -372,22 +368,17 @@ def process_config(
     # database_url takes precedence over database config, but we need to preserve rollback and migrate if they exist
     migrate = data["database"].get("migrate", False)
     rollback = data["database"].get("rollback", False)
-    local_suffix = data["database"].get("local_suffix", False)
     if data.get("database_url"):
         dbconfig = parse_database_url_to_dbconfig(cast(str, data["database_url"]))
         if migrate:
             dbconfig["migrate"] = cast(List[str], migrate)
         if rollback:
             dbconfig["rollback"] = cast(List[str], rollback)
-        if local_suffix:
-            dbconfig["local_suffix"] = cast(bool, local_suffix)
         data["database"] = dbconfig
 
     if "app_db_name" not in data["database"] or not (data["database"]["app_db_name"]):
         data["database"]["app_db_name"] = _app_name_to_db_name(data["name"])
 
-    # Load the DB connection file. Use its values for missing connection parameters. Use defaults otherwise.
-    db_connection = load_db_connection()
     connection_passed_in = data["database"].get("hostname", None) is not None
 
     dbos_dbport: Optional[int] = None
@@ -397,49 +388,22 @@ def process_config(
             dbos_dbport = int(dbport_env)
         except ValueError:
             pass
-    dbos_dblocalsuffix: Optional[bool] = None
-    dblocalsuffix_env = os.getenv("DBOS_DBLOCALSUFFIX")
-    if dblocalsuffix_env:
-        try:
-            dbos_dblocalsuffix = dblocalsuffix_env.casefold() == "true".casefold()
-        except ValueError:
-            pass
 
     data["database"]["hostname"] = (
-        os.getenv("DBOS_DBHOST")
-        or data["database"].get("hostname")
-        or db_connection.get("hostname")
-        or "localhost"
+        os.getenv("DBOS_DBHOST") or data["database"].get("hostname") or "localhost"
     )
 
-    data["database"]["port"] = (
-        dbos_dbport or data["database"].get("port") or db_connection.get("port") or 5432
-    )
+    data["database"]["port"] = dbos_dbport or data["database"].get("port") or 5432
     data["database"]["username"] = (
-        os.getenv("DBOS_DBUSER")
-        or data["database"].get("username")
-        or db_connection.get("username")
-        or "postgres"
+        os.getenv("DBOS_DBUSER") or data["database"].get("username") or "postgres"
     )
     data["database"]["password"] = (
         os.getenv("DBOS_DBPASSWORD")
         or data["database"].get("password")
-        or db_connection.get("password")
         or os.environ.get("PGPASSWORD")
        or "dbos"
     )
 
-    local_suffix = False
-    dbcon_local_suffix = db_connection.get("local_suffix")
-    if dbcon_local_suffix is not None:
-        local_suffix = dbcon_local_suffix
-    db_local_suffix = data["database"].get("local_suffix")
-    if db_local_suffix is not None:
-        local_suffix = db_local_suffix
-    if dbos_dblocalsuffix is not None:
-        local_suffix = dbos_dblocalsuffix
-    data["database"]["local_suffix"] = local_suffix
-
     if not data["database"].get("app_db_pool_size"):
         data["database"]["app_db_pool_size"] = 20
     if not data["database"].get("sys_db_pool_size"):
@@ -454,10 +418,6 @@ def process_config(
     elif "run_admin_server" not in data["runtimeConfig"]:
         data["runtimeConfig"]["run_admin_server"] = True
 
-    # Check the connectivity to the database and make sure it's properly configured
-    # Note, never use db wizard if the DBOS is running in debug mode (i.e. DBOS_DEBUG_WORKFLOW_ID env var is set)
-    debugWorkflowId = os.getenv("DBOS_DEBUG_WORKFLOW_ID")
-
     # Pretty-print where we've loaded database connection information from, respecting the log level
     if not silent and logs["logLevel"] == "INFO" or logs["logLevel"] == "DEBUG":
         d = data["database"]
@@ -470,21 +430,11 @@ def process_config(
             print(
                 f"[bold blue]Using database connection string: {conn_string}[/bold blue]"
             )
-        elif db_connection.get("hostname"):
-            print(
-                f"[bold blue]Loading database connection string from .dbos/db_connection: {conn_string}[/bold blue]"
-            )
         else:
             print(
                 f"[bold blue]Using default database connection string: {conn_string}[/bold blue]"
             )
 
-    if use_db_wizard and debugWorkflowId is None:
-        data = db_wizard(data)
-
-    if "local_suffix" in data["database"] and data["database"]["local_suffix"]:
-        data["database"]["app_db_name"] = f"{data['database']['app_db_name']}_local"
-
     # Return data as ConfigFile type
     return data
 
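The net effect of the configuration changes above is a simpler precedence for every connection parameter: environment variable, then the value from `dbos-config.yaml`, then a hard-coded default; the `.dbos/db_connection` file and `local_suffix` no longer participate. A small illustrative sketch of that precedence for the hostname, using a hypothetical helper name:

```python
import os
from typing import Optional


def resolve_hostname(config_hostname: Optional[str]) -> str:
    # Mirrors process_config: DBOS_DBHOST wins, then the config value, then "localhost".
    return os.getenv("DBOS_DBHOST") or config_hostname or "localhost"


os.environ.pop("DBOS_DBHOST", None)  # make the checks below deterministic
assert resolve_hostname(None) == "localhost"  # nothing set, fall back to the default
assert resolve_hostname("db.internal") == "db.internal"  # config value beats the default
```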
dbos-0.26.0a10/dbos/_docker_pg_helper.py (new file)

@@ -0,0 +1,191 @@
+import logging
+import os
+import subprocess
+import time
+
+import docker
+import psycopg
+from docker.errors import APIError, NotFound
+
+logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
+from typing import Any, Dict, Optional, Tuple
+
+
+def start_docker_pg() -> None:
+    """
+    Starts a PostgreSQL database in a Docker container.
+
+    This function checks if Docker is installed, and if so, starts a local PostgreSQL
+    database in a Docker container. It configures the database with default settings
+    and provides connection information upon successful startup.
+
+    The function uses environment variable PGPASSWORD if available, otherwise
+    defaults to 'dbos' as the database password.
+
+    Returns:
+        None
+
+    Raises:
+        Exception: If there is an error starting the Docker container or if the
+            PostgreSQL service does not become available within the timeout period.
+    """
+
+    logging.info("Attempting to create a Docker Postgres container...")
+    has_docker = check_docker_installed()
+
+    pool_config = {
+        "host": "localhost",
+        "port": 5432,
+        "password": os.environ.get("PGPASSWORD", "dbos"),
+        "user": "postgres",
+        "database": "postgres",
+        "connect_timeout": 2,
+    }
+
+    # If Docker is installed, start a local Docker based Postgres
+    if has_docker:
+        start_docker_postgres(pool_config)
+        logging.info(
+            f"Postgres available at postgres://postgres:{pool_config['password']}@{pool_config['host']}:{pool_config['port']}"
+        )
+    else:
+        logging.warning("Docker not detected locally")
+
+
+def check_db_connectivity(config: Dict[str, Any]) -> Optional[Exception]:
+    conn = None
+    try:
+        conn = psycopg.connect(
+            host=config["host"],
+            port=config["port"],
+            user=config["user"],
+            password=config["password"],
+            dbname=config["database"],
+            connect_timeout=config.get("connect_timeout", 30),
+        )
+        cursor = conn.cursor()
+        cursor.execute("SELECT 1;")
+        cursor.close()
+        return None
+    except Exception as error:
+        return error
+    finally:
+        if conn is not None:
+            conn.close()
+
+
+def exec_sync(cmd: str) -> Tuple[str, str]:
+    result = subprocess.run(cmd, shell=True, text=True, capture_output=True, check=True)
+    return result.stdout, result.stderr
+
+
+def start_docker_postgres(pool_config: Dict[str, Any]) -> bool:
+    logging.info("Starting a Postgres Docker container...")
+    container_name = "dbos-db"
+    pg_data = "/var/lib/postgresql/data"
+
+    try:
+        client = docker.from_env()
+
+        # Check if the container already exists
+        try:
+            container = client.containers.get(container_name)
+            if container.status == "running":
+                logging.info(f"Container '{container_name}' is already running.")
+                return True
+            elif container.status == "exited":
+                container.start()
+                logging.info(
+                    f"Container '{container_name}' was stopped and has been restarted."
+                )
+                return True
+        except NotFound:
+            # Container doesn't exist, proceed with creation
+            pass
+
+        # Create and start the container
+        container = client.containers.run(
+            image="pgvector/pgvector:pg16",
+            name=container_name,
+            detach=True,
+            environment={
+                "POSTGRES_PASSWORD": pool_config["password"],
+                "PGDATA": pg_data,
+            },
+            ports={"5432/tcp": pool_config["port"]},
+            volumes={pg_data: {"bind": pg_data, "mode": "rw"}},
+            remove=True,  # Equivalent to --rm
+        )
+
+        logging.info(f"Created container: {container.id}")
+
+    except APIError as e:
+        raise Exception(f"Docker API error: {str(e)}")
+
+    # Wait for PostgreSQL to be ready
+    attempts = 30
+    while attempts > 0:
+        if attempts % 5 == 0:
+            logging.info("Waiting for Postgres Docker container to start...")
+
+        if check_db_connectivity(pool_config) is None:
+            return True
+
+        attempts -= 1
+        time.sleep(1)
+
+    raise Exception(
+        f"Failed to start Docker container: Container {container_name} did not start in time."
+    )
+
+
+def check_docker_installed() -> bool:
+    """
+    Check if Docker is installed and running using the docker library.
+
+    Returns:
+        bool: True if Docker is installed and running, False otherwise.
+    """
+    try:
+        client = docker.from_env()
+        client.ping()  # type: ignore
+        return True
+    except Exception:
+        return False
+
+
+def stop_docker_pg() -> None:
+    """
+    Stops the Docker Postgres container.
+
+    Returns:
+        bool: True if the container was successfully stopped, False if it wasn't running
+
+    Raises:
+        Exception: If there was an error stopping the container
+    """
+    logger = logging.getLogger()
+    container_name = "dbos-db"
+    try:
+        logger.info(f"Stopping Docker Postgres container {container_name}...")
+
+        client = docker.from_env()
+
+        try:
+            container = client.containers.get(container_name)
+
+            if container.status == "running":
+                container.stop()
+                logger.info(
+                    f"Successfully stopped Docker Postgres container {container_name}."
+                )
+            else:
+                logger.info(f"Container {container_name} exists but is not running.")
+
+        except docker.errors.NotFound:
+            logger.info(f"Container {container_name} does not exist.")
+
+    except Exception as error:
+        error_message = str(error)
+        logger.error(f"Failed to stop Docker Postgres container: {error_message}")
+        raise
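A short sketch of driving the new helper directly from Python; the `dbos postgres start` and `dbos postgres stop` CLI commands added below wrap the same two entry points. The connectivity-check dictionary mirrors the `pool_config` built inside `start_docker_pg`.

```python
from dbos._docker_pg_helper import check_db_connectivity, start_docker_pg, stop_docker_pg

# Launch (or restart) the "dbos-db" pgvector container on port 5432.
start_docker_pg()

# Probe the database with the same settings start_docker_pg uses internally.
error = check_db_connectivity(
    {
        "host": "localhost",
        "port": 5432,
        "user": "postgres",
        "password": "dbos",
        "database": "postgres",
        "connect_timeout": 2,
    }
)
print("Postgres is reachable" if error is None else f"Connection failed: {error}")

# Stop the container; it was started with remove=True, so Docker also removes it.
stop_docker_pg()
```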
{dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/cli/cli.py

@@ -20,6 +20,7 @@ from dbos._debug import debug_workflow, parse_start_command
 from .. import load_config
 from .._app_db import ApplicationDatabase
 from .._dbos_config import _is_valid_app_name
+from .._docker_pg_helper import start_docker_pg, stop_docker_pg
 from .._sys_db import SystemDatabase, reset_system_database
 from .._workflow_commands import (
     get_workflow,
@@ -37,6 +38,21 @@ queue = typer.Typer()
 app.add_typer(workflow, name="workflow", help="Manage DBOS workflows")
 workflow.add_typer(queue, name="queue", help="Manage enqueued workflows")
 
+postgres = typer.Typer()
+app.add_typer(
+    postgres, name="postgres", help="Manage local Postgres database with Docker"
+)
+
+
+@postgres.command(name="start", help="Start a local Postgres database")
+def pg_start() -> None:
+    start_docker_pg()
+
+
+@postgres.command(name="stop", help="Stop the local Postgres database")
+def pg_stop() -> None:
+    stop_docker_pg()
+
 
 def _on_windows() -> bool:
     return platform.system() == "Windows"
@@ -246,7 +262,7 @@ def reset(
 def debug(
     workflow_id: Annotated[str, typer.Argument(help="Workflow ID to debug")],
 ) -> None:
-    config = load_config(silent=True
+    config = load_config(silent=True)
     start = config["runtimeConfig"]["start"]
     if not start:
         typer.echo("No start commands found in 'dbos-config.yaml'")
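A hypothetical smoke test (not part of the package) confirming that the new `postgres` sub-app is wired into the CLI; `CliRunner` is Typer's standard test harness.

```python
from typer.testing import CliRunner

from dbos.cli.cli import app

runner = CliRunner()
result = runner.invoke(app, ["postgres", "--help"])
assert result.exit_code == 0
assert "start" in result.output and "stop" in result.output
```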
{dbos-0.26.0a8 → dbos-0.26.0a10}/dbos/dbos-config.schema.json

@@ -62,10 +62,6 @@
         "type": "string",
         "description": "If using SSL/TLS to securely connect to a database, path to an SSL root certificate file"
       },
-      "local_suffix": {
-        "type": "boolean",
-        "description": "Whether to suffix app_db_name with '_local'. Set to true when doing local development using a DBOS Cloud database."
-      },
       "app_db_client": {
         "type": "string",
         "description": "Specify the database client to use to connect to the application database",
{dbos-0.26.0a8 → dbos-0.26.0a10}/pyproject.toml

@@ -28,7 +28,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "0.26.0a8"
+version = "0.26.0a10"
 
 [project.license]
 text = "MIT"
@@ -88,4 +88,5 @@ dev = [
     "pdm-backend>=2.4.2",
     "pytest-asyncio>=0.25.0",
     "pyright>=1.1.398",
+    "types-docker>=7.1.0.20241229",
 ]