dbos 0.7.0a1__py3-none-any.whl → 0.7.0a8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/__init__.py +2 -0
- dbos/application_database.py +6 -11
- dbos/context.py +3 -2
- dbos/core.py +74 -54
- dbos/dbos.py +57 -69
- dbos/dbos_config.py +1 -1
- dbos/fastapi.py +46 -2
- dbos/kafka.py +27 -12
- dbos/migrations/versions/eab0cc1d9a14_job_queue.py +55 -0
- dbos/queue.py +36 -0
- dbos/recovery.py +1 -1
- dbos/scheduler/scheduler.py +8 -10
- dbos/schemas/system_database.py +23 -0
- dbos/system_database.py +106 -68
- {dbos-0.7.0a1.dist-info → dbos-0.7.0a8.dist-info}/METADATA +2 -2
- {dbos-0.7.0a1.dist-info → dbos-0.7.0a8.dist-info}/RECORD +19 -17
- {dbos-0.7.0a1.dist-info → dbos-0.7.0a8.dist-info}/WHEEL +0 -0
- {dbos-0.7.0a1.dist-info → dbos-0.7.0a8.dist-info}/entry_points.txt +0 -0
- {dbos-0.7.0a1.dist-info → dbos-0.7.0a8.dist-info}/licenses/LICENSE +0 -0
dbos/__init__.py
CHANGED
@@ -3,6 +3,7 @@ from .context import DBOSContextEnsure, SetWorkflowID
 from .dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowStatus
 from .dbos_config import ConfigFile, get_dbos_database_url, load_config
 from .kafka_message import KafkaMessage
+from .queue import Queue
 from .system_database import GetWorkflowsInput, WorkflowStatusString

 __all__ = [
@@ -19,4 +20,5 @@ __all__ = [
     "load_config",
     "get_dbos_database_url",
     "error",
+    "Queue",
 ]
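The newly exported Queue connects to the ENQUEUED status and enqueue/remove_from_queue plumbing in core.py below and to the job_queue migration listed above. Since queue.py itself is not shown in this diff, the sketch below is a hypothetical usage illustration: the Queue constructor arguments and the enqueue method are assumptions, not confirmed by the diff.

    from dbos import DBOS, Queue

    queue = Queue("example_queue")  # assumed: registers a named job queue

    @DBOS.workflow()
    def process(task: str) -> str:
        return task.upper()

    # Assumed flow: enqueueing records the workflow as ENQUEUED (see core.py
    # below), and the queue thread started in DBOS._launch dequeues and runs it.
    handle = queue.enqueue(process, "hello")
    print(handle.get_result())  # blocks until the queued workflow completes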
dbos/application_database.py
CHANGED
@@ -2,7 +2,6 @@ from typing import Optional, TypedDict, cast

 import sqlalchemy as sa
 import sqlalchemy.dialects.postgresql as pg
-import sqlalchemy.exc as sa_exc
 from sqlalchemy.exc import DBAPIError
 from sqlalchemy.orm import Session, sessionmaker

@@ -36,7 +35,7 @@ class ApplicationDatabase:

         # If the application database does not already exist, create it
         postgres_db_url = sa.URL.create(
-            "postgresql",
+            "postgresql+psycopg",
             username=config["database"]["username"],
             password=config["database"]["password"],
             host=config["database"]["hostname"],
@@ -55,7 +54,7 @@ class ApplicationDatabase:

         # Create a connection pool for the application database
         app_db_url = sa.URL.create(
-            "postgresql",
+            "postgresql+psycopg",
             username=config["database"]["username"],
             password=config["database"]["password"],
             host=config["database"]["hostname"],
@@ -97,11 +96,9 @@ class ApplicationDatabase:
                 )
             )
         except DBAPIError as dbapi_error:
-            if dbapi_error.orig.
+            if dbapi_error.orig.sqlstate == "23505":  # type: ignore
                 raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
-            raise
-        except Exception as e:
-            raise e
+            raise

     def record_transaction_error(self, output: TransactionResultInternal) -> None:
         try:
@@ -122,11 +119,9 @@ class ApplicationDatabase:
                 )
             )
         except DBAPIError as dbapi_error:
-            if dbapi_error.orig.
+            if dbapi_error.orig.sqlstate == "23505":  # type: ignore
                 raise DBOSWorkflowConflictIDError(output["workflow_uuid"])
-            raise
-        except Exception as e:
-            raise e
+            raise

     @staticmethod
     def check_transaction_execution(
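Both URL changes above select SQLAlchemy's psycopg (psycopg 3) dialect in place of the default psycopg2 driver, and the exception handling now reads the SQLSTATE from psycopg 3's error object. A minimal sketch of the same construction, with placeholder connection values that are not from the diff:

    import sqlalchemy as sa

    # "postgresql+psycopg" selects the psycopg 3 driver; bare "postgresql"
    # defaults to psycopg2.
    url = sa.URL.create(
        "postgresql+psycopg",
        username="postgres",   # placeholder credentials
        password="dbos",
        host="localhost",
        port=5432,
        database="app_db",
    )
    engine = sa.create_engine(url)

Under psycopg 3, a unique-key conflict surfaces on the wrapped driver error as dbapi_error.orig.sqlstate == "23505" (unique_violation), which is what the conflict checks above now test.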
dbos/context.py
CHANGED
@@ -3,6 +3,7 @@ from __future__ import annotations
 import json
 import os
 import uuid
+from contextlib import AbstractContextManager
 from contextvars import ContextVar
 from enum import Enum
 from types import TracebackType
@@ -344,7 +345,7 @@ class SetWorkflowRecovery:
         return False  # Did not handle


-class EnterDBOSWorkflow:
+class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
     def __init__(self, attributes: TracedAttributes) -> None:
         self.created_ctx = False
         self.attributes = attributes
@@ -377,7 +378,7 @@ class EnterDBOSWorkflow:
         return False  # Did not handle


-class EnterDBOSChildWorkflow:
+class EnterDBOSChildWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
     def __init__(self, attributes: TracedAttributes) -> None:
         self.parent_ctx: Optional[DBOSContext] = None
         self.child_ctx: Optional[DBOSContext] = None
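Typeshed models AbstractContextManager with two type parameters, the __enter__ result and the __exit__ return type, so declaring Literal[False] documents to type checkers that these context managers never suppress exceptions. A self-contained sketch of the same pattern (the class below is illustrative, not from the package):

    from contextlib import AbstractContextManager
    from types import TracebackType
    from typing import Literal, Optional, Type

    class EnterExample(AbstractContextManager["EnterExample", Literal[False]]):
        def __enter__(self) -> "EnterExample":
            return self

        def __exit__(
            self,
            exc_type: Optional[Type[BaseException]],
            exc: Optional[BaseException],
            tb: Optional[TracebackType],
        ) -> Literal[False]:
            return False  # Did not handle; exceptions always propagate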
dbos/core.py
CHANGED
@@ -63,6 +63,7 @@ from dbos.system_database import (
     OperationResultInternal,
     WorkflowInputs,
     WorkflowStatusInternal,
+    WorkflowStatusString,
 )

 if TYPE_CHECKING:
@@ -108,7 +109,7 @@ class _WorkflowHandlePolling(Generic[R]):
         return self.workflow_id

     def get_result(self) -> R:
-        res: R = self.dbos.
+        res: R = self.dbos._sys_db.await_workflow_result(self.workflow_id)
         return res

     def get_status(self) -> "WorkflowStatus":
@@ -126,6 +127,7 @@ def _init_workflow(
     class_name: Optional[str],
     config_name: Optional[str],
     temp_wf_type: Optional[str],
+    queue: Optional[str] = None,
 ) -> WorkflowStatusInternal:
     wfid = (
         ctx.workflow_id
@@ -134,7 +136,11 @@ def _init_workflow(
     )
     status: WorkflowStatusInternal = {
         "workflow_uuid": wfid,
-        "status":
+        "status": (
+            WorkflowStatusString.PENDING.value
+            if queue is None
+            else WorkflowStatusString.ENQUEUED.value
+        ),
         "name": wf_name,
         "class_name": class_name,
         "config_name": config_name,
@@ -150,20 +156,25 @@ def _init_workflow(
             json.dumps(ctx.authenticated_roles) if ctx.authenticated_roles else None
         ),
         "assumed_role": ctx.assumed_role,
+        "queue_name": queue,
     }

     # If we have a class name, the first arg is the instance and do not serialize
     if class_name is not None:
         inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}

-    if temp_wf_type != "transaction":
+    if temp_wf_type != "transaction" or queue is not None:
         # Synchronously record the status and inputs for workflows and single-step workflows
         # We also have to do this for single-step workflows because of the foreign key constraint on the operation outputs table
-
-        dbos.
+        # TODO: Make this transactional (and with the queue step below)
+        dbos._sys_db.update_workflow_status(status, False, ctx.in_recovery)
+        dbos._sys_db.update_workflow_inputs(wfid, utils.serialize(inputs))
     else:
         # Buffer the inputs for single-transaction workflows, but don't buffer the status
-        dbos.
+        dbos._sys_db.buffer_workflow_inputs(wfid, utils.serialize(inputs))
+
+    if queue is not None:
+        dbos._sys_db.enqueue(wfid, queue)

     return status

@@ -179,7 +190,9 @@ def _execute_workflow(
         output = func(*args, **kwargs)
         status["status"] = "SUCCESS"
         status["output"] = utils.serialize(output)
-
+        if status["queue_name"] is not None:
+            dbos._sys_db.remove_from_queue(status["workflow_uuid"])
+        dbos._sys_db.buffer_workflow_status(status)
     except DBOSWorkflowConflictIDError:
         # Retrieve the workflow handle and wait for the result.
         # Must use existing_workflow=False because workflow status might not be set yet for single transaction workflows.
@@ -191,8 +204,10 @@ def _execute_workflow(
     except Exception as error:
         status["status"] = "ERROR"
         status["error"] = utils.serialize(error)
-
-
+        if status["queue_name"] is not None:
+            dbos._sys_db.remove_from_queue(status["workflow_uuid"])
+        dbos._sys_db.update_workflow_status(status)
+        raise

     return output

@@ -217,14 +232,14 @@ def _execute_workflow_wthread(
         dbos.logger.error(
             f"Exception encountered in asynchronous workflow: {traceback.format_exc()}"
         )
-        raise
+        raise


 def _execute_workflow_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
-    status = dbos.
+    status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
-    inputs = dbos.
+    inputs = dbos._sys_db.get_workflow_inputs(workflow_id)
     if not inputs:
         raise DBOSRecoveryError(workflow_id, "Workflow inputs not found")
     wf_func = dbos._registry.workflow_info_map.get(status["name"], None)
@@ -249,6 +264,8 @@ def _execute_workflow_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]
         return _start_workflow(
             dbos,
             wf_func,
+            status["queue_name"],
+            True,
             dbos._registry.instance_info_map[iname],
             *inputs["args"],
             **inputs["kwargs"],
@@ -264,6 +281,8 @@ def _execute_workflow_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]
         return _start_workflow(
             dbos,
             wf_func,
+            status["queue_name"],
+            True,
             dbos._registry.class_info_map[class_name],
             *inputs["args"],
             **inputs["kwargs"],
@@ -271,7 +290,12 @@ def _execute_workflow_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]
     else:
         with SetWorkflowID(workflow_id):
             return _start_workflow(
-                dbos,
+                dbos,
+                wf_func,
+                status["queue_name"],
+                True,
+                *inputs["args"],
+                **inputs["kwargs"],
             )


@@ -298,34 +322,22 @@ def _workflow_wrapper(dbosreg: "_DBOSRegistry", func: F) -> F:
             "kwargs": kwargs,
         }
         ctx = get_local_dbos_context()
-
-
-
-
-
-
-
-
-
-
-
-                )
-
-
-        else:
-            with EnterDBOSWorkflow(attributes), DBOSAssumeRole(rr):
-                ctx = assert_current_dbos_context()
-                status = _init_workflow(
-                    dbos,
-                    ctx,
-                    inputs=inputs,
-                    wf_name=get_dbos_func_name(func),
-                    class_name=get_dbos_class_name(fi, func, args),
-                    config_name=get_config_name(fi, func, args),
-                    temp_wf_type=get_temp_workflow_type(func),
-                )
+        enterWorkflowCtxMgr = (
+            EnterDBOSChildWorkflow if ctx and ctx.is_workflow() else EnterDBOSWorkflow
+        )
+        with enterWorkflowCtxMgr(attributes), DBOSAssumeRole(rr):
+            ctx = assert_current_dbos_context()  # Now the child ctx
+            status = _init_workflow(
+                dbos,
+                ctx,
+                inputs=inputs,
+                wf_name=get_dbos_func_name(func),
+                class_name=get_dbos_class_name(fi, func, args),
+                config_name=get_config_name(fi, func, args),
+                temp_wf_type=get_temp_workflow_type(func),
+            )

-
+            return _execute_workflow(dbos, status, func, *args, **kwargs)

     wrapped_func = cast(F, wrapper)
     return wrapped_func
@@ -343,6 +355,8 @@ def _workflow(reg: "_DBOSRegistry") -> Callable[[F], F]:
 def _start_workflow(
     dbos: "DBOS",
     func: "Workflow[P, R]",
+    queue_name: Optional[str],
+    execute_workflow: bool,
     *args: P.args,
     **kwargs: P.kwargs,
 ) -> "WorkflowHandle[R]":
@@ -396,10 +410,14 @@ def _start_workflow(
         class_name=get_dbos_class_name(fi, func, gin_args),
         config_name=get_config_name(fi, func, gin_args),
         temp_wf_type=get_temp_workflow_type(func),
+        queue=queue_name,
     )

+    if not execute_workflow:
+        return _WorkflowHandlePolling(new_wf_id, dbos)
+
     if fself is not None:
-        future = dbos.
+        future = dbos._executor.submit(
             cast(Callable[..., R], _execute_workflow_wthread),
             dbos,
             status,
@@ -410,7 +428,7 @@
             **kwargs,
         )
     else:
-        future = dbos.
+        future = dbos._executor.submit(
             cast(Callable[..., R], _execute_workflow_wthread),
             dbos,
             status,
@@ -432,7 +450,7 @@ def _transaction(
                     f"Function {func.__name__} invoked before DBOS initialized"
                 )
             dbos = dbosreg.dbos
-            with dbos.
+            with dbos._app_db.sessionmaker() as session:
                 attributes: TracedAttributes = {
                     "name": func.__name__,
                     "operationType": OperationType.TRANSACTION.value,
@@ -493,7 +511,7 @@
                         )
                         break
                     except DBAPIError as dbapi_error:
-                        if dbapi_error.orig.
+                        if dbapi_error.orig.sqlstate == "40001":  # type: ignore
                             # Retry on serialization failure
                             ctx.get_current_span().add_event(
                                 "Transaction Serialization Failure",
@@ -505,13 +523,13 @@
                                 max_retry_wait_seconds,
                             )
                             continue
-                        raise
+                        raise
                    except Exception as error:
                        # Don't record the error if it was already recorded
                        if not has_recorded_error:
                            txn_output["error"] = utils.serialize(error)
-                            dbos.
-                        raise
+                            dbos._app_db.record_transaction_error(txn_output)
+                        raise
             return output

         fi = get_or_create_func_info(func)
@@ -541,6 +559,7 @@ def _transaction(
         set_dbos_func_name(temp_wf, "<temp>." + func.__qualname__)
         set_temp_workflow_type(temp_wf, "transaction")
         dbosreg.register_wf_function(get_dbos_func_name(temp_wf), wrapped_wf)
+        wrapper.__orig_func = temp_wf  # type: ignore

         return cast(F, wrapper)

@@ -575,7 +594,7 @@ def _step(
                 "output": None,
                 "error": None,
             }
-            recorded_output = dbos.
+            recorded_output = dbos._sys_db.check_operation_execution(
                 ctx.workflow_id, ctx.function_id
             )
             if recorded_output:
@@ -622,7 +641,7 @@ def _step(
                 step_output["error"] = (
                     utils.serialize(error) if error is not None else None
                 )
-                dbos.
+                dbos._sys_db.record_operation_result(step_output)

                 if error is not None:
                     raise error
@@ -657,6 +676,7 @@ def _step(
         set_dbos_func_name(temp_wf, "<temp>." + func.__qualname__)
         set_temp_workflow_type(temp_wf, "step")
         dbosreg.register_wf_function(get_dbos_func_name(temp_wf), wrapped_wf)
+        wrapper.__orig_func = temp_wf  # type: ignore

         return cast(F, wrapper)

@@ -671,7 +691,7 @@ def _send(
         "name": "send",
     }
     with EnterDBOSStep(attributes) as ctx:
-        dbos.
+        dbos._sys_db.send(
             ctx.workflow_id,
             ctx.curr_step_function_id,
             destination_id,
@@ -702,7 +722,7 @@ def _recv(
         with EnterDBOSStep(attributes) as ctx:
             ctx.function_id += 1  # Reserve for the sleep
             timeout_function_id = ctx.function_id
-            return dbos.
+            return dbos._sys_db.recv(
                 ctx.workflow_id,
                 ctx.curr_step_function_id,
                 timeout_function_id,
@@ -725,7 +745,7 @@ def _set_event(dbos: "DBOS", key: str, value: Any) -> None:
         "name": "set_event",
     }
     with EnterDBOSStep(attributes) as ctx:
-        dbos.
+        dbos._sys_db.set_event(
             ctx.workflow_id, ctx.curr_step_function_id, key, value
         )
     else:
@@ -753,7 +773,7 @@ def _get_event(
             "function_id": ctx.curr_step_function_id,
             "timeout_function_id": timeout_function_id,
         }
-        return dbos.
+        return dbos._sys_db.get_event(workflow_id, key, timeout_seconds, caller_ctx)
     else:
         # Directly call it outside of a workflow
-        return dbos.
+        return dbos._sys_db.get_event(workflow_id, key, timeout_seconds)
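Taken together, the core.py changes thread the new queue through the workflow lifecycle: _init_workflow records a queued workflow as ENQUEUED instead of PENDING, _start_workflow can return a polling handle without executing (execute_workflow=False), and _execute_workflow removes the entry from the queue once it finishes. A schematic of the resulting status transitions; the state names follow WorkflowStatusString, but the transition table itself is an illustration, not code from the package (queue.py is not shown in this diff):

    from typing import Optional

    def next_status(current: str, event: str) -> Optional[str]:
        # Assumed lifecycle: the queue thread dequeues ENQUEUED workflows and
        # runs them; completion removes the job_queue row and records status.
        transitions = {
            ("ENQUEUED", "dequeued"): "PENDING",
            ("PENDING", "completed"): "SUCCESS",  # remove_from_queue, then buffer status
            ("PENDING", "failed"): "ERROR",       # remove_from_queue, then update status
        }
        return transitions.get((current, event))

    assert next_status("ENQUEUED", "dequeued") == "PENDING"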
dbos/dbos.py
CHANGED
@@ -20,7 +20,6 @@ from typing import (
     Tuple,
     Type,
     TypeVar,
-    cast,
 )

 from opentelemetry.trace import Span
@@ -40,6 +39,7 @@ from dbos.core import (
     _WorkflowHandlePolling,
 )
 from dbos.decorators import classproperty
+from dbos.queue import Queue, queue_thread
 from dbos.recovery import _recover_pending_workflows, _startup_recovery_thread
 from dbos.registrations import (
     DBOSClassInfo,
@@ -138,6 +138,7 @@ class _DBOSRegistry:
         self.workflow_info_map: dict[str, Workflow[..., Any]] = {}
         self.class_info_map: dict[str, type] = {}
         self.instance_info_map: dict[str, object] = {}
+        self.queue_info_map: dict[str, Queue] = {}
         self.pollers: list[_RegisteredJob] = []
         self.dbos: Optional[DBOS] = None
         self.config: Optional[ConfigFile] = None
@@ -163,7 +164,7 @@
     ) -> None:
         if self.dbos and self.dbos._launched:
             self.dbos.stop_events.append(evt)
-            self.dbos.
+            self.dbos._executor.submit(func, *args, **kwargs)
         else:
             self.pollers.append((evt, func, args, kwargs))

@@ -265,44 +266,21 @@ class DBOS:
         dbos_logger.info("Initializing DBOS")
         self.config: ConfigFile = config
         self._launched: bool = False
-        self.
-        self.
+        self._sys_db_field: Optional[SystemDatabase] = None
+        self._app_db_field: Optional[ApplicationDatabase] = None
         self._registry: _DBOSRegistry = _get_or_create_dbos_registry()
         self._registry.dbos = self
-        self.
+        self._admin_server_field: Optional[AdminServer] = None
         self.stop_events: List[threading.Event] = []
         self.fastapi: Optional["FastAPI"] = fastapi
         self.flask: Optional["Flask"] = flask
-        self.
+        self._executor_field: Optional[ThreadPoolExecutor] = None

         # If using FastAPI, set up middleware and lifecycle events
         if self.fastapi is not None:
-            from fastapi.requests import Request as FARequest
-            from fastapi.responses import JSONResponse
-
-            async def dbos_error_handler(
-                request: FARequest, gexc: Exception
-            ) -> JSONResponse:
-                exc: DBOSException = cast(DBOSException, gexc)
-                status_code = 500
-                if exc.status_code is not None:
-                    status_code = exc.status_code
-                return JSONResponse(
-                    status_code=status_code,
-                    content={
-                        "message": str(exc.message),
-                        "dbos_error_code": str(exc.dbos_error_code),
-                        "dbos_error": str(exc.__class__.__name__),
-                    },
-                )
-
-            self.fastapi.add_exception_handler(DBOSException, dbos_error_handler)
-
             from dbos.fastapi import setup_fastapi_middleware

-            setup_fastapi_middleware(self.fastapi)
-            self.fastapi.on_event("startup")(self._launch)
-            self.fastapi.on_event("shutdown")(self._destroy)
+            setup_fastapi_middleware(self.fastapi, _get_dbos_instance())

         # If using Flask, set up middleware
         if self.flask is not None:
@@ -325,33 +303,33 @@ class DBOS:
             handler.flush()

     @property
-    def
-        if self.
+    def _executor(self) -> ThreadPoolExecutor:
+        if self._executor_field is None:
             raise DBOSException("Executor accessed before DBOS was launched")
-        rv: ThreadPoolExecutor = self.
+        rv: ThreadPoolExecutor = self._executor_field
         return rv

     @property
-    def
-        if self.
+    def _sys_db(self) -> SystemDatabase:
+        if self._sys_db_field is None:
             raise DBOSException("System database accessed before DBOS was launched")
-        rv: SystemDatabase = self.
+        rv: SystemDatabase = self._sys_db_field
         return rv

     @property
-    def
-        if self.
+    def _app_db(self) -> ApplicationDatabase:
+        if self._app_db_field is None:
             raise DBOSException(
                 "Application database accessed before DBOS was launched"
             )
-        rv: ApplicationDatabase = self.
+        rv: ApplicationDatabase = self._app_db_field
         return rv

     @property
-    def
-        if self.
+    def _admin_server(self) -> AdminServer:
+        if self._admin_server_field is None:
             raise DBOSException("Admin server accessed before DBOS was launched")
-        rv: AdminServer = self.
+        rv: AdminServer = self._admin_server_field
         return rv

     @classmethod
@@ -364,25 +342,30 @@ class DBOS:
             dbos_logger.warning(f"DBOS was already launched")
             return
         self._launched = True
-        self.
-        self.
-        self.
-        self.
+        self._executor_field = ThreadPoolExecutor(max_workers=64)
+        self._sys_db_field = SystemDatabase(self.config)
+        self._app_db_field = ApplicationDatabase(self.config)
+        self._admin_server_field = AdminServer(dbos=self)

         if not os.environ.get("DBOS__VMID"):
-            workflow_ids = self.
-            self.
+            workflow_ids = self._sys_db.get_pending_workflows("local")
+            self._executor.submit(_startup_recovery_thread, self, workflow_ids)

         # Listen to notifications
-        self.
+        self._executor.submit(self._sys_db._notification_listener)

         # Start flush workflow buffers thread
-        self.
+        self._executor.submit(self._sys_db.flush_workflow_buffers)
+
+        # Start the queue thread
+        evt = threading.Event()
+        self.stop_events.append(evt)
+        self._executor.submit(queue_thread, evt, self)

         # Grab any pollers that were deferred and start them
         for evt, func, args, kwargs in self._registry.pollers:
             self.stop_events.append(evt)
-            self.
+            self._executor.submit(func, *args, **kwargs)
         self._registry.pollers = []

         dbos_logger.info("DBOS launched")
@@ -397,20 +380,20 @@
         self._initialized = False
         for event in self.stop_events:
             event.set()
-        if self.
-            self.
-            self.
-        if self.
-            self.
-            self.
-        if self.
-            self.
-            self.
+        if self._sys_db_field is not None:
+            self._sys_db_field.destroy()
+            self._sys_db_field = None
+        if self._app_db_field is not None:
+            self._app_db_field.destroy()
+            self._app_db_field = None
+        if self._admin_server_field is not None:
+            self._admin_server_field.stop()
+            self._admin_server_field = None
         # CB - This needs work, some things ought to stop before DBs are tossed out,
         # on the other hand it hangs to move it
-        if self.
-            self.
-            self.
+        if self._executor_field is not None:
+            self._executor_field.shutdown(cancel_futures=True)
+            self._executor_field = None

     @classmethod
     def register_instance(cls, inst: object) -> None:
@@ -511,13 +494,18 @@

     @classmethod
     def kafka_consumer(
-        cls,
+        cls,
+        config: dict[str, Any],
+        topics: list[str],
+        in_order: bool = False,
     ) -> Callable[[KafkaConsumerWorkflow], KafkaConsumerWorkflow]:
         """Decorate a function to be used as a Kafka consumer."""
         try:
             from dbos.kafka import kafka_consumer

-            return kafka_consumer(
+            return kafka_consumer(
+                _get_or_create_dbos_registry(), config, topics, in_order
+            )
         except ModuleNotFoundError as e:
             raise DBOSException(
                 f"{e.name} dependency not found. Please install {e.name} via your package manager."
@@ -531,7 +519,7 @@
         **kwargs: P.kwargs,
     ) -> WorkflowHandle[R]:
         """Invoke a workflow function in the background, returning a handle to the ongoing execution."""
-        return _start_workflow(_get_dbos_instance(), func, *args, **kwargs)
+        return _start_workflow(_get_dbos_instance(), func, None, True, *args, **kwargs)

     @classmethod
     def get_workflow_status(cls, workflow_id: str) -> Optional[WorkflowStatus]:
@@ -539,11 +527,11 @@
         ctx = get_local_dbos_context()
         if ctx and ctx.is_within_workflow():
             ctx.function_id += 1
-            stat = _get_dbos_instance().
+            stat = _get_dbos_instance()._sys_db.get_workflow_status_within_wf(
                 workflow_id, ctx.workflow_id, ctx.function_id
             )
         else:
-            stat = _get_dbos_instance().
+            stat = _get_dbos_instance()._sys_db.get_workflow_status(workflow_id)
         if stat is None:
             return None

@@ -607,7 +595,7 @@
         if seconds <= 0:
             return
         with EnterDBOSStep(attributes) as ctx:
-            _get_dbos_instance().
+            _get_dbos_instance()._sys_db.sleep(
                 ctx.workflow_id, ctx.curr_step_function_id, seconds
             )
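With the expanded signature, DBOS.kafka_consumer now takes the consumer config and topic list directly, plus an in_order flag. A usage sketch; the broker address and topic are placeholders, and the interpretation of in_order (ordered, one-at-a-time message processing) is an assumption, since kafka.py's body is only summarized in this diff:

    from dbos import DBOS, KafkaMessage

    @DBOS.kafka_consumer(
        config={
            "bootstrap.servers": "localhost:9092",   # placeholder broker
            "group.id": "dbos-example-consumer",
        },
        topics=["example-topic"],
        in_order=False,
    )
    @DBOS.workflow()
    def consume(msg: KafkaMessage) -> None:
        DBOS.logger.info(f"Got message: {msg.value!r}")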
dbos/dbos_config.py
CHANGED
@@ -105,7 +105,7 @@ def get_dbos_database_url(config_file_path: str = "dbos-config.yaml") -> str:
     """
     dbos_config = load_config(config_file_path)
     db_url = URL.create(
-        "postgresql",
+        "postgresql+psycopg",
         username=dbos_config["database"]["username"],
         password=dbos_config["database"]["password"],
         host=dbos_config["database"]["hostname"],
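Seen from the caller's side, get_dbos_database_url now yields a psycopg 3 connection URL. An illustration with placeholder values (the actual output depends on dbos-config.yaml):

    from dbos import get_dbos_database_url

    url = get_dbos_database_url()  # reads dbos-config.yaml by default
    # 0.7.0a1 built:  postgresql://user:pass@localhost:5432/app
    # 0.7.0a8 builds: postgresql+psycopg://user:pass@localhost:5432/app
    print(url)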