dbos 1.0.0__py3-none-any.whl → 1.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_app_db.py +48 -41
- dbos/_client.py +1 -0
- dbos/_context.py +6 -0
- dbos/_dbos.py +4 -0
- dbos/_error.py +14 -0
- dbos/_sys_db.py +133 -58
- dbos/cli/cli.py +71 -47
- {dbos-1.0.0.dist-info → dbos-1.1.0.dist-info}/METADATA +1 -1
- {dbos-1.0.0.dist-info → dbos-1.1.0.dist-info}/RECORD +12 -12
- {dbos-1.0.0.dist-info → dbos-1.1.0.dist-info}/WHEEL +0 -0
- {dbos-1.0.0.dist-info → dbos-1.1.0.dist-info}/entry_points.txt +0 -0
- {dbos-1.0.0.dist-info → dbos-1.1.0.dist-info}/licenses/LICENSE +0 -0
dbos/_app_db.py
CHANGED
@@ -8,6 +8,7 @@ from sqlalchemy.orm import Session, sessionmaker
 
 from . import _serialization
 from ._error import DBOSUnexpectedStepError, DBOSWorkflowConflictIDError
+from ._logger import dbos_logger
 from ._schemas.application_database import ApplicationSchema
 from ._sys_db import StepInfo
 
@@ -39,21 +40,6 @@ class ApplicationDatabase:
     ):
         app_db_url = sa.make_url(database_url).set(drivername="postgresql+psycopg")
 
-        # If the application database does not already exist, create it
-        if not debug_mode:
-            postgres_db_engine = sa.create_engine(
-                app_db_url.set(database="postgres"),
-                **engine_kwargs,
-            )
-            with postgres_db_engine.connect() as conn:
-                conn.execution_options(isolation_level="AUTOCOMMIT")
-                if not conn.execute(
-                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
-                    parameters={"db_name": app_db_url.database},
-                ).scalar():
-                    conn.execute(sa.text(f"CREATE DATABASE {app_db_url.database}"))
-            postgres_db_engine.dispose()
-
         if engine_kwargs is None:
             engine_kwargs = {}
 
@@ -61,40 +47,61 @@ class ApplicationDatabase:
             app_db_url,
             **engine_kwargs,
         )
+        self._engine_kwargs = engine_kwargs
         self.sessionmaker = sessionmaker(bind=self.engine)
         self.debug_mode = debug_mode
 
+    def run_migrations(self) -> None:
+        if self.debug_mode:
+            dbos_logger.warning(
+                "Application database migrations are skipped in debug mode."
+            )
+            return
+        # Check if the database exists
+        app_db_url = self.engine.url
+        postgres_db_engine = sa.create_engine(
+            app_db_url.set(database="postgres"),
+            **self._engine_kwargs,
+        )
+        with postgres_db_engine.connect() as conn:
+            conn.execution_options(isolation_level="AUTOCOMMIT")
+            if not conn.execute(
+                sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                parameters={"db_name": app_db_url.database},
+            ).scalar():
+                conn.execute(sa.text(f"CREATE DATABASE {app_db_url.database}"))
+        postgres_db_engine.dispose()
+
         # Create the dbos schema and transaction_outputs table in the application database
-        if not debug_mode:
-            with self.engine.begin() as conn:
-                schema_creation_query = sa.text(
-                    f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
-                )
-                conn.execute(schema_creation_query)
+        with self.engine.begin() as conn:
+            schema_creation_query = sa.text(
+                f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
+            )
+            conn.execute(schema_creation_query)
 
-            inspector = inspect(self.engine)
-            if not inspector.has_table(
-                "transaction_outputs", schema=ApplicationSchema.schema
-            ):
-                ApplicationSchema.metadata_obj.create_all(self.engine)
-            else:
-                columns = inspector.get_columns(
-                    "transaction_outputs", schema=ApplicationSchema.schema
-                )
-                column_names = [col["name"] for col in columns]
+        inspector = inspect(self.engine)
+        if not inspector.has_table(
+            "transaction_outputs", schema=ApplicationSchema.schema
+        ):
+            ApplicationSchema.metadata_obj.create_all(self.engine)
+        else:
+            columns = inspector.get_columns(
+                "transaction_outputs", schema=ApplicationSchema.schema
+            )
+            column_names = [col["name"] for col in columns]
 
-                if "function_name" not in column_names:
-                    # Column missing, alter table to add it
-                    with self.engine.connect() as conn:
-                        conn.execute(
-                            text(
-                                f"""
-                                ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
-                                ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
-                                """
-                            )
-                        )
-                        conn.commit()
+            if "function_name" not in column_names:
+                # Column missing, alter table to add it
+                with self.engine.connect() as conn:
+                    conn.execute(
+                        text(
+                            f"""
+                            ALTER TABLE {ApplicationSchema.schema}.transaction_outputs
+                            ADD COLUMN function_name TEXT NOT NULL DEFAULT '';
+                            """
+                        )
+                    )
+                    conn.commit()
 
     def destroy(self) -> None:
         self.engine.dispose()
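Taken together, these changes split construction from migration: `ApplicationDatabase.__init__` now only builds the SQLAlchemy engine, and all DDL (creating the database, schema, and columns) runs in the new `run_migrations` method. A minimal sketch of the pattern; the `LazyDatabase` class and its connection string are illustrative, not DBOS code:

import sqlalchemy as sa

class LazyDatabase:
    def __init__(self, database_url: str) -> None:
        # Construction is cheap and side-effect free: just build the engine.
        self.engine = sa.create_engine(sa.make_url(database_url))

    def run_migrations(self) -> None:
        # All DDL happens here, so callers that never migrate need
        # no DDL privileges and no reachable maintenance database.
        with self.engine.begin() as conn:
            conn.execute(sa.text("CREATE SCHEMA IF NOT EXISTS dbos"))

db = LazyDatabase("postgresql+psycopg://postgres:dbos@localhost:5432/app")
db.run_migrations()  # invoked explicitly, e.g. from DBOS.launch()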
dbos/_client.py
CHANGED
@@ -99,6 +99,7 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):
 
 class DBOSClient:
     def __init__(self, database_url: str, *, system_database: Optional[str] = None):
+        # We only create database connections but do not run migrations
         self._sys_db = SystemDatabase(
             database_url=database_url,
             engine_kwargs={
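The added comment is the client-side payoff of the split above: `DBOSClient` opens connections but never migrates, so it can run under credentials that lack DDL rights. A hedged usage sketch (the connection string is illustrative):

from dbos import DBOSClient

# Connects to existing system and application databases without
# attempting CREATE DATABASE or schema migrations.
client = DBOSClient("postgresql://appuser:secret@localhost:5432/myapp")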
dbos/_context.py
CHANGED
@@ -392,6 +392,7 @@ class SetWorkflowTimeout:
             else None
         )
         self.saved_workflow_timeout: Optional[int] = None
+        self.saved_workflow_deadline_epoch_ms: Optional[int] = None
 
     def __enter__(self) -> SetWorkflowTimeout:
         # Code to create a basic context
@@ -402,6 +403,8 @@ class SetWorkflowTimeout:
         ctx = assert_current_dbos_context()
         self.saved_workflow_timeout = ctx.workflow_timeout_ms
         ctx.workflow_timeout_ms = self.workflow_timeout_ms
+        self.saved_workflow_deadline_epoch_ms = ctx.workflow_deadline_epoch_ms
+        ctx.workflow_deadline_epoch_ms = None
         return self
 
     def __exit__(
@@ -411,6 +414,9 @@ class SetWorkflowTimeout:
         traceback: Optional[TracebackType],
     ) -> Literal[False]:
         assert_current_dbos_context().workflow_timeout_ms = self.saved_workflow_timeout
+        assert_current_dbos_context().workflow_deadline_epoch_ms = (
+            self.saved_workflow_deadline_epoch_ms
+        )
         # Code to clean up the basic context if we created it
         if self.created_ctx:
             _clear_local_dbos_context()
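`SetWorkflowTimeout` now saves and clears `workflow_deadline_epoch_ms` on entry and restores it on exit, so a timeout set inside an already-deadlined workflow forces a fresh deadline instead of inheriting the outer one. A self-contained sketch of the save/clear/restore pattern; the `ctx` object is a stand-in, not the DBOS context class:

from types import SimpleNamespace

ctx = SimpleNamespace(workflow_timeout_ms=None, workflow_deadline_epoch_ms=1234)

class SetTimeout:
    def __init__(self, timeout_ms: int) -> None:
        self.timeout_ms = timeout_ms

    def __enter__(self) -> "SetTimeout":
        # Save both fields, then clear the deadline so a fresh one is
        # computed from the new timeout rather than inherited.
        self.saved_timeout = ctx.workflow_timeout_ms
        self.saved_deadline = ctx.workflow_deadline_epoch_ms
        ctx.workflow_timeout_ms = self.timeout_ms
        ctx.workflow_deadline_epoch_ms = None
        return self

    def __exit__(self, *exc: object) -> bool:
        ctx.workflow_timeout_ms = self.saved_timeout
        ctx.workflow_deadline_epoch_ms = self.saved_deadline
        return False

with SetTimeout(5000):
    assert ctx.workflow_deadline_epoch_ms is None  # recomputed downstream
assert ctx.workflow_deadline_epoch_ms == 1234  # restored on exit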
dbos/_dbos.py
CHANGED
@@ -433,6 +433,10 @@ class DBOS:
         if debug_mode:
             return
 
+        # Run migrations for the system and application databases
+        self._sys_db.run_migrations()
+        self._app_db.run_migrations()
+
         admin_port = self._config.get("runtimeConfig", {}).get("admin_port")
         if admin_port is None:
             admin_port = 3001
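With migrations gone from the constructors, `DBOS.launch()` is now the single place they run. An illustrative lifecycle sketch, assuming a minimal config (see the DBOS docs for real options):

from dbos import DBOS

DBOS(config={"name": "my-app", "database_url": "postgresql://localhost:5432/my_app"})
DBOS.launch()  # as of 1.1.0, system and application migrations run here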
dbos/_error.py
CHANGED
@@ -62,6 +62,7 @@ class DBOSErrorCode(Enum):
     WorkflowCancelled = 10
     UnexpectedStep = 11
     QueueDeduplicated = 12
+    AwaitedWorkflowCancelled = 13
     ConflictingRegistrationError = 25
 
 
@@ -206,6 +207,19 @@ class DBOSQueueDeduplicatedError(DBOSException):
         )
 
 
+class DBOSAwaitedWorkflowCancelledError(DBOSException):
+    def __init__(self, workflow_id: str):
+        self.workflow_id = workflow_id
+        super().__init__(
+            f"Awaited workflow {workflow_id} was cancelled",
+            dbos_error_code=DBOSErrorCode.AwaitedWorkflowCancelled.value,
+        )
+
+    def __reduce__(self) -> Any:
+        # Tell jsonpickle how to reconstruct this object
+        return (self.__class__, (self.workflow_id,))
+
+
 #######################################
 ## BaseException
 #######################################
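The `__reduce__` override matters because DBOS serializes workflow exceptions into the system database; the default reconstruction would pass the formatted message, not `workflow_id`, back through `__init__`. A sketch of the round-trip this enables, using a stand-in class with the same shape:

import pickle

class DemoError(Exception):
    def __init__(self, workflow_id: str) -> None:
        self.workflow_id = workflow_id
        super().__init__(f"Awaited workflow {workflow_id} was cancelled")

    def __reduce__(self):
        # Rebuild as DemoError(workflow_id), preserving the attribute.
        return (self.__class__, (self.workflow_id,))

err = pickle.loads(pickle.dumps(DemoError("wf-123")))
assert err.workflow_id == "wf-123"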
dbos/_sys_db.py
CHANGED
@@ -32,6 +32,7 @@ from dbos._utils import INTERNAL_QUEUE_NAME
 from . import _serialization
 from ._context import get_local_dbos_context
 from ._error import (
+    DBOSAwaitedWorkflowCancelledError,
     DBOSConflictingWorkflowError,
     DBOSDeadLetterQueueError,
     DBOSNonExistentWorkflowError,
@@ -96,6 +97,10 @@ class WorkflowStatus:
     executor_id: Optional[str]
     # The application version on which this workflow was started
     app_version: Optional[str]
+    # The start-to-close timeout of the workflow in ms
+    workflow_timeout_ms: Optional[int]
+    # The deadline of a workflow, computed by adding its timeout to its start time.
+    workflow_deadline_epoch_ms: Optional[int]
 
     # INTERNAL FIELDS
 
@@ -222,6 +227,47 @@ class StepInfo(TypedDict):
 _dbos_null_topic = "__null__topic__"
 
 
+class ConditionCount(TypedDict):
+    condition: threading.Condition
+    count: int
+
+
+class ThreadSafeConditionDict:
+    def __init__(self) -> None:
+        self._dict: Dict[str, ConditionCount] = {}
+        self._lock = threading.Lock()
+
+    def get(self, key: str) -> Optional[threading.Condition]:
+        with self._lock:
+            if key not in self._dict:
+                # Key does not exist, return None
+                return None
+            return self._dict[key]["condition"]
+
+    def set(
+        self, key: str, value: threading.Condition
+    ) -> tuple[bool, threading.Condition]:
+        with self._lock:
+            if key in self._dict:
+                # Key already exists, do not overwrite. Increment the wait count.
+                cc = self._dict[key]
+                cc["count"] += 1
+                return False, cc["condition"]
+            self._dict[key] = ConditionCount(condition=value, count=1)
+            return True, value
+
+    def pop(self, key: str) -> None:
+        with self._lock:
+            if key in self._dict:
+                cc = self._dict[key]
+                cc["count"] -= 1
+                if cc["count"] == 0:
+                    # No more threads waiting on this condition, remove it
+                    del self._dict[key]
+            else:
+                dbos_logger.warning(f"Key {key} not found in condition dictionary.")
+
+
 class SystemDatabase:
 
     def __init__(
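The count field is what distinguishes this from a plain locked dict: the first `set` for a key registers the condition, later `set` calls join it and bump the count, and `pop` only deletes the entry once every waiter has left. A usage sketch against the class defined above:

import threading

waiters = ThreadSafeConditionDict()

first, cond = waiters.set("wf-1::my_event", threading.Condition())
assert first  # first waiter registered its own condition

joined, same = waiters.set("wf-1::my_event", threading.Condition())
assert not joined and same is cond  # second waiter reuses the existing one

waiters.pop("wf-1::my_event")  # count 2 -> 1, entry kept
waiters.pop("wf-1::my_event")  # count 1 -> 0, entry removed
assert waiters.get("wf-1::my_event") is None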
@@ -241,34 +287,63 @@ class SystemDatabase:
         sysdb_name = system_db_url.database + SystemSchema.sysdb_suffix
         system_db_url = system_db_url.set(database=sysdb_name)
 
-        if not debug_mode:
-            # If the system database does not already exist, create it
-            engine = sa.create_engine(
-                system_db_url.set(database="postgres"), **engine_kwargs
-            )
-            with engine.connect() as conn:
-                conn.execution_options(isolation_level="AUTOCOMMIT")
-                if not conn.execute(
-                    sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
-                    parameters={"db_name": sysdb_name},
-                ).scalar():
-                    dbos_logger.info(f"Creating system database {sysdb_name}")
-                    conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
-            engine.dispose()
-
         self.engine = sa.create_engine(
             system_db_url,
             **engine_kwargs,
         )
+        self._engine_kwargs = engine_kwargs
+
+        self.notification_conn: Optional[psycopg.connection.Connection] = None
+        self.notifications_map = ThreadSafeConditionDict()
+        self.workflow_events_map = ThreadSafeConditionDict()
+
+        # Now we can run background processes
+        self._run_background_processes = True
+        self._debug_mode = debug_mode
+
+    # Run migrations
+    def run_migrations(self) -> None:
+        if self._debug_mode:
+            dbos_logger.warning("System database migrations are skipped in debug mode.")
+            return
+        system_db_url = self.engine.url
+        sysdb_name = system_db_url.database
+        # If the system database does not already exist, create it
+        engine = sa.create_engine(
+            system_db_url.set(database="postgres"), **self._engine_kwargs
+        )
+        with engine.connect() as conn:
+            conn.execution_options(isolation_level="AUTOCOMMIT")
+            if not conn.execute(
+                sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                parameters={"db_name": sysdb_name},
+            ).scalar():
+                dbos_logger.info(f"Creating system database {sysdb_name}")
+                conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+        engine.dispose()
 
         # Run a schema migration for the system database
-
-
-
+        migration_dir = os.path.join(
+            os.path.dirname(os.path.realpath(__file__)), "_migrations"
+        )
+        alembic_cfg = Config()
+        alembic_cfg.set_main_option("script_location", migration_dir)
+        logging.getLogger("alembic").setLevel(logging.WARNING)
+        # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
+        escaped_conn_string = re.sub(
+            r"%(?=[0-9A-Fa-f]{2})",
+            "%%",
+            self.engine.url.render_as_string(hide_password=False),
+        )
+        alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
+        try:
+            command.upgrade(alembic_cfg, "head")
+        except Exception as e:
+            dbos_logger.warning(
+                f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
             )
         alembic_cfg = Config()
         alembic_cfg.set_main_option("script_location", migration_dir)
-        logging.getLogger("alembic").setLevel(logging.WARNING)
         # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
         escaped_conn_string = re.sub(
             r"%(?=[0-9A-Fa-f]{2})",
@@ -282,29 +357,6 @@ class SystemDatabase:
             dbos_logger.warning(
                 f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
             )
-        alembic_cfg = Config()
-        alembic_cfg.set_main_option("script_location", migration_dir)
-        # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
-        escaped_conn_string = re.sub(
-            r"%(?=[0-9A-Fa-f]{2})",
-            "%%",
-            self.engine.url.render_as_string(hide_password=False),
-        )
-        alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
-        try:
-            command.upgrade(alembic_cfg, "head")
-        except Exception as e:
-            dbos_logger.warning(
-                f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
-            )
-
-        self.notification_conn: Optional[psycopg.connection.Connection] = None
-        self.notifications_map: Dict[str, threading.Condition] = {}
-        self.workflow_events_map: Dict[str, threading.Condition] = {}
-
-        # Now we can run background processes
-        self._run_background_processes = True
-        self._debug_mode = debug_mode
 
     # Destroy the pool when finished
     def destroy(self) -> None:
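One line worth unpacking is the `%%` escaping before `set_main_option`: Alembic's `Config` is backed by `ConfigParser`, whose `%`-interpolation would otherwise choke on URL-encoded characters such as those in a password. A quick check of the regex used above:

import re

url = "postgresql://user:p%40ss@localhost:5432/dbos_sys"  # %40 encodes '@'
escaped = re.sub(r"%(?=[0-9A-Fa-f]{2})", "%%", url)
assert escaped == "postgresql://user:p%%40ss@localhost:5432/dbos_sys"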
@@ -714,9 +766,9 @@
                 error = row[2]
                 raise _serialization.deserialize_exception(error)
             elif status == WorkflowStatusString.CANCELLED.value:
-                # Raise
+                # Raise AwaitedWorkflowCancelledError here, not the cancellation exception
                 # because the awaiting workflow is not being cancelled.
-                raise
+                raise DBOSAwaitedWorkflowCancelledError(workflow_id)
             else:
                 pass  # CB: I guess we're assuming the WF will show up eventually.
             time.sleep(1)
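Callers that block on another workflow's result now see a typed error when that workflow is cancelled, instead of the previous re-raise. A hedged handling sketch; `handle` stands for any DBOS workflow handle obtained elsewhere:

from dbos._error import DBOSAwaitedWorkflowCancelledError

def await_result(handle):
    try:
        return handle.get_result()
    except DBOSAwaitedWorkflowCancelledError as e:
        # The awaited workflow (not this one) was cancelled.
        print(f"Workflow {e.workflow_id} was cancelled")
        return None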
@@ -790,6 +842,8 @@
                 SystemSchema.workflow_inputs.c.inputs,
                 SystemSchema.workflow_status.c.output,
                 SystemSchema.workflow_status.c.error,
+                SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
+                SystemSchema.workflow_status.c.workflow_timeout_ms,
             ).join(
                 SystemSchema.workflow_inputs,
                 SystemSchema.workflow_status.c.workflow_uuid
@@ -871,6 +925,8 @@
             info.input = inputs
             info.output = output
             info.error = exception
+            info.workflow_deadline_epoch_ms = row[18]
+            info.workflow_timeout_ms = row[19]
 
             infos.append(info)
         return infos
@@ -900,6 +956,8 @@
                 SystemSchema.workflow_inputs.c.inputs,
                 SystemSchema.workflow_status.c.output,
                 SystemSchema.workflow_status.c.error,
+                SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
+                SystemSchema.workflow_status.c.workflow_timeout_ms,
             ).select_from(
                 SystemSchema.workflow_queue.join(
                     SystemSchema.workflow_status,
@@ -977,6 +1035,8 @@
             info.input = inputs
             info.output = output
             info.error = exception
+            info.workflow_deadline_epoch_ms = row[18]
+            info.workflow_timeout_ms = row[19]
 
             infos.append(info)
 
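These row indices feed the two new `WorkflowStatus` fields, so workflow listings can now report the configured timeout and the computed deadline. A small consumer sketch, assuming a populated status object as extended above:

def describe_timeout(status) -> str:
    # Both fields are Optional: None means no timeout was configured,
    # or (for the deadline) that it has not been computed yet.
    if status.workflow_timeout_ms is None:
        return "no timeout"
    return (
        f"timeout={status.workflow_timeout_ms}ms, "
        f"deadline={status.workflow_deadline_epoch_ms}"
    )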
@@ -1282,7 +1342,12 @@
         condition = threading.Condition()
         # Must acquire first before adding to the map. Otherwise, the notification listener may notify it before the condition is acquired and waited.
         condition.acquire()
-        self.notifications_map[payload] = condition
+        success, _ = self.notifications_map.set(payload, condition)
+        if not success:
+            # This should not happen, but if it does, it means the workflow is executed concurrently.
+            condition.release()
+            self.notifications_map.pop(payload)
+            raise DBOSWorkflowConflictIDError(workflow_uuid)
 
         # Check if the key is already in the database. If not, wait for the notification.
         init_recv: Sequence[Any]
@@ -1375,11 +1440,11 @@
                         f"Received notification on channel: {channel}, payload: {notify.payload}"
                     )
                     if channel == "dbos_notifications_channel":
-                        if (
-                            notify.payload
-                            and notify.payload in self.notifications_map
-                        ):
-                            condition = self.notifications_map[notify.payload]
+                        if notify.payload:
+                            condition = self.notifications_map.get(notify.payload)
+                            if condition is None:
+                                # No condition found for this payload
+                                continue
                             condition.acquire()
                             condition.notify_all()
                             condition.release()
@@ -1387,11 +1452,11 @@
                             f"Signaled notifications condition for {notify.payload}"
                         )
                     elif channel == "dbos_workflow_events_channel":
-                        if (
-                            notify.payload
-                            and notify.payload in self.workflow_events_map
-                        ):
-                            condition = self.workflow_events_map[notify.payload]
+                        if notify.payload:
+                            condition = self.workflow_events_map.get(notify.payload)
+                            if condition is None:
+                                # No condition found for this payload
+                                continue
                             condition.acquire()
                             condition.notify_all()
                             condition.release()
@@ -1529,8 +1594,13 @@
 
         payload = f"{target_uuid}::{key}"
         condition = threading.Condition()
-        self.workflow_events_map[payload] = condition
         condition.acquire()
+        success, existing_condition = self.workflow_events_map.set(payload, condition)
+        if not success:
+            # Wait on the existing condition
+            condition.release()
+            condition = existing_condition
+            condition.acquire()
 
         # Check if the key is already in the database. If not, wait for the notification.
         init_recv: Sequence[Any]
@@ -1770,8 +1840,13 @@
                 # If a timeout is set, set the deadline on dequeue
                 workflow_deadline_epoch_ms=sa.case(
                     (
-                        SystemSchema.workflow_status.c.workflow_timeout_ms.isnot(
-                            None
+                        sa.and_(
+                            SystemSchema.workflow_status.c.workflow_timeout_ms.isnot(
+                                None
+                            ),
+                            SystemSchema.workflow_status.c.workflow_deadline_epoch_ms.is_(
+                                None
+                            ),
                         ),
                         sa.func.extract("epoch", sa.func.now()) * 1000
                         + SystemSchema.workflow_status.c.workflow_timeout_ms,
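The guarded CASE assigns a deadline exactly once, at first dequeue: only rows with a timeout and no existing deadline get `now() * 1000 + timeout`, so re-enqueued or recovered workflows keep their original deadline. A plain-Python restatement of that logic:

import time

def deadline_on_dequeue(timeout_ms, existing_deadline_ms):
    now_ms = time.time() * 1000
    if timeout_ms is not None and existing_deadline_ms is None:
        return now_ms + timeout_ms
    return existing_deadline_ms  # may be None if no timeout is set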
dbos/cli/cli.py
CHANGED
@@ -12,7 +12,7 @@ import sqlalchemy as sa
 import typer
 from rich import print
 from rich.prompt import IntPrompt
-from typing_extensions import Annotated
+from typing_extensions import Annotated, List
 
 from dbos._debug import debug_workflow, parse_start_command
 
@@ -147,55 +147,16 @@ def init(
     ] = False,
 ) -> None:
     try:
-
         git_templates = ["dbos-toolbox", "dbos-app-starter", "dbos-cron-starter"]
         templates_dir = get_templates_directory()
-        templates = git_templates + [
-            x.name for x in os.scandir(templates_dir) if x.is_dir()
-        ]
-
-        if config and template is None:
-            template = templates[-1]
 
-        if template:
-            if template not in templates:
-                raise Exception(f"Template {template} not found in {templates_dir}")
-        else:
-            print("\n[bold]Available templates:[/bold]")
-            for idx, template_name in enumerate(templates, 1):
-                print(f"  {idx}. {template_name}")
-            while True:
-                try:
-                    choice = IntPrompt.ask(
-                        "\nSelect template number",
-                        show_choices=False,
-                        show_default=False,
-                    )
-                    if 1 <= choice <= len(templates):
-                        template = templates[choice - 1]
-                        break
-                    else:
-                        print(
-                            "[red]Invalid selection. Please choose a number from the list.[/red]"
-                        )
-                except (KeyboardInterrupt, EOFError):
-                    raise typer.Abort()
-                except ValueError:
-                    print("[red]Please enter a valid number.[/red]")
-
-        if template in git_templates:
-            project_name = template
-        else:
-            if project_name is None:
-                project_name = typing.cast(
-                    str,
-                    typer.prompt("What is your project's name?", get_project_name()),
-                )
-
-            if not _is_valid_app_name(project_name):
-                raise Exception(
-                    f"{project_name} is an invalid DBOS app name. App names must be between 3 and 30 characters long and contain only lowercase letters, numbers, dashes, and underscores."
-                )
+        project_name, template = _resolve_project_name_and_template(
+            project_name=project_name,
+            template=template,
+            config=config,
+            git_templates=git_templates,
+            templates_dir=templates_dir,
+        )
 
         if template in git_templates:
             create_template_from_github(app_name=project_name, template_name=template)
@@ -207,6 +168,67 @@
         print(f"[red]{e}[/red]")
 
 
+def _resolve_project_name_and_template(
+    project_name: Optional[str],
+    template: Optional[str],
+    config: bool,
+    git_templates: List[str],
+    templates_dir: str,
+) -> tuple[str, str]:
+    templates = git_templates + [
+        x.name for x in os.scandir(templates_dir) if x.is_dir()
+    ]
+
+    if config and template is None:
+        template = templates[-1]
+
+    if template:
+        if template not in templates:
+            raise Exception(f"Template {template} not found in {templates_dir}")
+    else:
+        print("\n[bold]Available templates:[/bold]")
+        for idx, template_name in enumerate(templates, 1):
+            print(f"  {idx}. {template_name}")
+        while True:
+            try:
+                choice = IntPrompt.ask(
+                    "\nSelect template number",
+                    show_choices=False,
+                    show_default=False,
+                )
+                if 1 <= choice <= len(templates):
+                    template = templates[choice - 1]
+                    break
+                else:
+                    print(
+                        "[red]Invalid selection. Please choose a number from the list.[/red]"
+                    )
+            except (KeyboardInterrupt, EOFError):
+                raise typer.Abort()
+            except ValueError:
+                print("[red]Please enter a valid number.[/red]")
+
+    if template in git_templates:
+        if project_name is None:
+            project_name = template
+    else:
+        if project_name is None:
+            project_name = typing.cast(
+                str,
+                typer.prompt("What is your project's name?", get_project_name()),
+            )
+
+    if not _is_valid_app_name(project_name):
+        raise Exception(
+            f"{project_name} is an invalid DBOS app name. App names must be between 3 and 30 characters long and contain only lowercase letters, numbers, dashes, and underscores."
+        )
+
+    assert project_name is not None, "Project name cannot be None"
+    assert template is not None, "Template name cannot be None"
+
+    return project_name, template
+
+
 @app.command(
     help="Run your database schema migrations using the migration commands in 'dbos-config.yaml'"
 )
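Pulling the prompt loop into `_resolve_project_name_and_template` also makes the resolution logic callable without a TTY when both values are supplied, e.g. from tests. A hedged sketch (template and project names are illustrative):

from dbos.cli.cli import _resolve_project_name_and_template, get_templates_directory

name, template = _resolve_project_name_and_template(
    project_name="my-app",
    template="dbos-app-starter",
    config=False,
    git_templates=["dbos-toolbox", "dbos-app-starter", "dbos-cron-starter"],
    templates_dir=get_templates_directory(),
)
assert (name, template) == ("my-app", "dbos-app-starter")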
@@ -258,6 +280,8 @@ def migrate(
                 "pool_size": 2,
             },
         )
+        sys_db.run_migrations()
+        app_db.run_migrations()
     except Exception as e:
         typer.echo(f"DBOS system schema migration failed: {e}")
     finally:
{dbos-1.0.0.dist-info → dbos-1.1.0.dist-info}/RECORD
CHANGED
@@ -1,23 +1,23 @@
-dbos-1.
-dbos-1.
-dbos-1.
-dbos-1.
+dbos-1.1.0.dist-info/METADATA,sha256=jmrqILlNfqY1zumpckLQnd1HTmTvzjTDZH23TXXvMoU,13265
+dbos-1.1.0.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-1.1.0.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.1.0.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=A_28_nJ1nBBYDmCxtklJR9O2v14JRMtD1rAo_D4y8Kc,9764
-dbos/_app_db.py,sha256=
+dbos/_app_db.py,sha256=wxZz3ja9QgVuyp5YLsAqa_MpuyD5tl0C5GHTLl8fwF0,10514
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256
+dbos/_client.py,sha256=-nK2GjS9D0qnD2DkRDs7gKxNECwYlsvW6hFCjADlnv0,14186
 dbos/_conductor/conductor.py,sha256=o0IaZjwnZ2TOyHeP2H4iSX6UnXLXQ4uODvWAKD9hHMs,21703
 dbos/_conductor/protocol.py,sha256=wgOFZxmS81bv0WCB9dAyg0s6QzldpzVKQDoSPeaX0Ws,6967
-dbos/_context.py,sha256=
+dbos/_context.py,sha256=5ajoWAmToAfzzmMLylnJZoL4Ny9rBwZWuG05sXadMIA,24798
 dbos/_core.py,sha256=UDpSgRA9m_YuViNXR9tVgNFLC-zxKZPxjlkj2a-Kj00,48317
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=f5s9cVgsiMkAkpvctLHE6sjVAEuC-eFEpRddYBIKxiA,46430
 dbos/_dbos_config.py,sha256=IufNrIC-M2xSNTXyT_KXlEdfB3j03pPLv_nE0fEq4_U,20955
 dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
 dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
-dbos/_error.py,sha256=
+dbos/_error.py,sha256=q0OQJZTbR8FFHV9hEpAGpz9oWBT5L509zUhmyff7FJw,8500
 dbos/_event_loop.py,sha256=NmaLbEQFfEK36S_0KhVD39YdYrGce3qSKCTJ-5RqKQ0,2136
 dbos/_fastapi.py,sha256=m4SL3H9P-NBQ_ZrbFxAWMOqNyIi3HGEn2ODR7xAK038,3118
 dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
@@ -47,7 +47,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=3Z0L72bOgHnusK1hBaETWU9RfiLBP0QnS-fdu41i0yY,5835
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=gVa5arMBT8rKHkycPS8HyRzfvQdQRxYqIclw0Fcp6CM,84240
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -63,8 +63,8 @@ dbos/_utils.py,sha256=UbpMYRBSyvJqdXeWAnfSw8xXM1R1mfnyl1oTunhEjJM,513
 dbos/_workflow_commands.py,sha256=2E8FRUv_nLYkpBTwfhh_ELhySYpMrm8qGB9J44g6DSE,3872
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=HinoCGrAUTiSeq7AAoCFfhdiE0uDw7vLMuDMN1_YTLI,20705
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.
+dbos-1.1.0.dist-info/RECORD,,
{dbos-1.0.0.dist-info → dbos-1.1.0.dist-info}/WHEEL: file without changes
{dbos-1.0.0.dist-info → dbos-1.1.0.dist-info}/entry_points.txt: file without changes
{dbos-1.0.0.dist-info → dbos-1.1.0.dist-info}/licenses/LICENSE: file without changes