dbos 1.5.0a10__py3-none-any.whl → 1.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_client.py +7 -7
- dbos/_context.py +17 -12
- dbos/_core.py +51 -34
- dbos/_dbos.py +20 -6
- dbos/_kafka.py +2 -1
- dbos/_registrations.py +5 -3
- dbos/_roles.py +3 -2
- dbos/_scheduler.py +11 -8
- dbos/_sys_db.py +10 -7
- dbos/_workflow_commands.py +5 -5
- {dbos-1.5.0a10.dist-info → dbos-1.6.0.dist-info}/METADATA +1 -1
- {dbos-1.5.0a10.dist-info → dbos-1.6.0.dist-info}/RECORD +15 -15
- {dbos-1.5.0a10.dist-info → dbos-1.6.0.dist-info}/WHEEL +0 -0
- {dbos-1.5.0a10.dist-info → dbos-1.6.0.dist-info}/entry_points.txt +0 -0
- {dbos-1.5.0a10.dist-info → dbos-1.6.0.dist-info}/licenses/LICENSE +0 -0
dbos/_client.py
CHANGED
@@ -1,7 +1,7 @@
 import asyncio
 import sys
 import uuid
-from typing import Any, Generic, List, Optional, TypedDict, TypeVar
+from typing import Any, Generic, List, Optional, TypedDict, TypeVar, Union

 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
@@ -128,7 +128,6 @@ class DBOSClient:
 workflow_name = options["workflow_name"]
 queue_name = options["queue_name"]

-app_version = options.get("app_version")
 max_recovery_attempts = options.get("max_recovery_attempts")
 if max_recovery_attempts is None:
 max_recovery_attempts = DEFAULT_MAX_RECOVERY_ATTEMPTS
@@ -139,6 +138,7 @@ class DBOSClient:
 enqueue_options_internal: EnqueueOptionsInternal = {
 "deduplication_id": options.get("deduplication_id"),
 "priority": options.get("priority"),
+"app_version": options.get("app_version"),
 }

 inputs: WorkflowInputs = {
@@ -152,7 +152,7 @@ class DBOSClient:
 "name": workflow_name,
 "class_name": None,
 "queue_name": queue_name,
-"app_version": app_version,
+"app_version": enqueue_options_internal["app_version"],
 "config_name": None,
 "authenticated_user": None,
 "assumed_role": None,
@@ -284,7 +284,7 @@ class DBOSClient:
 self,
 *,
 workflow_ids: Optional[List[str]] = None,
-status: Optional[str] = None,
+status: Optional[Union[str, List[str]]] = None,
 start_time: Optional[str] = None,
 end_time: Optional[str] = None,
 name: Optional[str] = None,
@@ -314,7 +314,7 @@ class DBOSClient:
 self,
 *,
 workflow_ids: Optional[List[str]] = None,
-status: Optional[str] = None,
+status: Optional[Union[str, List[str]]] = None,
 start_time: Optional[str] = None,
 end_time: Optional[str] = None,
 name: Optional[str] = None,
@@ -344,7 +344,7 @@ class DBOSClient:
 self,
 *,
 queue_name: Optional[str] = None,
-status: Optional[str] = None,
+status: Optional[Union[str, List[str]]] = None,
 start_time: Optional[str] = None,
 end_time: Optional[str] = None,
 name: Optional[str] = None,
@@ -368,7 +368,7 @@ class DBOSClient:
 self,
 *,
 queue_name: Optional[str] = None,
-status: Optional[str] = None,
+status: Optional[Union[str, List[str]]] = None,
 start_time: Optional[str] = None,
 end_time: Optional[str] = None,
 name: Optional[str] = None,
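On the client side, the listing APIs now take either a single status string or a list of statuses. A rough usage sketch, assuming a DBOSClient constructed from a Postgres connection string (the URL, queue name, and status values below are illustrative, not taken from this diff):

from dbos import DBOSClient

client = DBOSClient("postgresql://user:pass@localhost:5432/dbos_app")  # hypothetical URL

# A single status string still works as before.
pending = client.list_workflows(status="PENDING")

# New in 1.6.0: several statuses at once, matched with a SQL IN filter (see dbos/_sys_db.py below).
finished = client.list_workflows(status=["SUCCESS", "ERROR"])
queued = client.list_queued_workflows(queue_name="example_queue", status=["ENQUEUED", "PENDING"])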
dbos/_context.py
CHANGED
@@ -93,6 +93,8 @@ class DBOSContext:
 self.assumed_role: Optional[str] = None
 self.step_status: Optional[StepStatus] = None

+self.app_version: Optional[str] = None
+
 # A user-specified workflow timeout. Takes priority over a propagated deadline.
 self.workflow_timeout_ms: Optional[int] = None
 # A propagated workflow deadline.
@@ -138,23 +140,18 @@ class DBOSContext:
 self,
 wfid: Optional[str],
 attributes: TracedAttributes,
-is_temp_workflow: bool = False,
 ) -> None:
 if wfid is None or len(wfid) == 0:
 wfid = self.assign_workflow_id()
 self.id_assigned_for_next_workflow = ""
 self.workflow_id = wfid
 self.function_id = 0
-
-self._start_span(attributes)
+self._start_span(attributes)

-def end_workflow(
-self, exc_value: Optional[BaseException], is_temp_workflow: bool = False
-) -> None:
+def end_workflow(self, exc_value: Optional[BaseException]) -> None:
 self.workflow_id = ""
 self.function_id = -1
-
-self._end_span(exc_value)
+self._end_span(exc_value)

 def is_within_workflow(self) -> bool:
 return len(self.workflow_id) > 0
@@ -435,7 +432,11 @@ class SetEnqueueOptions:
 """

 def __init__(
-self,
+self,
+*,
+deduplication_id: Optional[str] = None,
+priority: Optional[int] = None,
+app_version: Optional[str] = None,
 ) -> None:
 self.created_ctx = False
 self.deduplication_id: Optional[str] = deduplication_id
@@ -446,6 +447,8 @@ class SetEnqueueOptions:
 )
 self.priority: Optional[int] = priority
 self.saved_priority: Optional[int] = None
+self.app_version: Optional[str] = app_version
+self.saved_app_version: Optional[str] = None

 def __enter__(self) -> SetEnqueueOptions:
 # Code to create a basic context
@@ -458,6 +461,8 @@ class SetEnqueueOptions:
 ctx.deduplication_id = self.deduplication_id
 self.saved_priority = ctx.priority
 ctx.priority = self.priority
+self.saved_app_version = ctx.app_version
+ctx.app_version = self.app_version
 return self

 def __exit__(
@@ -469,6 +474,7 @@ class SetEnqueueOptions:
 curr_ctx = assert_current_dbos_context()
 curr_ctx.deduplication_id = self.saved_deduplication_id
 curr_ctx.priority = self.saved_priority
+curr_ctx.app_version = self.saved_app_version
 # Code to clean up the basic context if we created it
 if self.created_ctx:
 _clear_local_dbos_context()
@@ -479,7 +485,6 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
 def __init__(self, attributes: TracedAttributes) -> None:
 self.created_ctx = False
 self.attributes = attributes
-self.is_temp_workflow = attributes["name"] == "temp_wf"
 self.saved_workflow_timeout: Optional[int] = None
 self.saved_deduplication_id: Optional[str] = None
 self.saved_priority: Optional[int] = None
@@ -503,7 +508,7 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
 self.saved_priority = ctx.priority
 ctx.priority = None
 ctx.start_workflow(
-None, self.attributes
+None, self.attributes
 ) # Will get from the context's next workflow ID
 return ctx

@@ -515,7 +520,7 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
 ) -> Literal[False]:
 ctx = assert_current_dbos_context()
 assert ctx.is_within_workflow()
-ctx.end_workflow(exc_value
+ctx.end_workflow(exc_value)
 # Restore the saved workflow timeout
 ctx.workflow_timeout_ms = self.saved_workflow_timeout
 # Clear any propagating timeout
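The new app_version field on the context and on SetEnqueueOptions lets a caller pin an enqueued workflow to a specific application version. A minimal sketch, assuming a launched DBOS app with a Queue (queue name, workflow, and version string are placeholders):

from dbos import DBOS, Queue, SetEnqueueOptions

queue = Queue("example_queue")

@DBOS.workflow()
def process_task(task_id: str) -> str:
    return f"processed {task_id}"

# Pin this enqueue to an explicit application version; if app_version is not
# set, the enqueueing process records its own version (see dbos/_core.py below).
with SetEnqueueOptions(app_version="1.2.3", priority=5, deduplication_id="task-123"):
    handle = queue.enqueue(process_task, "task-123")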
dbos/_core.py
CHANGED
@@ -270,7 +270,12 @@ def _init_workflow(
 "output": None,
 "error": None,
 "app_id": ctx.app_id,
-"app_version":
+"app_version": (
+enqueue_options["app_version"]
+if enqueue_options is not None
+and enqueue_options["app_version"] is not None
+else GlobalParams.app_version
+),
 "executor_id": ctx.executor_id,
 "recovery_attempts": None,
 "authenticated_user": ctx.authenticated_user,
@@ -387,7 +392,7 @@ def _execute_workflow_wthread(
 **kwargs: Any,
 ) -> R:
 attributes: TracedAttributes = {
-"name": func
+"name": get_dbos_func_name(func),
 "operationType": OperationType.WORKFLOW.value,
 }
 with DBOSContextSwap(ctx):
@@ -420,7 +425,7 @@ async def _execute_workflow_async(
 **kwargs: Any,
 ) -> R:
 attributes: TracedAttributes = {
-"name": func
+"name": get_dbos_func_name(func),
 "operationType": OperationType.WORKFLOW.value,
 }
 with DBOSContextSwap(ctx):
@@ -445,7 +450,8 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
 wf_func = dbos._registry.workflow_info_map.get(status["name"], None)
 if not wf_func:
 raise DBOSWorkflowFunctionNotFoundError(
-workflow_id,
+workflow_id,
+f"Cannot execute workflow because {status['name']} is not a registered workflow function",
 )
 with DBOSContextEnsure():
 # If this function belongs to a configured class, add that class instance as its first argument
@@ -526,7 +532,8 @@ def start_workflow(
 fi = get_func_info(func)
 if fi is None:
 raise DBOSWorkflowFunctionNotFoundError(
-"<NONE>",
+"<NONE>",
+f"start_workflow: function {func.__name__} is not registered",
 )

 func = cast("Workflow[P, R]", func.__orig_func) # type: ignore
@@ -546,6 +553,7 @@
 enqueue_options = EnqueueOptionsInternal(
 deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
 priority=local_ctx.priority if local_ctx is not None else None,
+app_version=local_ctx.app_version if local_ctx is not None else None,
 )
 new_wf_id, new_wf_ctx = _get_new_wf()

@@ -620,7 +628,8 @@ async def start_workflow_async(
 fi = get_func_info(func)
 if fi is None:
 raise DBOSWorkflowFunctionNotFoundError(
-"<NONE>",
+"<NONE>",
+f"start_workflow: function {func.__name__} is not registered",
 )

 func = cast("Workflow[P, R]", func.__orig_func) # type: ignore
@@ -637,6 +646,7 @@
 enqueue_options = EnqueueOptionsInternal(
 deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
 priority=local_ctx.priority if local_ctx is not None else None,
+app_version=local_ctx.app_version if local_ctx is not None else None,
 )
 new_wf_id, new_wf_ctx = _get_new_wf()

@@ -723,13 +733,13 @@ def workflow_wrapper(
 assert fi is not None
 if dbosreg.dbos is None:
 raise DBOSException(
-f"Function {func
+f"Function {get_dbos_func_name(func)} invoked before DBOS initialized"
 )
 dbos = dbosreg.dbos

 rr: Optional[str] = check_required_roles(func, fi)
 attributes: TracedAttributes = {
-"name": func
+"name": get_dbos_func_name(func),
 "operationType": OperationType.WORKFLOW.value,
 }
 inputs: WorkflowInputs = {
@@ -829,27 +839,30 @@ def workflow_wrapper(


 def decorate_workflow(
-reg: "DBOSRegistry", max_recovery_attempts: Optional[int]
+reg: "DBOSRegistry", name: Optional[str], max_recovery_attempts: Optional[int]
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
 def _workflow_decorator(func: Callable[P, R]) -> Callable[P, R]:
 wrapped_func = workflow_wrapper(reg, func, max_recovery_attempts)
-
+func_name = name if name is not None else func.__qualname__
+set_dbos_func_name(func, func_name)
+set_dbos_func_name(wrapped_func, func_name)
+reg.register_wf_function(func_name, wrapped_func, "workflow")
 return wrapped_func

 return _workflow_decorator


 def decorate_transaction(
-dbosreg: "DBOSRegistry", isolation_level: "IsolationLevel"
+dbosreg: "DBOSRegistry", name: Optional[str], isolation_level: "IsolationLevel"
 ) -> Callable[[F], F]:
 def decorator(func: F) -> F:

-transaction_name = func.__qualname__
+transaction_name = name if name is not None else func.__qualname__

 def invoke_tx(*args: Any, **kwargs: Any) -> Any:
 if dbosreg.dbos is None:
 raise DBOSException(
-f"Function {
+f"Function {transaction_name} invoked before DBOS initialized"
 )

 dbos = dbosreg.dbos
@@ -857,12 +870,12 @@ def decorate_transaction(
 status = dbos._sys_db.get_workflow_status(ctx.workflow_id)
 if status and status["status"] == WorkflowStatusString.CANCELLED.value:
 raise DBOSWorkflowCancelledError(
-f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {
+f"Workflow {ctx.workflow_id} is cancelled. Aborting transaction {transaction_name}."
 )

 with dbos._app_db.sessionmaker() as session:
 attributes: TracedAttributes = {
-"name":
+"name": transaction_name,
 "operationType": OperationType.TRANSACTION.value,
 }
 with EnterDBOSTransaction(session, attributes=attributes):
@@ -963,7 +976,7 @@ def decorate_transaction(
 raise
 except InvalidRequestError as invalid_request_error:
 dbos.logger.error(
-f"InvalidRequestError in transaction {
+f"InvalidRequestError in transaction {transaction_name} \033[1m Hint: Do not call commit() or rollback() within a DBOS transaction.\033[0m"
 )
 txn_error = invalid_request_error
 raise
@@ -983,7 +996,7 @@ def decorate_transaction(

 if inspect.iscoroutinefunction(func):
 raise DBOSException(
-f"Function {
+f"Function {transaction_name} is a coroutine function, but DBOS.transaction does not support coroutine functions"
 )

 fi = get_or_create_func_info(func)
@@ -1002,15 +1015,19 @@ def decorate_transaction(
 with DBOSAssumeRole(rr):
 return invoke_tx(*args, **kwargs)
 else:
-tempwf = dbosreg.workflow_info_map.get("<temp>." +
+tempwf = dbosreg.workflow_info_map.get("<temp>." + transaction_name)
 assert tempwf
 return tempwf(*args, **kwargs)

+set_dbos_func_name(func, transaction_name)
+set_dbos_func_name(wrapper, transaction_name)
+
 def temp_wf(*args: Any, **kwargs: Any) -> Any:
 return wrapper(*args, **kwargs)

 wrapped_wf = workflow_wrapper(dbosreg, temp_wf)
-set_dbos_func_name(temp_wf, "<temp>." +
+set_dbos_func_name(temp_wf, "<temp>." + transaction_name)
+set_dbos_func_name(wrapped_wf, "<temp>." + transaction_name)
 set_temp_workflow_type(temp_wf, "transaction")
 dbosreg.register_wf_function(
 get_dbos_func_name(temp_wf), wrapped_wf, "transaction"
@@ -1027,24 +1044,25 @@ def decorate_transaction(
 def decorate_step(
 dbosreg: "DBOSRegistry",
 *,
-
-
-
-
+name: Optional[str],
+retries_allowed: bool,
+interval_seconds: float,
+max_attempts: int,
+backoff_rate: float,
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
 def decorator(func: Callable[P, R]) -> Callable[P, R]:

-step_name = func.__qualname__
+step_name = name if name is not None else func.__qualname__

 def invoke_step(*args: Any, **kwargs: Any) -> Any:
 if dbosreg.dbos is None:
 raise DBOSException(
-f"Function {
+f"Function {step_name} invoked before DBOS initialized"
 )
 dbos = dbosreg.dbos

 attributes: TracedAttributes = {
-"name":
+"name": step_name,
 "operationType": OperationType.STEP.value,
 }

@@ -1123,7 +1141,7 @@ def decorate_step(
 stepOutcome = stepOutcome.retry(
 max_attempts,
 on_exception,
-lambda i, e: DBOSMaxStepRetriesExceeded(
+lambda i, e: DBOSMaxStepRetriesExceeded(step_name, i, e),
 )

 outcome = (
@@ -1152,7 +1170,7 @@ def decorate_step(
 with DBOSAssumeRole(rr):
 return invoke_step(*args, **kwargs)
 else:
-tempwf = dbosreg.workflow_info_map.get("<temp>." +
+tempwf = dbosreg.workflow_info_map.get("<temp>." + step_name)
 assert tempwf
 return tempwf(*args, **kwargs)

@@ -1160,20 +1178,19 @@ def decorate_step(
 _mark_coroutine(wrapper) if inspect.iscoroutinefunction(func) else wrapper # type: ignore
 )

+set_dbos_func_name(func, step_name)
+set_dbos_func_name(wrapper, step_name)
+
 def temp_wf_sync(*args: Any, **kwargs: Any) -> Any:
 return wrapper(*args, **kwargs)

 async def temp_wf_async(*args: Any, **kwargs: Any) -> Any:
 return await wrapper(*args, **kwargs)

-# Other code in transact-py depends on the name of temporary workflow functions to be "temp_wf"
-# so set the name of both sync and async temporary workflow functions explicitly
-temp_wf_sync.__name__ = "temp_wf"
-temp_wf_async.__name__ = "temp_wf"
-
 temp_wf = temp_wf_async if inspect.iscoroutinefunction(func) else temp_wf_sync
 wrapped_wf = workflow_wrapper(dbosreg, temp_wf)
-set_dbos_func_name(temp_wf, "<temp>." +
+set_dbos_func_name(temp_wf, "<temp>." + step_name)
+set_dbos_func_name(wrapped_wf, "<temp>." + step_name)
 set_temp_workflow_type(temp_wf, "step")
 dbosreg.register_wf_function(get_dbos_func_name(temp_wf), wrapped_wf, "step")
 wrapper.__orig_func = temp_wf # type: ignore
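The _init_workflow hunk above decides which app_version is recorded for a new workflow: a version pinned at enqueue time wins, otherwise the running process's version is used. The selection logic, restated as a hypothetical standalone helper (GlobalParams.app_version replaced by a plain argument):

from typing import Any, Dict, Optional

def resolve_app_version(enqueue_options: Optional[Dict[str, Any]], current_version: str) -> str:
    # Prefer a version pinned at enqueue time (e.g. via SetEnqueueOptions);
    # otherwise fall back to the version of the running application.
    if enqueue_options is not None and enqueue_options.get("app_version") is not None:
        return enqueue_options["app_version"]
    return current_version

assert resolve_app_version({"app_version": "1.2.3"}, "9.9.9") == "1.2.3"
assert resolve_app_version({"app_version": None}, "9.9.9") == "9.9.9"
assert resolve_app_version(None, "9.9.9") == "9.9.9"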
dbos/_dbos.py
CHANGED
@@ -24,6 +24,7 @@ from typing import (
 Tuple,
 Type,
 TypeVar,
+Union,
 )

 from opentelemetry.trace import Span
@@ -359,6 +360,7 @@ class DBOS:

 temp_send_wf = workflow_wrapper(self._registry, send_temp_workflow)
 set_dbos_func_name(send_temp_workflow, TEMP_SEND_WF_NAME)
+set_dbos_func_name(temp_send_wf, TEMP_SEND_WF_NAME)
 set_temp_workflow_type(send_temp_workflow, "send")
 self._registry.register_wf_function(TEMP_SEND_WF_NAME, temp_send_wf, "send")

@@ -588,14 +590,22 @@ class DBOS:
 # Decorators for DBOS functionality
 @classmethod
 def workflow(
-cls,
+cls,
+*,
+name: Optional[str] = None,
+max_recovery_attempts: Optional[int] = DEFAULT_MAX_RECOVERY_ATTEMPTS,
 ) -> Callable[[Callable[P, R]], Callable[P, R]]:
 """Decorate a function for use as a DBOS workflow."""
-return decorate_workflow(
+return decorate_workflow(
+_get_or_create_dbos_registry(), name, max_recovery_attempts
+)

 @classmethod
 def transaction(
-cls,
+cls,
+isolation_level: IsolationLevel = "SERIALIZABLE",
+*,
+name: Optional[str] = None,
 ) -> Callable[[F], F]:
 """
 Decorate a function for use as a DBOS transaction.
@@ -604,12 +614,15 @@ class DBOS:
 isolation_level(IsolationLevel): Transaction isolation level

 """
-return decorate_transaction(
+return decorate_transaction(
+_get_or_create_dbos_registry(), name, isolation_level
+)

 @classmethod
 def step(
 cls,
 *,
+name: Optional[str] = None,
 retries_allowed: bool = False,
 interval_seconds: float = 1.0,
 max_attempts: int = 3,
@@ -628,6 +641,7 @@ class DBOS:

 return decorate_step(
 _get_or_create_dbos_registry(),
+name=name,
 retries_allowed=retries_allowed,
 interval_seconds=interval_seconds,
 max_attempts=max_attempts,
@@ -998,7 +1012,7 @@ class DBOS:
 cls,
 *,
 workflow_ids: Optional[List[str]] = None,
-status: Optional[str] = None,
+status: Optional[Union[str, List[str]]] = None,
 start_time: Optional[str] = None,
 end_time: Optional[str] = None,
 name: Optional[str] = None,
@@ -1034,7 +1048,7 @@ class DBOS:
 cls,
 *,
 queue_name: Optional[str] = None,
-status: Optional[str] = None,
+status: Optional[Union[str, List[str]]] = None,
 start_time: Optional[str] = None,
 end_time: Optional[str] = None,
 name: Optional[str] = None,
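With name exposed on the public decorators, a function can be registered and traced under an explicit identifier instead of its __qualname__. A short sketch of the new signatures (the dotted names and bodies are illustrative only):

from dbos import DBOS

@DBOS.workflow(name="billing.process_invoice", max_recovery_attempts=10)
def process_invoice(invoice_id: str) -> None:
    charge_card(invoice_id)
    record_invoice(invoice_id)

@DBOS.step(name="billing.charge_card", retries_allowed=True, max_attempts=5)
def charge_card(invoice_id: str) -> None:
    ...

@DBOS.transaction("SERIALIZABLE", name="billing.record_invoice")
def record_invoice(invoice_id: str) -> None:
    ...

The same registered name is what get_dbos_func_name now reports in error messages, the Kafka group-id fallback, and the scheduler's generated workflow IDs (see the following files).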
dbos/_kafka.py
CHANGED
@@ -13,6 +13,7 @@ from ._context import SetWorkflowID
 from ._error import DBOSInitializationError
 from ._kafka_message import KafkaMessage
 from ._logger import dbos_logger
+from ._registrations import get_dbos_func_name

 _KafkaConsumerWorkflow = Callable[[KafkaMessage], None]

@@ -44,7 +45,7 @@ def _kafka_consumer_loop(
 config["auto.offset.reset"] = "earliest"

 if config.get("group.id") is None:
-config["group.id"] = safe_group_name(func
+config["group.id"] = safe_group_name(get_dbos_func_name(func), topics)
 dbos_logger.warning(
 f"Consumer group ID not found. Using generated group.id {config['group.id']}"
 )
dbos/_registrations.py
CHANGED
@@ -4,15 +4,17 @@ from enum import Enum
 from types import FunctionType
 from typing import Any, Callable, List, Literal, Optional, Tuple, Type, cast

+from dbos._error import DBOSWorkflowFunctionNotFoundError
+
 DEFAULT_MAX_RECOVERY_ATTEMPTS = 100


 def get_dbos_func_name(f: Any) -> str:
 if hasattr(f, "dbos_function_name"):
 return str(getattr(f, "dbos_function_name"))
-
-
-
+raise DBOSWorkflowFunctionNotFoundError(
+"<NONE>", f"function {f.__name__} is not registered"
+)


 def set_dbos_func_name(f: Any, name: str) -> None:
dbos/_roles.py
CHANGED
@@ -10,6 +10,7 @@ from ._context import DBOSAssumeRole, get_local_dbos_context
 from ._registrations import (
 DBOSFuncInfo,
 get_class_info_for_func,
+get_dbos_func_name,
 get_or_create_class_info,
 get_or_create_func_info,
 )
@@ -36,7 +37,7 @@ def check_required_roles(
 ctx = get_local_dbos_context()
 if ctx is None or ctx.authenticated_roles is None:
 raise DBOSNotAuthorizedError(
-f"Function {func
+f"Function {get_dbos_func_name(func)} requires a role, but was called in a context without authentication information"
 )

 for r in required_roles:
@@ -44,7 +45,7 @@ def check_required_roles(
 return r

 raise DBOSNotAuthorizedError(
-f"Function {func
+f"Function {get_dbos_func_name(func)} has required roles, but user is not authenticated for any of them"
 )

dbos/_scheduler.py
CHANGED
@@ -11,6 +11,7 @@ if TYPE_CHECKING:

 from ._context import SetWorkflowID
 from ._croniter import croniter # type: ignore
+from ._registrations import get_dbos_func_name

 ScheduledWorkflow = Callable[[datetime, datetime], None]

@@ -24,20 +25,22 @@ def scheduler_loop(
 iter = croniter(cron, datetime.now(timezone.utc), second_at_beginning=True)
 except Exception as e:
 dbos_logger.error(
-f'Cannot run scheduled function {func
+f'Cannot run scheduled function {get_dbos_func_name(func)}. Invalid crontab "{cron}"'
 )
 while not stop_event.is_set():
 nextExecTime = iter.get_next(datetime)
 sleepTime = nextExecTime - datetime.now(timezone.utc)
 if stop_event.wait(timeout=sleepTime.total_seconds()):
 return
-
-
+try:
+with SetWorkflowID(
+f"sched-{get_dbos_func_name(func)}-{nextExecTime.isoformat()}"
+):
 scheduler_queue.enqueue(func, nextExecTime, datetime.now(timezone.utc))
-
-
-
-
+except Exception:
+dbos_logger.warning(
+f"Exception encountered in scheduler thread: {traceback.format_exc()})"
+)


 def scheduled(
@@ -48,7 +51,7 @@ def scheduled(
 croniter(cron, datetime.now(timezone.utc), second_at_beginning=True)
 except Exception as e:
 raise ValueError(
-f'Invalid crontab "{cron}" for scheduled function function {func
+f'Invalid crontab "{cron}" for scheduled function function {get_dbos_func_name(func)}.'
 )

 global scheduler_queue
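The scheduler loop now enqueues each firing under a deterministic workflow ID (sched-<function name>-<ISO timestamp>) via SetWorkflowID, and logs rather than crashes on enqueue errors. The same idempotency idiom can be used directly in application code; a sketch assuming a launched DBOS app (queue and workflow names are placeholders):

from datetime import datetime, timezone
from dbos import DBOS, Queue, SetWorkflowID

digest_queue = Queue("digest_queue")

@DBOS.workflow()
def send_daily_digest(day: str) -> None:
    ...

today = datetime.now(timezone.utc).date().isoformat()

# A deterministic workflow ID makes the enqueue idempotent: repeating it for
# the same day should attach to the existing workflow rather than start a new one.
with SetWorkflowID(f"digest-{today}"):
    digest_queue.enqueue(send_daily_digest, today)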
dbos/_sys_db.py
CHANGED
@@ -149,6 +149,8 @@ class EnqueueOptionsInternal(TypedDict):
 deduplication_id: Optional[str]
 # Priority of the workflow on the queue, starting from 1 ~ 2,147,483,647. Default 0 (highest priority).
 priority: Optional[int]
+# On what version the workflow is enqueued. Current version if not specified.
+app_version: Optional[str]


 class RecordedResult(TypedDict):
@@ -185,7 +187,9 @@ class GetWorkflowsInput:
 self.authenticated_user: Optional[str] = None # The user who ran the workflow.
 self.start_time: Optional[str] = None # Timestamp in ISO 8601 format
 self.end_time: Optional[str] = None # Timestamp in ISO 8601 format
-self.status: Optional[str] =
+self.status: Optional[List[str]] = (
+None # Get workflows with one of these statuses
+)
 self.application_version: Optional[str] = (
 None # The application version that ran this workflow. = None
 )
@@ -205,7 +209,7 @@ class GetWorkflowsInput:

 class GetQueuedWorkflowsInput(TypedDict):
 queue_name: Optional[str] # Get workflows belonging to this queue
-status: Optional[str] # Get workflows with
+status: Optional[list[str]] # Get workflows with one of these statuses
 start_time: Optional[str] # Timestamp in ISO 8601 format
 end_time: Optional[str] # Timestamp in ISO 8601 format
 limit: Optional[int] # Return up to this many workflows IDs.
@@ -832,7 +836,7 @@ class SystemDatabase:
 <= datetime.datetime.fromisoformat(input.end_time).timestamp() * 1000
 )
 if input.status:
-query = query.where(SystemSchema.workflow_status.c.status
+query = query.where(SystemSchema.workflow_status.c.status.in_(input.status))
 if input.application_version:
 query = query.where(
 SystemSchema.workflow_status.c.application_version
@@ -938,10 +942,9 @@ class SystemDatabase:
 SystemSchema.workflow_status.c.queue_name == input["queue_name"]
 )

-
-
-
-)
+status = input.get("status", None)
+if status:
+query = query.where(SystemSchema.workflow_status.c.status.in_(status))
 if "start_time" in input and input["start_time"] is not None:
 query = query.where(
 SystemSchema.workflow_status.c.created_at
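Both query builders now filter with SQLAlchemy's ColumnOperators.in_() over a status list instead of an equality test. Outside of dbos, the same pattern looks roughly like this (the table and column names are illustrative, not the real system-database schema):

import sqlalchemy as sa

workflow_status = sa.table(
    "workflow_status",
    sa.column("workflow_uuid"),
    sa.column("status"),
)

def build_query(statuses):
    query = sa.select(workflow_status.c.workflow_uuid)
    if statuses:
        # Produces a WHERE workflow_status.status IN (...) filter over the given statuses.
        query = query.where(workflow_status.c.status.in_(statuses))
    return query

print(build_query(["PENDING", "ENQUEUED"]))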
dbos/_workflow_commands.py
CHANGED
@@ -1,7 +1,7 @@
 import time
 import uuid
 from datetime import datetime
-from typing import TYPE_CHECKING, List, Optional
+from typing import TYPE_CHECKING, List, Optional, Union

 from dbos._context import get_local_dbos_context

@@ -23,7 +23,7 @@ def list_workflows(
 sys_db: SystemDatabase,
 *,
 workflow_ids: Optional[List[str]] = None,
-status: Optional[str] = None,
+status: Optional[Union[str, List[str]]] = None,
 start_time: Optional[str] = None,
 end_time: Optional[str] = None,
 name: Optional[str] = None,
@@ -39,7 +39,7 @@ def list_workflows(
 input.authenticated_user = user
 input.start_time = start_time
 input.end_time = end_time
-input.status = status
+input.status = status if status is None or isinstance(status, list) else [status]
 input.application_version = app_version
 input.limit = limit
 input.name = name
@@ -56,7 +56,7 @@ def list_queued_workflows(
 sys_db: SystemDatabase,
 *,
 queue_name: Optional[str] = None,
-status: Optional[str] = None,
+status: Optional[Union[str, List[str]]] = None,
 start_time: Optional[str] = None,
 end_time: Optional[str] = None,
 name: Optional[str] = None,
@@ -68,7 +68,7 @@ def list_queued_workflows(
 "queue_name": queue_name,
 "start_time": start_time,
 "end_time": end_time,
-"status": status,
+"status": status if status is None or isinstance(status, list) else [status],
 "limit": limit,
 "name": name,
 "offset": offset,
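list_workflows and list_queued_workflows keep accepting a bare status string by normalizing it into a one-element list before building the query, so existing callers are unaffected. The normalization is the single expression added above; in isolation:

from typing import List, Optional, Union

def normalize_status(status: Optional[Union[str, List[str]]]) -> Optional[List[str]]:
    # None passes through, a list passes through, a bare string becomes [string].
    return status if status is None or isinstance(status, list) else [status]

assert normalize_status(None) is None
assert normalize_status("PENDING") == ["PENDING"]
assert normalize_status(["PENDING", "ENQUEUED"]) == ["PENDING", "ENQUEUED"]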
{dbos-1.5.0a10.dist-info → dbos-1.6.0.dist-info}/RECORD
CHANGED
@@ -1,19 +1,19 @@
-dbos-1.
-dbos-1.
-dbos-1.
-dbos-1.
+dbos-1.6.0.dist-info/METADATA,sha256=k5u4xGb8zZtCVAbUbOe5LH26_VRO7CgfQ2Fur4zt2wc,13265
+dbos-1.6.0.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-1.6.0.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.6.0.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=l46ZX4NpvBP9W8cl9gE7OqMNwUCevLMt2VztM7crBv0,15465
 dbos/_app_db.py,sha256=htblDPfqrpb_uZoFcvaud7cgQ-PDyn6Bn-cBidxdCTA,10603
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=
+dbos/_client.py,sha256=DeiJHo5fTedWsipr7qlQQIcDmVAPjzzX94X01121oQM,14780
 dbos/_conductor/conductor.py,sha256=y_T-8kEHwKWt6W8LtcFMctB_6EvYFWsuGLxiFuuKKBU,23702
 dbos/_conductor/protocol.py,sha256=DOTprPSd7oHDcvwWSyZpnlPds_JfILtcKzHZa-qBsF4,7330
-dbos/_context.py,sha256=
-dbos/_core.py,sha256=
+dbos/_context.py,sha256=zhje6jObpBcRALYfHyyIEumHtk_enl_PxLl01j4oDME,24897
+dbos/_core.py,sha256=m3e1WZ_210p2DT8c1sTh4S_CVM748UjkBdiGO846mVg,49269
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=qzXD55bGJJW2SxI6HESykDRIpBmODNwIUt_jRkcRBVw,47588
 dbos/_dbos_config.py,sha256=JUG4V1rrP0p1AYESgih4ea80qOH_13UsgoIIm8X84pw,20562
 dbos/_debug.py,sha256=99j2SChWmCPAlZoDmjsJGe77tpU2LEa8E2TtLAnnh7o,1831
 dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
@@ -21,7 +21,7 @@ dbos/_error.py,sha256=nS7KuXJHhuNXZRErxdEUGT38Hb0VPyxNwSyADiVpHcE,8581
 dbos/_event_loop.py,sha256=cvaFN9-II3MsHEOq8QoICc_8qSKrjikMlLfuhC3Y8Dk,2923
 dbos/_fastapi.py,sha256=T7YlVY77ASqyTqq0aAPclZ9YzlXdGTT0lEYSwSgt1EE,3151
 dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
-dbos/_kafka.py,sha256=
+dbos/_kafka.py,sha256=Gm4fHWl7gYb-i5BMvwNwm5Km3z8zQpseqdMgqgFjlGI,4252
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
 dbos/_logger.py,sha256=Dp6bHZKUtcm5gWwYHj_HA5Wj5OMuJGUrpl2g2i4xDZg,4620
 dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
@@ -42,14 +42,14 @@ dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256
 dbos/_outcome.py,sha256=Kz3aL7517q9UEFTx3Cq9zzztjWyWVOx_08fZyHo9dvg,7035
 dbos/_queue.py,sha256=Kq7aldTDLRF7cZtkXmsCy6wV2PR24enkhghEG25NtaU,4080
 dbos/_recovery.py,sha256=TBNjkmSEqBU-g5YXExsLJ9XoCe4iekqtREsskXZECEg,2507
-dbos/_registrations.py,sha256=
-dbos/_roles.py,sha256=
-dbos/_scheduler.py,sha256=
+dbos/_registrations.py,sha256=U-PwDZBuyuJjA2LYtud7D3VxDR440mVpMYE-S11BWDo,7369
+dbos/_roles.py,sha256=kCuhhg8XLtrHCgKgm44I0abIRTGHltf88OwjEKAUggk,2317
+dbos/_scheduler.py,sha256=CWeGVfl9h51VXfxt80y5Da_5pE8SPty_AYkfpJkkMxQ,2117
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=rbFKggONdvvbb45InvGz0TM6a7c-Ux9dcaL-h_7Z7pU,4438
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=yhwhH23QvehbhPW3k6f4TRQ6mDjmvMILqsR8YffFZBg,80368
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -62,11 +62,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=RnlcaOJEx_58hr2J9L9g6E7gjAHAeEtEGugJZmCwNfQ,2963
 dbos/_utils.py,sha256=uywq1QrjMwy17btjxW4bES49povlQwYwYbvKwMT6C2U,1575
-dbos/_workflow_commands.py,sha256=
+dbos/_workflow_commands.py,sha256=4QCs7ziQ9T457tqfaNFwiXd6mDisr-ZK__skz1Uteyg,4648
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
 dbos/cli/cli.py,sha256=IcfaX4rrSrk6f24S2jrlR33snYMyNyEIx_lNQtuVr2E,22081
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.
+dbos-1.6.0.dist-info/RECORD,,
{dbos-1.5.0a10.dist-info → dbos-1.6.0.dist-info}/WHEEL
File without changes
{dbos-1.5.0a10.dist-info → dbos-1.6.0.dist-info}/entry_points.txt
File without changes
{dbos-1.5.0a10.dist-info → dbos-1.6.0.dist-info}/licenses/LICENSE
File without changes