dbos 2.3.0a5__py3-none-any.whl → 2.4.0a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dbos/_app_db.py +2 -0
- dbos/_client.py +6 -0
- dbos/_conductor/protocol.py +39 -1
- dbos/_core.py +3 -0
- dbos/_dbos.py +2 -0
- dbos/_migration.py +47 -2
- dbos/_schemas/system_database.py +3 -0
- dbos/_sys_db.py +113 -159
- dbos/_workflow_commands.py +15 -15
- dbos/cli/cli.py +1 -1
- {dbos-2.3.0a5.dist-info → dbos-2.4.0a2.dist-info}/METADATA +1 -1
- {dbos-2.3.0a5.dist-info → dbos-2.4.0a2.dist-info}/RECORD +15 -15
- {dbos-2.3.0a5.dist-info → dbos-2.4.0a2.dist-info}/WHEEL +0 -0
- {dbos-2.3.0a5.dist-info → dbos-2.4.0a2.dist-info}/entry_points.txt +0 -0
- {dbos-2.3.0a5.dist-info → dbos-2.4.0a2.dist-info}/licenses/LICENSE +0 -0
dbos/_app_db.py
CHANGED
dbos/_client.py
CHANGED
@@ -149,9 +149,11 @@ class DBOSClient:
         self._sys_db = SystemDatabase.create(
             system_database_url=system_database_url,
             engine_kwargs={
+                "connect_args": {"application_name": "dbos_transact_client"},
                 "pool_timeout": 30,
                 "max_overflow": 0,
                 "pool_size": 2,
+                "pool_pre_ping": True,
             },
             engine=system_database_engine,
             schema=dbos_system_schema,

@@ -162,9 +164,11 @@ class DBOSClient:
         self._app_db = ApplicationDatabase.create(
             database_url=application_database_url,
             engine_kwargs={
+                "connect_args": {"application_name": "dbos_transact_client"},
                 "pool_timeout": 30,
                 "max_overflow": 0,
                 "pool_size": 2,
+                "pool_pre_ping": True,
             },
             schema=dbos_system_schema,
             serializer=serializer,

@@ -234,6 +238,7 @@ class DBOSClient:
             ),
             "inputs": self._serializer.serialize(inputs),
             "queue_partition_key": enqueue_options_internal["queue_partition_key"],
+            "forked_from": None,
         }

         self._sys_db.init_workflow(

@@ -300,6 +305,7 @@ class DBOSClient:
             "priority": 0,
             "inputs": self._serializer.serialize({"args": (), "kwargs": {}}),
             "queue_partition_key": None,
+            "forked_from": None,
         }
         with self._sys_db.engine.begin() as conn:
             self._sys_db._insert_workflow_status(
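Both client-side engines now tag their database sessions and validate pooled connections before use. A minimal standalone sketch of what these kwargs mean once they reach SQLAlchemy's create_engine (the URL is a placeholder):

from sqlalchemy import create_engine

# pool_pre_ping issues a cheap liveness check before handing out a pooled
# connection, transparently recycling stale ones; application_name shows up
# in pg_stat_activity, making client connections easy to identify.
engine = create_engine(
    "postgresql+psycopg://user:pass@localhost/app",  # placeholder URL
    connect_args={"application_name": "dbos_transact_client"},
    pool_timeout=30,
    max_overflow=0,
    pool_size=2,
    pool_pre_ping=True,
)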
dbos/_conductor/protocol.py
CHANGED
@@ -143,6 +143,13 @@ class WorkflowsOutput:
     QueueName: Optional[str]
     ApplicationVersion: Optional[str]
     ExecutorID: Optional[str]
+    WorkflowTimeoutMS: Optional[str]
+    WorkflowDeadlineEpochMS: Optional[str]
+    DeduplicationID: Optional[str]
+    Priority: Optional[str]
+    QueuePartitionKey: Optional[str]
+    ForkedFrom: Optional[str]
+    ForkedTo: Optional[list[str]]

     @classmethod
     def from_workflow_information(cls, info: WorkflowStatus) -> "WorkflowsOutput":

@@ -152,12 +159,22 @@ class WorkflowsOutput:
         inputs_str = str(info.input) if info.input is not None else None
         outputs_str = str(info.output) if info.output is not None else None
         error_str = str(info.error) if info.error is not None else None
-        request_str = None
         roles_str = (
             str(info.authenticated_roles)
             if info.authenticated_roles is not None
             else None
         )
+        workflow_timeout_ms_str = (
+            str(info.workflow_timeout_ms)
+            if info.workflow_timeout_ms is not None
+            else None
+        )
+        workflow_deadline_epoch_ms_str = (
+            str(info.workflow_deadline_epoch_ms)
+            if info.workflow_deadline_epoch_ms is not None
+            else None
+        )
+        priority_str = str(info.priority) if info.priority is not None else None

         return cls(
             WorkflowUUID=info.workflow_id,

@@ -176,6 +193,13 @@ class WorkflowsOutput:
             QueueName=info.queue_name,
             ApplicationVersion=info.app_version,
             ExecutorID=info.executor_id,
+            WorkflowTimeoutMS=workflow_timeout_ms_str,
+            WorkflowDeadlineEpochMS=workflow_deadline_epoch_ms_str,
+            DeduplicationID=info.deduplication_id,
+            Priority=priority_str,
+            QueuePartitionKey=info.queue_partition_key,
+            ForkedFrom=info.forked_from,
+            ForkedTo=info.forked_to,
         )

@@ -186,14 +210,28 @@ class WorkflowSteps:
     output: Optional[str]
     error: Optional[str]
     child_workflow_id: Optional[str]
+    started_at_epoch_ms: Optional[str]
+    completed_at_epoch_ms: Optional[str]

     @classmethod
     def from_step_info(cls, info: StepInfo) -> "WorkflowSteps":
         output_str = str(info["output"]) if info["output"] is not None else None
         error_str = str(info["error"]) if info["error"] is not None else None
+        started_at_str = (
+            str(info["started_at_epoch_ms"])
+            if info["started_at_epoch_ms"] is not None
+            else None
+        )
+        completed_at_str = (
+            str(info["completed_at_epoch_ms"])
+            if info["completed_at_epoch_ms"] is not None
+            else None
+        )
         return cls(
             function_id=info["function_id"],
             function_name=info["function_name"],
+            started_at_epoch_ms=started_at_str,
+            completed_at_epoch_ms=completed_at_str,
             output=output_str,
             error=error_str,
             child_workflow_id=info["child_workflow_id"],
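All the new WorkflowsOutput fields are stringly typed, presumably so 64-bit epoch values survive JSON consumers that only have doubles; the coercion pattern is identical everywhere. An illustrative helper (not part of dbos) capturing it:

from typing import Optional

def opt_str(value: Optional[object]) -> Optional[str]:
    # Stringify non-None values, preserve None.
    return str(value) if value is not None else None

assert opt_str(1730000000000) == "1730000000000"
assert opt_str(None) is None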
dbos/_core.py
CHANGED
@@ -300,6 +300,7 @@ def _init_workflow(
             if enqueue_options is not None
             else None
         ),
+        "forked_from": None,
     }

     # Synchronously record the status and inputs for workflows

@@ -316,6 +317,7 @@ def _init_workflow(
                 "function_name": wf_name,
                 "output": None,
                 "error": dbos._serializer.serialize(e),
+                "started_at_epoch_ms": int(time.time() * 1000),
             }
             dbos._sys_db.record_operation_result(result)
             raise

@@ -1118,6 +1120,7 @@ def decorate_step(
             "function_name": step_name,
             "output": None,
             "error": None,
+            "started_at_epoch_ms": int(time.time() * 1000),
         }

         try:
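Step results are now stamped with a start time in integer epoch milliseconds at every call site. The recurring expression, pulled out into a hypothetical helper for clarity:

import time

def now_ms() -> int:
    # Wall-clock UNIX time in integer milliseconds, as stored in
    # operation_outputs.started_at_epoch_ms.
    return int(time.time() * 1000)

result = {
    "function_name": "my_step",  # placeholder step name
    "output": None,
    "error": None,
    "started_at_epoch_ms": now_ms(),
}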
dbos/_dbos.py
CHANGED
@@ -1128,6 +1128,7 @@ class DBOS:
         name: Optional[str] = None,
         app_version: Optional[str] = None,
         user: Optional[str] = None,
+        queue_name: Optional[str] = None,
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,

@@ -1151,6 +1152,7 @@ class DBOS:
             workflow_id_prefix=workflow_id_prefix,
             load_input=load_input,
             load_output=load_output,
+            queue_name=queue_name,
         )

         return _get_dbos_instance()._sys_db.call_function_as_step(
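With the new parameter, workflow listing can be filtered by queue directly. A usage sketch (the queue name is a placeholder):

from dbos import DBOS

# List up to 20 workflows that ran on the "payments" queue, newest first.
statuses = DBOS.list_workflows(
    queue_name="payments",  # placeholder queue name
    limit=20,
    sort_desc=True,
)
for s in statuses:
    print(s.workflow_id, s.status)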
dbos/_migration.py
CHANGED
@@ -209,8 +209,32 @@ ALTER TABLE \"{schema}\".workflow_status ADD COLUMN queue_partition_key TEXT;
 """


+def get_dbos_migration_three(schema: str) -> str:
+    return f"""
+    create index "idx_workflow_status_queue_status_started" on \"{schema}\"."workflow_status" ("queue_name", "status", "started_at_epoch_ms")
+    """
+
+
+def get_dbos_migration_four(schema: str) -> str:
+    return f"""
+    ALTER TABLE \"{schema}\".workflow_status ADD COLUMN forked_from TEXT;
+    """
+
+
+def get_dbos_migration_five(schema: str) -> str:
+    return f"""
+    ALTER TABLE \"{schema}\".operation_outputs ADD COLUMN started_at_epoch_ms BIGINT, ADD COLUMN completed_at_epoch_ms BIGINT;
+    """
+
+
 def get_dbos_migrations(schema: str) -> list[str]:
-    return [get_dbos_migration_one(schema), get_dbos_migration_two(schema)]
+    return [
+        get_dbos_migration_one(schema),
+        get_dbos_migration_two(schema),
+        get_dbos_migration_three(schema),
+        get_dbos_migration_four(schema),
+        get_dbos_migration_five(schema),
+    ]


 def get_sqlite_timestamp_expr() -> str:

@@ -303,4 +327,25 @@ sqlite_migration_two = """
 ALTER TABLE workflow_status ADD COLUMN queue_partition_key TEXT;
 """

-
+sqlite_migration_three = """
+CREATE INDEX "idx_workflow_status_queue_status_started"
+ON "workflow_status" ("queue_name", "status", "started_at_epoch_ms")
+"""
+
+sqlite_migration_four = """
+ALTER TABLE workflow_status ADD COLUMN forked_from TEXT;
+"""
+
+sqlite_migration_five = """
+ALTER TABLE operation_outputs ADD COLUMN started_at_epoch_ms BIGINT;
+ALTER TABLE operation_outputs ADD COLUMN completed_at_epoch_ms BIGINT;
+"""
+
+
+sqlite_migrations = [
+    sqlite_migration_one,
+    sqlite_migration_two,
+    sqlite_migration_three,
+    sqlite_migration_four,
+    sqlite_migration_five,
+]
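A hedged sketch of how these statements could be applied in order against a fresh SQLite system database (dbos runs them through its own migration machinery; this is only illustrative):

import sqlite3

from dbos._migration import sqlite_migrations  # list of SQL strings

conn = sqlite3.connect("dbos_system.sqlite")  # placeholder path
for migration in sqlite_migrations:
    # Each entry may hold several statements, so use executescript.
    conn.executescript(migration)
conn.close()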
dbos/_schemas/system_database.py
CHANGED
@@ -78,6 +78,7 @@ class SystemSchema:
         Column("inputs", Text()),
         Column("priority", Integer(), nullable=False, server_default=text("'0'::int")),
         Column("queue_partition_key", Text()),
+        Column("forked_from", Text()),
         Index("workflow_status_created_at_index", "created_at"),
         Index("workflow_status_executor_id_index", "executor_id"),
         Index("workflow_status_status_index", "status"),

@@ -104,6 +105,8 @@ class SystemSchema:
         Column("output", Text, nullable=True),
         Column("error", Text, nullable=True),
         Column("child_workflow_id", Text, nullable=True),
+        Column("started_at_epoch_ms", BigInteger, nullable=True),
+        Column("completed_at_epoch_ms", BigInteger, nullable=True),
         PrimaryKeyConstraint("workflow_uuid", "function_id"),
     )
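The two new operation_outputs columns make per-step latency queryable. A sketch using SQLAlchemy Core against this schema (engine construction and the workflow ID are placeholders):

import sqlalchemy as sa

from dbos._schemas.system_database import SystemSchema

ops = SystemSchema.operation_outputs
# Millisecond duration of each completed step of one workflow.
duration_query = sa.select(
    ops.c.function_id,
    ops.c.function_name,
    (ops.c.completed_at_epoch_ms - ops.c.started_at_epoch_ms).label("duration_ms"),
).where(
    ops.c.workflow_uuid == "some-workflow-id",  # placeholder ID
    ops.c.completed_at_epoch_ms.isnot(None),
)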
dbos/_sys_db.py
CHANGED
@@ -114,6 +114,16 @@ class WorkflowStatus:
     workflow_timeout_ms: Optional[int]
     # The deadline of a workflow, computed by adding its timeout to its start time.
     workflow_deadline_epoch_ms: Optional[int]
+    # Unique ID for deduplication on a queue
+    deduplication_id: Optional[str]
+    # Priority of the workflow on the queue, starting from 1 ~ 2,147,483,647. Default 0 (highest priority).
+    priority: Optional[int]
+    # If this workflow is enqueued on a partitioned queue, its partition key
+    queue_partition_key: Optional[str]
+    # If this workflow was forked from another, that workflow's ID.
+    forked_from: Optional[str]
+    # If this workflow was forked to others, those workflows' IDs
+    forked_to: Optional[list[str]]

     # INTERNAL FIELDS

@@ -141,19 +151,13 @@ class WorkflowStatusInternal(TypedDict):
     app_version: Optional[str]
     app_id: Optional[str]
     recovery_attempts: Optional[int]
-    # The start-to-close timeout of the workflow in ms
     workflow_timeout_ms: Optional[int]
-    # The deadline of a workflow, computed by adding its timeout to its start time.
-    # Deadlines propagate to children. When the deadline is reached, the workflow is cancelled.
     workflow_deadline_epoch_ms: Optional[int]
-    # Unique ID for deduplication on a queue
     deduplication_id: Optional[str]
-    # Priority of the workflow on the queue, starting from 1 ~ 2,147,483,647. Default 0 (highest priority).
     priority: int
-    # Serialized workflow inputs
     inputs: str
-    # If this workflow is enqueued on a partitioned queue, its partition key
     queue_partition_key: Optional[str]
+    forked_from: Optional[str]


 class EnqueueOptionsInternal(TypedDict):

@@ -178,6 +182,7 @@ class OperationResultInternal(TypedDict):
     function_name: str
     output: Optional[str]  # JSON (jsonpickle)
     error: Optional[str]  # JSON (jsonpickle)
+    started_at_epoch_ms: int


 class GetEventWorkflowContext(TypedDict):

@@ -194,42 +199,32 @@ class GetWorkflowsInput:
     """

     def __init__(self) -> None:
-        self.name: Optional[str] = None
-        …
-        self.sort_desc: bool = False
-
-
-class GetQueuedWorkflowsInput(TypedDict):
-    queue_name: Optional[str]  # Get workflows belonging to this queue
-    status: Optional[list[str]]  # Get workflows with one of these statuses
-    start_time: Optional[str]  # Timestamp in ISO 8601 format
-    end_time: Optional[str]  # Timestamp in ISO 8601 format
-    limit: Optional[int]  # Return up to this many workflows IDs.
-    offset: Optional[int]  # Offset into the matching records for pagination
-    name: Optional[str]  # The name of the workflow function
-    sort_desc: Optional[bool]  # Sort by created_at in DESC or ASC order
+        # Search only in these workflow IDs
+        self.workflow_ids: Optional[List[str]] = None
+        # The name of the workflow function
+        self.name: Optional[str] = None
+        # The user who ran the workflow.
+        self.authenticated_user: Optional[str] = None
+        # Timestamp in ISO 8601 format
+        self.start_time: Optional[str] = None
+        # Timestamp in ISO 8601 format
+        self.end_time: Optional[str] = None
+        # Get workflows with one of these statuses
+        self.status: Optional[List[str]] = None
+        # The application version that ran this workflow.
+        self.application_version: Optional[str] = None
+        # Return up to this many workflows IDs. IDs are ordered by workflow creation time.
+        self.limit: Optional[int] = None
+        # Offset into the matching records for pagination
+        self.offset: Optional[int] = None
+        # If true, sort by created_at in DESC order. Default false (in ASC order).
+        self.sort_desc: bool = False
+        # Search only for workflow IDs starting with this string
+        self.workflow_id_prefix: Optional[str] = None
+        # Search only for workflows enqueued on this queue
+        self.queue_name: Optional[str] = None
+        # Search only currently enqueued workflows
+        self.queues_only: bool = False


 class GetPendingWorkflowsOutput:

@@ -249,6 +244,10 @@ class StepInfo(TypedDict):
     error: Optional[Exception]
     # If the step starts or retrieves the result of a workflow, its ID
     child_workflow_id: Optional[str]
+    # The UNIX epoch timestamp at which this step started
+    started_at_epoch_ms: Optional[int]
+    # The UNIX epoch timestamp at which this step completed
+    completed_at_epoch_ms: Optional[int]


 _dbos_null_topic = "__null__topic__"

@@ -706,6 +705,7 @@ class SystemDatabase(ABC):
                 assumed_role=status["assumed_role"],
                 queue_name=INTERNAL_QUEUE_NAME,
                 inputs=status["inputs"],
+                forked_from=original_workflow_id,
             )
         )

@@ -767,6 +767,7 @@ class SystemDatabase(ABC):
                 SystemSchema.workflow_status.c.priority,
                 SystemSchema.workflow_status.c.inputs,
                 SystemSchema.workflow_status.c.queue_partition_key,
+                SystemSchema.workflow_status.c.forked_from,
             ).where(SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid)
         ).fetchone()
         if row is None:

@@ -795,6 +796,7 @@ class SystemDatabase(ABC):
             "priority": row[17],
             "inputs": row[18],
             "queue_partition_key": row[19],
+            "forked_from": row[20],
         }
         return status

@@ -881,6 +883,10 @@ class SystemDatabase(ABC):
             SystemSchema.workflow_status.c.application_id,
             SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
             SystemSchema.workflow_status.c.workflow_timeout_ms,
+            SystemSchema.workflow_status.c.deduplication_id,
+            SystemSchema.workflow_status.c.priority,
+            SystemSchema.workflow_status.c.queue_partition_key,
+            SystemSchema.workflow_status.c.forked_from,
         ]
         if load_input:
             load_columns.append(SystemSchema.workflow_status.c.inputs)

@@ -888,7 +894,15 @@ class SystemDatabase(ABC):
             load_columns.append(SystemSchema.workflow_status.c.output)
             load_columns.append(SystemSchema.workflow_status.c.error)

-        query = sa.select(*load_columns)
+        if input.queues_only:
+            query = sa.select(*load_columns).where(
+                sa.and_(
+                    SystemSchema.workflow_status.c.queue_name.isnot(None),
+                    SystemSchema.workflow_status.c.status.in_(["ENQUEUED", "PENDING"]),
+                )
+            )
+        else:
+            query = sa.select(*load_columns)
         if input.sort_desc:
             query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
         else:
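The new queues_only flag folds the old queued-workflows query into the general listing path. A hedged sketch of the predicate it adds, expressed standalone with SQLAlchemy Core:

import sqlalchemy as sa

from dbos._schemas.system_database import SystemSchema

ws = SystemSchema.workflow_status
# Roughly: WHERE queue_name IS NOT NULL AND status IN ('ENQUEUED', 'PENDING')
queued_predicate = sa.and_(
    ws.c.queue_name.isnot(None),
    ws.c.status.in_(["ENQUEUED", "PENDING"]),
)
query = sa.select(ws.c.workflow_uuid, ws.c.queue_name).where(queued_predicate)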
@@ -927,6 +941,10 @@ class SystemDatabase(ABC):
                     input.workflow_id_prefix
                 )
             )
+        if input.queue_name:
+            query = query.where(
+                SystemSchema.workflow_status.c.queue_name == input.queue_name
+            )
         if input.limit:
             query = query.limit(input.limit)
         if input.offset:

@@ -936,6 +954,7 @@ class SystemDatabase(ABC):
             rows = c.execute(query).fetchall()

         infos: List[WorkflowStatus] = []
+        workflow_ids: List[str] = []
         for row in rows:
             info = WorkflowStatus()
             info.workflow_id = row[0]

@@ -957,10 +976,14 @@ class SystemDatabase(ABC):
             info.app_id = row[14]
             info.workflow_deadline_epoch_ms = row[15]
             info.workflow_timeout_ms = row[16]
-
-            raw_input = row[17] if load_input else None
-            raw_output = row[18] if load_output else None
-            raw_error = row[19] if load_output else None
+            info.deduplication_id = row[17]
+            info.priority = row[18]
+            info.queue_partition_key = row[19]
+            info.forked_from = row[20]
+
+            raw_input = row[21] if load_input else None
+            raw_output = row[22] if load_output else None
+            raw_error = row[23] if load_output else None
             inputs, output, exception = safe_deserialize(
                 self.serializer,
                 info.workflow_id,

@@ -972,119 +995,30 @@ class SystemDatabase(ABC):
             info.output = output
             info.error = exception

+            workflow_ids.append(info.workflow_id)
             infos.append(info)
-        return infos
-
-    def get_queued_workflows(
-        self,
-        input: GetQueuedWorkflowsInput,
-        *,
-        load_input: bool = True,
-    ) -> List[WorkflowStatus]:
-        """
-        Retrieve a list of queued workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
-        """
-        load_columns = [
-            SystemSchema.workflow_status.c.workflow_uuid,
-            SystemSchema.workflow_status.c.status,
-            SystemSchema.workflow_status.c.name,
-            SystemSchema.workflow_status.c.recovery_attempts,
-            SystemSchema.workflow_status.c.config_name,
-            SystemSchema.workflow_status.c.class_name,
-            SystemSchema.workflow_status.c.authenticated_user,
-            SystemSchema.workflow_status.c.authenticated_roles,
-            SystemSchema.workflow_status.c.assumed_role,
-            SystemSchema.workflow_status.c.queue_name,
-            SystemSchema.workflow_status.c.executor_id,
-            SystemSchema.workflow_status.c.created_at,
-            SystemSchema.workflow_status.c.updated_at,
-            SystemSchema.workflow_status.c.application_version,
-            SystemSchema.workflow_status.c.application_id,
-            SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
-            SystemSchema.workflow_status.c.workflow_timeout_ms,
-        ]
-        if load_input:
-            load_columns.append(SystemSchema.workflow_status.c.inputs)
-
-            …
-            query = query.where(SystemSchema.workflow_status.c.status.in_(status))
-        if "start_time" in input and input["start_time"] is not None:
-            query = query.where(
-                SystemSchema.workflow_status.c.created_at
-                >= datetime.datetime.fromisoformat(input["start_time"]).timestamp()
-                * 1000
-            )
-        if "end_time" in input and input["end_time"] is not None:
-            query = query.where(
-                SystemSchema.workflow_status.c.created_at
-                <= datetime.datetime.fromisoformat(input["end_time"]).timestamp() * 1000
-            )
-        if input.get("limit"):
-            query = query.limit(input["limit"])
-        if input.get("offset"):
-            query = query.offset(input["offset"])
-
-        with self.engine.begin() as c:
-            rows = c.execute(query).fetchall()
-
-        infos: List[WorkflowStatus] = []
-        for row in rows:
-            info = WorkflowStatus()
-            info.workflow_id = row[0]
-            info.status = row[1]
-            info.name = row[2]
-            info.recovery_attempts = row[3]
-            info.config_name = row[4]
-            info.class_name = row[5]
-            info.authenticated_user = row[6]
-            info.authenticated_roles = (
-                json.loads(row[7]) if row[7] is not None else None
-            )
-            info.assumed_role = row[8]
-            info.queue_name = row[9]
-            info.executor_id = row[10]
-            info.created_at = row[11]
-            info.updated_at = row[12]
-            info.app_version = row[13]
-            info.app_id = row[14]
-            info.workflow_deadline_epoch_ms = row[15]
-            info.workflow_timeout_ms = row[16]
-
-            raw_input = row[17] if load_input else None
-
-            # Error and Output are not loaded because they should always be None for queued workflows.
-            inputs, output, exception = safe_deserialize(
-                self.serializer,
-                info.workflow_id,
-                serialized_input=raw_input,
-                serialized_output=None,
-                serialized_exception=None,
-            )
-            info.input = inputs
-            info.output = output
-            info.error = exception
-
-            infos.append(info)
+        # Calculate forked_to relationships
+        if workflow_ids:
+            with self.engine.begin() as c:
+                forked_to_query = sa.select(
+                    SystemSchema.workflow_status.c.forked_from,
+                    SystemSchema.workflow_status.c.workflow_uuid,
+                ).where(SystemSchema.workflow_status.c.forked_from.in_(workflow_ids))
+                forked_to_rows = c.execute(forked_to_query).fetchall()
+
+                # Build a mapping of fork-parent workflow ID to list of fork-child workflow IDs
+                forked_to_map: Dict[str, List[str]] = {}
+                for row in forked_to_rows:
+                    parent_id = row[0]
+                    child_id = row[1]
+                    if parent_id not in forked_to_map:
+                        forked_to_map[parent_id] = []
+                    forked_to_map[parent_id].append(child_id)
+
+                # Populate the forked_to field for each workflow
+                for info in infos:
+                    info.forked_to = forked_to_map.get(info.workflow_id, None)

         return infos
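The parent-to-children map above uses a manual membership check; an equivalent hedged sketch with collections.defaultdict (illustrative, not the shipped code):

from collections import defaultdict
from typing import DefaultDict, List, Tuple

# Rows of (forked_from, workflow_uuid) pairs, as returned by forked_to_query.
rows: List[Tuple[str, str]] = [("wf-a", "wf-b"), ("wf-a", "wf-c")]

forked_to_map: DefaultDict[str, List[str]] = defaultdict(list)
for parent_id, child_id in rows:
    forked_to_map[parent_id].append(child_id)

assert forked_to_map["wf-a"] == ["wf-b", "wf-c"]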
@@ -1121,6 +1055,8 @@ class SystemDatabase(ABC):
                 SystemSchema.operation_outputs.c.output,
                 SystemSchema.operation_outputs.c.error,
                 SystemSchema.operation_outputs.c.child_workflow_id,
+                SystemSchema.operation_outputs.c.started_at_epoch_ms,
+                SystemSchema.operation_outputs.c.completed_at_epoch_ms,
             ).where(SystemSchema.operation_outputs.c.workflow_uuid == workflow_id)
         ).fetchall()
         steps = []

@@ -1138,6 +1074,8 @@ class SystemDatabase(ABC):
                 output=output,
                 error=exception,
                 child_workflow_id=row[4],
+                started_at_epoch_ms=row[5],
+                completed_at_epoch_ms=row[6],
             )
             steps.append(step)
         return steps

@@ -1154,6 +1092,8 @@ class SystemDatabase(ABC):
             workflow_uuid=result["workflow_uuid"],
             function_id=result["function_id"],
             function_name=result["function_name"],
+            started_at_epoch_ms=result["started_at_epoch_ms"],
+            completed_at_epoch_ms=int(time.time() * 1000),
             output=output,
             error=error,
         )
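Since StepInfo now carries both timestamps, callers can derive per-step latency directly. A hedged usage sketch through the public step-listing API (the workflow ID is a placeholder):

from dbos import DBOS

steps = DBOS.list_workflow_steps("some-workflow-id")  # placeholder ID
for step in steps:
    start = step["started_at_epoch_ms"]
    end = step["completed_at_epoch_ms"]
    if start is not None and end is not None:
        print(step["function_name"], f"{end - start} ms")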
@@ -1340,6 +1280,7 @@ class SystemDatabase(ABC):
         topic: Optional[str] = None,
     ) -> None:
         function_name = "DBOS.send"
+        start_time = int(time.time() * 1000)
         topic = topic if topic is not None else _dbos_null_topic
         with self.engine.begin() as c:
             recorded_output = self._check_operation_execution_txn(

@@ -1376,6 +1317,7 @@ class SystemDatabase(ABC):
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
                 "function_name": function_name,
+                "started_at_epoch_ms": start_time,
                 "output": None,
                 "error": None,
             }

@@ -1391,6 +1333,7 @@ class SystemDatabase(ABC):
         timeout_seconds: float = 60,
     ) -> Any:
         function_name = "DBOS.recv"
+        start_time = int(time.time() * 1000)
         topic = topic if topic is not None else _dbos_null_topic

         # First, check for previous executions.

@@ -1475,6 +1418,7 @@ class SystemDatabase(ABC):
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
                 "function_name": function_name,
+                "started_at_epoch_ms": start_time,
                 "output": self.serializer.serialize(
                     message
                 ),  # None will be serialized to 'null'

@@ -1510,6 +1454,7 @@ class SystemDatabase(ABC):
         skip_sleep: bool = False,
     ) -> float:
         function_name = "DBOS.sleep"
+        start_time = int(time.time() * 1000)
         recorded_output = self.check_operation_execution(
             workflow_uuid, function_id, function_name
         )

@@ -1530,6 +1475,7 @@ class SystemDatabase(ABC):
             "workflow_uuid": workflow_uuid,
             "function_id": function_id,
             "function_name": function_name,
+            "started_at_epoch_ms": start_time,
             "output": self.serializer.serialize(end_time),
             "error": None,
         }

@@ -1550,6 +1496,7 @@ class SystemDatabase(ABC):
         message: Any,
     ) -> None:
         function_name = "DBOS.setEvent"
+        start_time = int(time.time() * 1000)
         with self.engine.begin() as c:
             recorded_output = self._check_operation_execution_txn(
                 workflow_uuid, function_id, function_name, conn=c

@@ -1579,6 +1526,7 @@ class SystemDatabase(ABC):
                 "workflow_uuid": workflow_uuid,
                 "function_id": function_id,
                 "function_name": function_name,
+                "started_at_epoch_ms": start_time,
                 "output": None,
                 "error": None,
             }

@@ -1639,6 +1587,7 @@ class SystemDatabase(ABC):
         caller_ctx: Optional[GetEventWorkflowContext] = None,
     ) -> Any:
         function_name = "DBOS.getEvent"
+        start_time = int(time.time() * 1000)
         get_sql = sa.select(
             SystemSchema.workflow_events.c.value,
         ).where(

@@ -1713,6 +1662,7 @@ class SystemDatabase(ABC):
                 "workflow_uuid": caller_ctx["workflow_uuid"],
                 "function_id": caller_ctx["function_id"],
                 "function_name": function_name,
+                "started_at_epoch_ms": start_time,
                 "output": self.serializer.serialize(
                     value
                 ),  # None will be serialized to 'null'

@@ -1951,6 +1901,7 @@ class SystemDatabase(ABC):

     def call_function_as_step(self, fn: Callable[[], T], function_name: str) -> T:
         ctx = get_local_dbos_context()
+        start_time = int(time.time() * 1000)
         if ctx and ctx.is_transaction():
             raise Exception(f"Invalid call to `{function_name}` inside a transaction")
         if ctx and ctx.is_workflow():

@@ -1978,6 +1929,7 @@ class SystemDatabase(ABC):
                 "workflow_uuid": ctx.workflow_id,
                 "function_id": ctx.function_id,
                 "function_name": function_name,
+                "started_at_epoch_ms": start_time,
                 "output": self.serializer.serialize(result),
                 "error": None,
             }

@@ -2056,6 +2008,7 @@ class SystemDatabase(ABC):
             if value == _dbos_stream_closed_sentinel
             else "DBOS.writeStream"
         )
+        start_time = int(time.time() * 1000)

         with self.engine.begin() as c:

@@ -2102,6 +2055,7 @@ class SystemDatabase(ABC):
             "workflow_uuid": workflow_uuid,
             "function_id": function_id,
             "function_name": function_name,
+            "started_at_epoch_ms": start_time,
             "output": None,
             "error": None,
         }
dbos/_workflow_commands.py
CHANGED
@@ -1,4 +1,3 @@
-import time
 import uuid
 from datetime import datetime
 from typing import TYPE_CHECKING, List, Optional, Union

@@ -7,7 +6,6 @@ from dbos._context import get_local_dbos_context

 from ._app_db import ApplicationDatabase
 from ._sys_db import (
-    GetQueuedWorkflowsInput,
     GetWorkflowsInput,
     StepInfo,
     SystemDatabase,

@@ -29,6 +27,7 @@ def list_workflows(
     name: Optional[str] = None,
     app_version: Optional[str] = None,
     user: Optional[str] = None,
+    queue_name: Optional[str] = None,
     limit: Optional[int] = None,
     offset: Optional[int] = None,
     sort_desc: bool = False,

@@ -43,6 +42,7 @@ def list_workflows(
     input.end_time = end_time
     input.status = status if status is None or isinstance(status, list) else [status]
     input.application_version = app_version
+    input.queue_name = queue_name
     input.limit = limit
     input.name = name
     input.offset = offset

@@ -69,19 +69,19 @@ def list_queued_workflows(
     sort_desc: bool = False,
     load_input: bool = True,
 ) -> List[WorkflowStatus]:
-    input: GetQueuedWorkflowsInput = {
-        …
-    }
-    infos: List[WorkflowStatus] = sys_db.get_queued_workflows(
-        input, load_input=load_input
+    input = GetWorkflowsInput()
+    input.start_time = start_time
+    input.end_time = end_time
+    input.status = status if status is None or isinstance(status, list) else [status]
+    input.limit = limit
+    input.name = name
+    input.offset = offset
+    input.sort_desc = sort_desc
+    input.queues_only = True
+    input.queue_name = queue_name
+
+    infos: List[WorkflowStatus] = sys_db.get_workflows(
+        input, load_input=load_input, load_output=False
     )
     return infos
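list_queued_workflows is now a thin wrapper over the general listing path. A hedged sketch of the resulting equivalence through the public API (queue name is a placeholder):

from dbos import DBOS

# Two views of the same queue: the queued-only listing is restricted to
# ENQUEUED/PENDING rows, while the general listing with a queue filter also
# includes workflows that have since completed.
queued = DBOS.list_queued_workflows(queue_name="payments")
all_on_queue = DBOS.list_workflows(queue_name="payments")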
dbos/cli/cli.py
CHANGED
@@ -145,7 +145,7 @@ def start() -> None:
         if process.poll() is None:
             os.killpg(os.getpgid(process.pid), signum)

-    # Exit
+        # Exit
         os._exit(process.returncode if process.returncode is not None else 1)

     # Configure the single handler only on Unix-like systems.
{dbos-2.3.0a5.dist-info → dbos-2.4.0a2.dist-info}/RECORD
CHANGED

@@ -1,19 +1,19 @@
-dbos-2.3.0a5.dist-info/METADATA,…
-dbos-2.3.0a5.dist-info/WHEEL,…
-dbos-2.3.0a5.dist-info/entry_points.txt,…
-dbos-2.3.0a5.dist-info/licenses/LICENSE,…
+dbos-2.4.0a2.dist-info/METADATA,sha256=caImAOIEVU15BqG1mXlv-TEW1TmBKSva8ZZl4ESQ5iw,14532
+dbos-2.4.0a2.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+dbos-2.4.0a2.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-2.4.0a2.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=M7FdFSBGhcvaLIXrNw_0eR68ijwMWV7_UEyimHMP_F4,1039
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=hubQJw5T8zGKCPNS6FQTXy8jQ8GTJxoYQaDTMlICl9k,16267
-dbos/_app_db.py,sha256=…
+dbos/_app_db.py,sha256=3XHvTePe1JaAI42rO3waWGoEeDyXkFKGzTFwJxQHUmo,16464
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=…
+dbos/_client.py,sha256=8yrIqO5Hg-TdYS6P5sxxVWz_iusarS9Is8DU3WezoUQ,19966
 dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
-dbos/_conductor/protocol.py,sha256=…
+dbos/_conductor/protocol.py,sha256=Qt6dT2JzuaqjsKzWxfpyOEpB9TY1O6dBg26VLbLA6K4,8910
 dbos/_context.py,sha256=XKllmsDR_oMcWOuZnoe1X4yv2JeOi_vsAuyWC-mWs_o,28164
-dbos/_core.py,sha256=…
+dbos/_core.py,sha256=FCspRQFRMFyHpkl4vqR8IEw3aitD-VWB77CMVQrlyy8,50257
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=…
+dbos/_dbos.py,sha256=5-_jgEKkETOS4L_a5yyFKbI5jS8xNTaaCbDRBRWeoQc,59475
 dbos/_dbos_config.py,sha256=mfajyeyeV1ZHaAg2GU3dxwvp_19wZtY2prNdVrXgPb8,24846
 dbos/_debouncer.py,sha256=qNjIVmWqTPp64M2cEbLnpgGmlKVdCaAKysD1BPJgWh4,15297
 dbos/_debug.py,sha256=0MfgNqutCUhI4PEmmra9x7f3DiFE_0nscfUCHdLimEY,1415

@@ -25,7 +25,7 @@ dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
 dbos/_kafka.py,sha256=cA3hXyT-FR4LQZnaBMVLTZn7oko76rcTUC_kOo6aSis,4352
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
 dbos/_logger.py,sha256=ByGkkGwEWaqE9z6E2VNDFOgu_z4LNe7_SxsVgAXzoT0,5081
-dbos/_migration.py,sha256=…
+dbos/_migration.py,sha256=94fhthMLMdWFlEKc_WB7hsn8cTxguKtvV6LxHDXcz8s,11520
 dbos/_outcome.py,sha256=7HvosMfEHTh1U5P6xok7kFTGLwa2lPaul0YApb3UnN4,8191
 dbos/_queue.py,sha256=GmqZHl9smES1KSmpauhSdsnZFJHDyfvRArmC-jBibhw,6228
 dbos/_recovery.py,sha256=K-wlFhdf4yGRm6cUzyhcTjQUS0xp2T5rdNMLiiBErYg,2882

@@ -34,9 +34,9 @@ dbos/_roles.py,sha256=kCuhhg8XLtrHCgKgm44I0abIRTGHltf88OwjEKAUggk,2317
 dbos/_scheduler.py,sha256=PLiCSUujlfEfojTnHwzY-P_AEOVEx7bvWvU5BuMgLPY,2708
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
-dbos/_schemas/system_database.py,sha256=…
+dbos/_schemas/system_database.py,sha256=tQAFCnEyZ7bEXZm3FbGIYk5SNGk-AHA3R_vuR0hfH8s,5717
 dbos/_serialization.py,sha256=ZGrkN5UclSLOqMVZgYpT72pw1l888ZXRoYuu3pIg3PA,2957
-dbos/_sys_db.py,sha256=…
+dbos/_sys_db.py,sha256=Y7yf7xEYhW4YueB5qPfziJbDfCEO4zl2c5cpQwsYQgw,86020
 dbos/_sys_db_postgres.py,sha256=_3m3hF6Pc23iZfUlIFYtDuC1Tw6KsjYqnDQE0HZpjt4,6965
 dbos/_sys_db_sqlite.py,sha256=ifjKdy-Z9vlVIBf5L6XnSaNjiBdvqPE73asVHim4A5Q,6998
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930

@@ -48,12 +48,12 @@ dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos,sha256=pVm2Q0Asx
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=jTlTkb5vUr_Ai5W9JIJf6FpYjAL0IWL52EWM_HXsi54,3958
 dbos/_utils.py,sha256=ZdoM1MDbHnlJrh31zfhp3iX62bAxK1kyvMwXnltC_84,1779
-dbos/_workflow_commands.py,sha256=…
+dbos/_workflow_commands.py,sha256=VwnqyFl8bszktUcIYSy7_cgcUt7QXZadgTM74yJuQxM,5010
 dbos/cli/_github_init.py,sha256=R_94Fnn40CAmPy-zM00lwHi0ndyfv57TmIooADjmag4,3378
 dbos/cli/_template_init.py,sha256=AltKk256VocgvxLpuTxpjJyACrdHFjbGoqYhHzeLae4,2649
-dbos/cli/cli.py,sha256=…
+dbos/cli/cli.py,sha256=AHz_JJj_qWCTRV8yT1RSA-hISFVIJrE9eUalApw9sxg,27149
 dbos/cli/migration.py,sha256=I0_0ngWTuCPQf6Symbpd0lizaxWUKe3uTYEmuCmsrdU,3775
 dbos/dbos-config.schema.json,sha256=47wofTZ5jlFynec7bG0L369tAXbRQQ2euBxBXvg4m9c,1730
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-2.3.0a5.dist-info/RECORD,,
+dbos-2.4.0a2.dist-info/RECORD,,

{dbos-2.3.0a5.dist-info → dbos-2.4.0a2.dist-info}/WHEEL
File without changes

{dbos-2.3.0a5.dist-info → dbos-2.4.0a2.dist-info}/entry_points.txt
File without changes

{dbos-2.3.0a5.dist-info → dbos-2.4.0a2.dist-info}/licenses/LICENSE
File without changes