dbos 1.3.0a5__tar.gz → 1.3.0a7__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-1.3.0a5 → dbos-1.3.0a7}/PKG-INFO +1 -1
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_client.py +7 -6
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_core.py +6 -9
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_dbos_config.py +1 -1
- dbos-1.3.0a7/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py +30 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_schemas/system_database.py +1 -15
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_sys_db.py +32 -97
- {dbos-1.3.0a5 → dbos-1.3.0a7}/pyproject.toml +1 -1
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_queue.py +2 -16
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_schema_migration.py +0 -4
- {dbos-1.3.0a5 → dbos-1.3.0a7}/LICENSE +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/README.md +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/__init__.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/__main__.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_admin_server.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_app_db.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_classproperty.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_conductor/conductor.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_conductor/protocol.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_context.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_croniter.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_dbos.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_debug.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_docker_pg_helper.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_error.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_event_loop.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_fastapi.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_flask.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_kafka.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_kafka_message.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_logger.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/env.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/script.py.mako +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/d76646551a6c_workflow_queue.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/eab0cc1d9a14_job_queue.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_outcome.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_queue.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_recovery.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_registrations.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_roles.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_scheduler.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_schemas/__init__.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_schemas/application_database.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_serialization.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/README.md +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/alembic.ini +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/migrations/env.py.dbos +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/migrations/script.py.mako +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_tracer.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_utils.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/_workflow_commands.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/cli/_github_init.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/cli/_template_init.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/cli/cli.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/dbos-config.schema.json +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/dbos/py.typed +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/__init__.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/atexit_no_ctor.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/atexit_no_launch.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/classdefs.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/client_collateral.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/client_worker.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/conftest.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/dupname_classdefs1.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/dupname_classdefsa.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/more_classdefs.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/queuedworkflow.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_admin_server.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_async.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_classdecorators.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_cli.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_client.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_concurrency.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_config.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_croniter.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_dbos.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_debug.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_docker_secrets.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_failures.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_fastapi.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_fastapi_roles.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_flask.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_kafka.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_outcome.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_package.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_scheduler.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_singleton.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_spans.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_sqlalchemy.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_workflow_introspection.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/tests/test_workflow_management.py +0 -0
- {dbos-1.3.0a5 → dbos-1.3.0a7}/version/__init__.py +0 -0
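
The substantive change between these two prereleases is the removal of the dedicated workflow_inputs system table: serialized workflow inputs now live in an inputs column on workflow_status, and the per-file diffs below trace that consolidation through the client, core, migration, schema, and system-database layers. As a rough sketch of the resulting round-trip (illustrative only: the real codec lives in dbos._serialization and is not shown in this diff, so the pickle-plus-base64 encoding below is an assumption):

import base64
import pickle
from typing import Any, Dict, Tuple, TypedDict


class WorkflowInputs(TypedDict):
    args: Tuple[Any, ...]
    kwargs: Dict[str, Any]


def serialize_args(inputs: WorkflowInputs) -> str:
    # One text value, suitable for the new workflow_status.inputs column.
    return base64.b64encode(pickle.dumps(inputs)).decode("ascii")


def deserialize_args(serialized: str) -> WorkflowInputs:
    result: WorkflowInputs = pickle.loads(base64.b64decode(serialized))
    return result


# Enqueue path: inputs travel inside the status record itself.
status = {
    "workflow_uuid": "wf-123",
    "inputs": serialize_args({"args": (1, 2), "kwargs": {"x": 3}}),
}
# Recovery path: read them back out of the same record.
assert deserialize_args(status["inputs"])["args"] == (1, 2)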
--- dbos-1.3.0a5/dbos/_client.py
+++ dbos-1.3.0a7/dbos/_client.py
@@ -141,6 +141,11 @@ class DBOSClient:
             "priority": options.get("priority"),
         }

+        inputs: WorkflowInputs = {
+            "args": args,
+            "kwargs": kwargs,
+        }
+
         status: WorkflowStatusInternal = {
             "workflow_uuid": workflow_id,
             "status": WorkflowStatusString.ENQUEUED.value,
@@ -169,16 +174,11 @@ class DBOSClient:
                 if enqueue_options_internal["priority"] is not None
                 else 0
             ),
-        }
-
-        inputs: WorkflowInputs = {
-            "args": args,
-            "kwargs": kwargs,
+            "inputs": _serialization.serialize_args(inputs),
         }

         self._sys_db.init_workflow(
             status,
-            _serialization.serialize_args(inputs),
             max_recovery_attempts=None,
         )
         return workflow_id
@@ -237,6 +237,7 @@ class DBOSClient:
             "workflow_deadline_epoch_ms": None,
             "deduplication_id": None,
             "priority": 0,
+            "inputs": _serialization.serialize_args({"args": (), "kwargs": {}}),
         }
         with self._sys_db.engine.begin() as conn:
             self._sys_db._insert_workflow_status(
--- dbos-1.3.0a5/dbos/_core.py
+++ dbos-1.3.0a7/dbos/_core.py
@@ -252,6 +252,10 @@ def _init_workflow(
             raise DBOSNonExistentWorkflowError(wfid)
         return get_status_result

+    # If we have a class name, the first arg is the instance and do not serialize
+    if class_name is not None:
+        inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
+
     # Initialize a workflow status object from the context
     status: WorkflowStatusInternal = {
         "workflow_uuid": wfid,
@@ -291,16 +295,12 @@ def _init_workflow(
             if enqueue_options is not None
             else 0
         ),
+        "inputs": _serialization.serialize_args(inputs),
     }

-    # If we have a class name, the first arg is the instance and do not serialize
-    if class_name is not None:
-        inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
-
     # Synchronously record the status and inputs for workflows
     wf_status, workflow_deadline_epoch_ms = dbos._sys_db.init_workflow(
         status,
-        _serialization.serialize_args(inputs),
         max_recovery_attempts=max_recovery_attempts,
     )

@@ -441,16 +441,13 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[Any]":
     status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
-    inputs = dbos._sys_db.get_workflow_inputs(workflow_id)
-    if not inputs:
-        raise DBOSRecoveryError(workflow_id, "Workflow inputs not found")
+    inputs = _serialization.deserialize_args(status["inputs"])
     wf_func = dbos._registry.workflow_info_map.get(status["name"], None)
     if not wf_func:
         raise DBOSWorkflowFunctionNotFoundError(
             workflow_id, "Workflow function not found"
         )
     with DBOSContextEnsure():
-        ctx = assert_current_dbos_context()
         # If this function belongs to a configured class, add that class instance as its first argument
         if status["config_name"] is not None:
             config_name = status["config_name"]
--- dbos-1.3.0a5/dbos/_dbos_config.py
+++ dbos-1.3.0a7/dbos/_dbos_config.py
@@ -91,7 +91,7 @@ class ConfigFile(TypedDict, total=False):
     Data structure containing the DBOS Configuration.

     This configuration data is typically loaded from `dbos-config.yaml`.
-    See `https://docs.dbos.dev/
+    See `https://docs.dbos.dev/python/reference/configuration#dbos-configuration-file`

     Attributes:
         name (str): Application name
--- /dev/null
+++ dbos-1.3.0a7/dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py
@@ -0,0 +1,30 @@
+"""consolidate_inputs
+
+Revision ID: d994145b47b6
+Revises: 66478e1b95e5
+Create Date: 2025-05-23 08:09:15.515009
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "d994145b47b6"
+down_revision: Union[str, None] = "66478e1b95e5"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    op.add_column(
+        "workflow_status",
+        sa.Column("inputs", sa.Text(), nullable=True),
+        schema="dbos",
+    )
+
+
+def downgrade() -> None:
+    op.drop_column("workflow_status", "inputs", schema="dbos")
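
The migration above only adds a nullable column; nothing in this diff backfills data from the old workflow_inputs table or drops it at the SQL level (only its SQLAlchemy Table definition is removed below). A quick post-migration sanity check, sketched with SQLAlchemy's inspector (the connection URL is a placeholder):

import sqlalchemy as sa

# Placeholder URL; point this at the DBOS system database.
engine = sa.create_engine("postgresql://localhost/dbos_systemdb")
inspector = sa.inspect(engine)
columns = {c["name"] for c in inspector.get_columns("workflow_status", schema="dbos")}
assert "inputs" in columns  # added by revision d994145b47b6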
--- dbos-1.3.0a5/dbos/_schemas/system_database.py
+++ dbos-1.3.0a7/dbos/_schemas/system_database.py
@@ -58,6 +58,7 @@ class SystemSchema:
         Column("workflow_deadline_epoch_ms", BigInteger, nullable=True),
         Column("started_at_epoch_ms", BigInteger(), nullable=True),
         Column("deduplication_id", Text(), nullable=True),
+        Column("inputs", Text()),
         Column("priority", Integer(), nullable=False, server_default=text("'0'::int")),
         Index("workflow_status_created_at_index", "created_at"),
         Index("workflow_status_executor_id_index", "executor_id"),
@@ -88,21 +89,6 @@ class SystemSchema:
         PrimaryKeyConstraint("workflow_uuid", "function_id"),
     )

-    workflow_inputs = Table(
-        "workflow_inputs",
-        metadata_obj,
-        Column(
-            "workflow_uuid",
-            Text,
-            ForeignKey(
-                "workflow_status.workflow_uuid", onupdate="CASCADE", ondelete="CASCADE"
-            ),
-            primary_key=True,
-            nullable=False,
-        ),
-        Column("inputs", Text, nullable=False),
-    )
-
     notifications = Table(
         "notifications",
         metadata_obj,
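
With the Table definition gone, reading a workflow's inputs is a single-table select against workflow_status, which is exactly what the _sys_db.py changes below do. A minimal sketch (the workflow ID is a placeholder; engine setup and execution are omitted):

import sqlalchemy as sa
from dbos._schemas.system_database import SystemSchema

# Single-table read; before this change it required a join against workflow_inputs.
stmt = sa.select(SystemSchema.workflow_status.c.inputs).where(
    SystemSchema.workflow_status.c.workflow_uuid == "wf-123"  # placeholder ID
)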
--- dbos-1.3.0a5/dbos/_sys_db.py
+++ dbos-1.3.0a7/dbos/_sys_db.py
@@ -140,6 +140,8 @@ class WorkflowStatusInternal(TypedDict):
     deduplication_id: Optional[str]
     # Priority of the workflow on the queue, starting from 1 ~ 2,147,483,647. Default 0 (highest priority).
     priority: int
+    # Serialized workflow inputs
+    inputs: str


 class EnqueueOptionsInternal(TypedDict):
@@ -462,6 +464,7 @@ class SystemDatabase:
                 workflow_deadline_epoch_ms=status["workflow_deadline_epoch_ms"],
                 deduplication_id=status["deduplication_id"],
                 priority=status["priority"],
+                inputs=status["inputs"],
             )
             .on_conflict_do_update(
                 index_elements=["workflow_uuid"],
@@ -642,9 +645,6 @@ class SystemDatabase:
         status = self.get_workflow_status(original_workflow_id)
         if status is None:
             raise Exception(f"Workflow {original_workflow_id} not found")
-        inputs = self.get_workflow_inputs(original_workflow_id)
-        if inputs is None:
-            raise Exception(f"Workflow {original_workflow_id} not found")

         with self.engine.begin() as c:
             # Create an entry for the forked workflow with the same
@@ -666,13 +666,7 @@ class SystemDatabase:
                     authenticated_roles=status["authenticated_roles"],
                     assumed_role=status["assumed_role"],
                     queue_name=INTERNAL_QUEUE_NAME,
-                )
-            )
-            # Copy the original workflow's inputs into the forked workflow
-            c.execute(
-                pg.insert(SystemSchema.workflow_inputs).values(
-                    workflow_uuid=forked_workflow_id,
-                    inputs=_serialization.serialize_args(inputs),
+                    inputs=status["inputs"],
                 )
             )

@@ -732,6 +726,7 @@ class SystemDatabase:
                     SystemSchema.workflow_status.c.workflow_timeout_ms,
                     SystemSchema.workflow_status.c.deduplication_id,
                     SystemSchema.workflow_status.c.priority,
+                    SystemSchema.workflow_status.c.inputs,
                 ).where(SystemSchema.workflow_status.c.workflow_uuid == workflow_uuid)
             ).fetchone()
             if row is None:
@@ -758,6 +753,7 @@ class SystemDatabase:
                 "workflow_timeout_ms": row[15],
                 "deduplication_id": row[16],
                 "priority": row[17],
+                "inputs": row[18],
             }
             return status

@@ -788,53 +784,6 @@ class SystemDatabase:
                 pass  # CB: I guess we're assuming the WF will show up eventually.
             time.sleep(1)

-    def _update_workflow_inputs(
-        self, workflow_uuid: str, inputs: str, conn: sa.Connection
-    ) -> None:
-        if self._debug_mode:
-            raise Exception("called update_workflow_inputs in debug mode")
-
-        cmd = (
-            pg.insert(SystemSchema.workflow_inputs)
-            .values(
-                workflow_uuid=workflow_uuid,
-                inputs=inputs,
-            )
-            .on_conflict_do_update(
-                index_elements=["workflow_uuid"],
-                set_=dict(workflow_uuid=SystemSchema.workflow_inputs.c.workflow_uuid),
-            )
-            .returning(SystemSchema.workflow_inputs.c.inputs)
-        )
-
-        row = conn.execute(cmd).fetchone()
-        if row is not None and row[0] != inputs:
-            # In a distributed environment, scheduled workflows are enqueued multiple times with slightly different timestamps
-            if not workflow_uuid.startswith("sched-"):
-                dbos_logger.warning(
-                    f"Workflow {workflow_uuid} has been called multiple times with different inputs"
-                )
-            # TODO: actually changing the input
-
-        return
-
-    @db_retry()
-    def get_workflow_inputs(
-        self, workflow_uuid: str
-    ) -> Optional[_serialization.WorkflowInputs]:
-        with self.engine.begin() as c:
-            row = c.execute(
-                sa.select(SystemSchema.workflow_inputs.c.inputs).where(
-                    SystemSchema.workflow_inputs.c.workflow_uuid == workflow_uuid
-                )
-            ).fetchone()
-            if row is None:
-                return None
-            inputs: _serialization.WorkflowInputs = _serialization.deserialize_args(
-                row[0]
-            )
-            return inputs
-
     def get_workflows(self, input: GetWorkflowsInput) -> List[WorkflowStatus]:
         """
         Retrieve a list of workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
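
Callers that relied on the removed get_workflow_inputs helper now read the consolidated column off the status record and deserialize it, as execute_workflow_by_id does above; the duplicate-inputs warning that _update_workflow_inputs used to emit disappears with it, which the test_queue.py changes at the bottom reflect. The replacement pattern, sketched as a hypothetical helper (not package code):

from dbos import _serialization

def load_workflow_inputs(sys_db, workflow_id: str):
    # Read the status row, then deserialize its consolidated inputs field.
    status = sys_db.get_workflow_status(workflow_id)
    if status is None:
        raise Exception(f"Workflow {workflow_id} not found")
    return _serialization.deserialize_args(status["inputs"])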
@@ -855,15 +804,11 @@ class SystemDatabase:
             SystemSchema.workflow_status.c.updated_at,
             SystemSchema.workflow_status.c.application_version,
             SystemSchema.workflow_status.c.application_id,
-            SystemSchema.workflow_inputs.c.inputs,
+            SystemSchema.workflow_status.c.inputs,
             SystemSchema.workflow_status.c.output,
             SystemSchema.workflow_status.c.error,
             SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
             SystemSchema.workflow_status.c.workflow_timeout_ms,
-        ).join(
-            SystemSchema.workflow_inputs,
-            SystemSchema.workflow_status.c.workflow_uuid
-            == SystemSchema.workflow_inputs.c.workflow_uuid,
         )
         if input.sort_desc:
             query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
@@ -953,39 +898,31 @@ class SystemDatabase:
         """
         Retrieve a list of queued workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
         """
-        query = (
-            sa.select(
-                SystemSchema.workflow_status.c.workflow_uuid,
-                SystemSchema.workflow_status.c.status,
-                SystemSchema.workflow_status.c.name,
-                SystemSchema.workflow_status.c.recovery_attempts,
-                SystemSchema.workflow_status.c.config_name,
-                SystemSchema.workflow_status.c.class_name,
-                SystemSchema.workflow_status.c.authenticated_user,
-                SystemSchema.workflow_status.c.authenticated_roles,
-                SystemSchema.workflow_status.c.assumed_role,
-                SystemSchema.workflow_status.c.queue_name,
-                SystemSchema.workflow_status.c.executor_id,
-                SystemSchema.workflow_status.c.created_at,
-                SystemSchema.workflow_status.c.updated_at,
-                SystemSchema.workflow_status.c.application_version,
-                SystemSchema.workflow_status.c.application_id,
-                SystemSchema.workflow_inputs.c.inputs,
-                SystemSchema.workflow_status.c.output,
-                SystemSchema.workflow_status.c.error,
-                SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
-                SystemSchema.workflow_status.c.workflow_timeout_ms,
-            )
-            .join(
-                SystemSchema.workflow_inputs,
-                SystemSchema.workflow_status.c.workflow_uuid
-                == SystemSchema.workflow_inputs.c.workflow_uuid,
-            )
-            .where(
-                sa.and_(
-                    SystemSchema.workflow_status.c.queue_name.isnot(None),
-                    SystemSchema.workflow_status.c.status.in_(["ENQUEUED", "PENDING"]),
+        query = sa.select(
+            SystemSchema.workflow_status.c.workflow_uuid,
+            SystemSchema.workflow_status.c.status,
+            SystemSchema.workflow_status.c.name,
+            SystemSchema.workflow_status.c.recovery_attempts,
+            SystemSchema.workflow_status.c.config_name,
+            SystemSchema.workflow_status.c.class_name,
+            SystemSchema.workflow_status.c.authenticated_user,
+            SystemSchema.workflow_status.c.authenticated_roles,
+            SystemSchema.workflow_status.c.assumed_role,
+            SystemSchema.workflow_status.c.queue_name,
+            SystemSchema.workflow_status.c.executor_id,
+            SystemSchema.workflow_status.c.created_at,
+            SystemSchema.workflow_status.c.updated_at,
+            SystemSchema.workflow_status.c.application_version,
+            SystemSchema.workflow_status.c.application_id,
+            SystemSchema.workflow_status.c.inputs,
+            SystemSchema.workflow_status.c.output,
+            SystemSchema.workflow_status.c.error,
+            SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
+            SystemSchema.workflow_status.c.workflow_timeout_ms,
+        ).where(
+            sa.and_(
+                SystemSchema.workflow_status.c.queue_name.isnot(None),
+                SystemSchema.workflow_status.c.status.in_(["ENQUEUED", "PENDING"]),
             )
         )
         if input["sort_desc"]:
@@ -1895,7 +1832,6 @@ class SystemDatabase:
     def init_workflow(
         self,
         status: WorkflowStatusInternal,
-        inputs: str,
         *,
         max_recovery_attempts: Optional[int],
     ) -> tuple[WorkflowStatuses, Optional[int]]:
@@ -1906,7 +1842,6 @@ class SystemDatabase:
             wf_status, workflow_deadline_epoch_ms = self._insert_workflow_status(
                 status, conn, max_recovery_attempts=max_recovery_attempts
            )
-            self._update_workflow_inputs(status["workflow_uuid"], inputs, conn)
         return wf_status, workflow_deadline_epoch_ms

     def check_connection(self) -> None:
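
init_workflow thus loses its separate serialized-inputs parameter: inputs ride along inside the status record and are written by the single _insert_workflow_status upsert. A hypothetical sketch of the new calling convention (sys_db, args, and kwargs stand in for values available at the call site):

from dbos import _serialization

def enqueue_with_inputs(sys_db, status: dict, args: tuple, kwargs: dict):
    # Inputs are a field of the status record; init_workflow no longer
    # takes a separate serialized-inputs argument.
    status["inputs"] = _serialization.serialize_args({"args": args, "kwargs": kwargs})
    return sys_db.init_workflow(status, max_recovery_attempts=None)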
--- dbos-1.3.0a5/tests/test_queue.py
+++ dbos-1.3.0a7/tests/test_queue.py
@@ -574,7 +574,7 @@ def test_worker_concurrency_with_n_dbos_instances(dbos: DBOS) -> None:


 # Test error cases where we have duplicated workflows starting with the same workflow ID.
-def test_duplicate_workflow_id(dbos: DBOS, caplog: pytest.LogCaptureFixture) -> None:
+def test_duplicate_workflow_id(dbos: DBOS) -> None:
     wfid = str(uuid.uuid4())

     @DBOS.workflow()
@@ -606,17 +606,11 @@ def test_duplicate_workflow_id(dbos: DBOS, caplog: pytest.LogCaptureFixture) -> None:
         DBOS.sleep(0.1)
         return self.config_name + ":" + var1

-    original_propagate = logging.getLogger("dbos").propagate
-    caplog.set_level(logging.WARNING, "dbos")
-    logging.getLogger("dbos").propagate = True
-
     with SetWorkflowID(wfid):
         origHandle = DBOS.start_workflow(test_workflow, "abc")
         # The second one will generate a warning message but no error.
         test_dup_workflow()

-    assert "Multiple workflows started in the same SetWorkflowID block." in caplog.text
-
     # It's okay to call the same workflow with the same ID again.
     with SetWorkflowID(wfid):
         same_handle = DBOS.start_workflow(test_workflow, "abc")
@@ -661,25 +655,17 @@ def test_duplicate_workflow_id(dbos: DBOS, caplog: pytest.LogCaptureFixture) -> None:
     with SetWorkflowID(wfid):
         handle = queue.enqueue(test_workflow, "abc")
     assert handle.get_result() == "abc"
-    assert "Workflow already exists in queue" in caplog.text

-    # Call with a different input
+    # Call with a different input still uses the recorded input.
     with SetWorkflowID(wfid):
         res = test_workflow("def")
     # We want to see the warning message, but the result is non-deterministic
     # TODO: in the future, we may want to always use the recorded inputs.
     assert res == "abc" or res == "def"
-    assert (
-        f"Workflow {wfid} has been called multiple times with different inputs"
-        in caplog.text
-    )

     assert origHandle.get_result() == "abc"
     assert same_handle.get_result() == "abc"

-    # Reset logging
-    logging.getLogger("dbos").propagate = original_propagate
-

 def test_queue_recovery(dbos: DBOS) -> None:
     step_counter: int = 0
--- dbos-1.3.0a5/tests/test_schema_migration.py
+++ dbos-1.3.0a7/tests/test_schema_migration.py
@@ -20,10 +20,6 @@ def test_systemdb_migration(dbos: DBOS) -> None:
     result = connection.execute(sql)
     assert result.fetchall() == []

-    sql = SystemSchema.workflow_inputs.select()
-    result = connection.execute(sql)
-    assert result.fetchall() == []
-
     sql = SystemSchema.operation_outputs.select()
     result = connection.execute(sql)
     assert result.fetchall() == []