dbos 2.4.0a3__py3-none-any.whl → 2.4.0a7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of dbos might be problematic.
- dbos/_client.py +4 -3
- dbos/_conductor/conductor.py +25 -29
- dbos/_context.py +2 -3
- dbos/_dbos.py +17 -16
- dbos/_dbos_config.py +1 -2
- dbos/_debouncer.py +4 -5
- dbos/_fastapi.py +2 -3
- dbos/_flask.py +2 -3
- dbos/_logger.py +5 -4
- dbos/_sys_db.py +27 -0
- dbos/_tracer.py +1 -1
- dbos/_utils.py +10 -0
- dbos/_workflow_commands.py +2 -2
- dbos/cli/migration.py +1 -0
- {dbos-2.4.0a3.dist-info → dbos-2.4.0a7.dist-info}/METADATA +1 -1
- {dbos-2.4.0a3.dist-info → dbos-2.4.0a7.dist-info}/RECORD +19 -19
- {dbos-2.4.0a3.dist-info → dbos-2.4.0a7.dist-info}/WHEEL +1 -1
- {dbos-2.4.0a3.dist-info → dbos-2.4.0a7.dist-info}/entry_points.txt +0 -0
- {dbos-2.4.0a3.dist-info → dbos-2.4.0a7.dist-info}/licenses/LICENSE +0 -0
dbos/_client.py
CHANGED

@@ -1,7 +1,6 @@
 import asyncio
 import json
 import time
-import uuid
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -20,6 +19,7 @@ import sqlalchemy as sa
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
 from dbos._sys_db import SystemDatabase
+from dbos._utils import generate_uuid

 if TYPE_CHECKING:
     from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
@@ -158,6 +158,7 @@ class DBOSClient:
             engine=system_database_engine,
             schema=dbos_system_schema,
             serializer=serializer,
+            executor_id=None,
         )
         self._sys_db.check_connection()
         if application_database_url:
@@ -187,7 +188,7 @@ class DBOSClient:
         max_recovery_attempts = DEFAULT_MAX_RECOVERY_ATTEMPTS
         workflow_id = options.get("workflow_id")
         if workflow_id is None:
-            workflow_id = str(uuid.uuid4())
+            workflow_id = generate_uuid()
         workflow_timeout = options.get("workflow_timeout", None)
         enqueue_options_internal: EnqueueOptionsInternal = {
             "deduplication_id": options.get("deduplication_id"),
@@ -280,7 +281,7 @@ class DBOSClient:
         topic: Optional[str] = None,
         idempotency_key: Optional[str] = None,
     ) -> None:
-        idempotency_key = idempotency_key if idempotency_key else str(uuid.uuid4())
+        idempotency_key = idempotency_key if idempotency_key else generate_uuid()
         status: WorkflowStatusInternal = {
             "workflow_uuid": f"{destination_id}-{idempotency_key}",
             "status": WorkflowStatusString.SUCCESS.value,
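In the send hunk above, the workflow ID for a notification is derived as f"{destination_id}-{idempotency_key}", so a caller-supplied idempotency key makes retried sends map onto the same workflow row instead of creating duplicates. A minimal sketch of that derivation (the function name is hypothetical; uuid4 stands in for the generate_uuid helper added in dbos/_utils.py):

import uuid
from typing import Optional

def send_workflow_id(destination_id: str, idempotency_key: Optional[str]) -> str:
    # Without a caller-supplied key, each send gets a fresh UUID and is not
    # deduplicated; with one, the derived workflow ID is stable across retries.
    key = idempotency_key if idempotency_key else str(uuid.uuid4())
    return f"{destination_id}-{key}"

assert send_workflow_id("order-wf-1", "charge-42") == "order-wf-1-charge-42"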
dbos/_conductor/conductor.py
CHANGED

@@ -2,7 +2,6 @@ import socket
 import threading
 import time
 import traceback
-import uuid
 from importlib.metadata import version
 from typing import TYPE_CHECKING, Optional

@@ -11,7 +10,7 @@ from websockets.sync.client import connect
 from websockets.sync.connection import Connection

 from dbos._context import SetWorkflowID
-from dbos._utils import GlobalParams
+from dbos._utils import GlobalParams, generate_uuid
 from dbos._workflow_commands import (
     garbage_collect,
     get_workflow,
@@ -192,7 +191,7 @@ class ConductorWebsocket(threading.Thread):
                     fork_message = p.ForkWorkflowRequest.from_json(message)
                     new_workflow_id = fork_message.body["new_workflow_id"]
                     if new_workflow_id is None:
-                        new_workflow_id = str(uuid.uuid4())
+                        new_workflow_id = generate_uuid()
                     workflow_id = fork_message.body["workflow_id"]
                     start_step = fork_message.body["start_step"]
                     app_version = fork_message.body["application_version"]
@@ -223,23 +222,21 @@ class ConductorWebsocket(threading.Thread):
                     body = list_workflows_message.body
                     infos = []
                     try:
-                        load_input = body.get("load_input", False)
-                        load_output = body.get("load_output", False)
                         infos = list_workflows(
                             self.dbos._sys_db,
-                            workflow_ids=body["workflow_uuids"],
-                            user=body["authenticated_user"],
-                            start_time=body["start_time"],
-                            end_time=body["end_time"],
-                            status=body["status"],
-                            app_version=body["application_version"],
-                            forked_from=body["forked_from"],
-                            name=body["workflow_name"],
-                            limit=body["limit"],
-                            offset=body["offset"],
-                            sort_desc=body["sort_desc"],
-                            load_input=load_input,
-                            load_output=load_output,
+                            workflow_ids=body.get("workflow_uuids", None),
+                            user=body.get("authenticated_user", None),
+                            start_time=body.get("start_time", None),
+                            end_time=body.get("end_time", None),
+                            status=body.get("status", None),
+                            app_version=body.get("application_version", None),
+                            forked_from=body.get("forked_from", None),
+                            name=body.get("workflow_name", None),
+                            limit=body.get("limit", None),
+                            offset=body.get("offset", None),
+                            sort_desc=body.get("sort_desc", False),
+                            load_input=body.get("load_input", False),
+                            load_output=body.get("load_output", False),
                         )
                     except Exception as e:
                         error_message = f"Exception encountered when listing workflows: {traceback.format_exc()}"
@@ -262,19 +259,18 @@ class ConductorWebsocket(threading.Thread):
                     q_body = list_queued_workflows_message.body
                     infos = []
                     try:
-                        q_load_input = q_body.get("load_input", False)
                         infos = list_queued_workflows(
                             self.dbos._sys_db,
-                            start_time=q_body["start_time"],
-                            end_time=q_body["end_time"],
-                            status=q_body["status"],
-                            forked_from=q_body["forked_from"],
-                            name=q_body["workflow_name"],
-                            limit=q_body["limit"],
-                            offset=q_body["offset"],
-                            queue_name=q_body["queue_name"],
-                            sort_desc=q_body["sort_desc"],
-                            load_input=q_load_input,
+                            start_time=q_body.get("start_time", None),
+                            end_time=q_body.get("end_time", None),
+                            status=q_body.get("status", None),
+                            forked_from=q_body.get("forked_from", None),
+                            name=q_body.get("workflow_name", None),
+                            limit=q_body.get("limit", None),
+                            offset=q_body.get("offset", None),
+                            queue_name=q_body.get("queue_name", None),
+                            sort_desc=q_body.get("sort_desc", False),
+                            load_input=q_body.get("load_input", False),
                         )
                     except Exception as e:
                         error_message = f"Exception encountered when listing queued workflows: {traceback.format_exc()}"
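Beyond the generate_uuid swap, both listing handlers were hardened: every optional filter is now read with dict.get and an explicit default rather than bracket indexing, so a Conductor request that omits a field no longer raises KeyError. The difference in miniature (hypothetical request body):

body = {"limit": 10}  # the optional "status" filter is omitted

# Bracket indexing assumes the field is always present and raises otherwise:
try:
    status = body["status"]
except KeyError:
    status = None

# .get() folds the fallback into the lookup and never raises:
status = body.get("status", None)
limit = body.get("limit", None)
assert (status, limit) == (None, 10)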
dbos/_context.py
CHANGED

@@ -2,7 +2,6 @@ from __future__ import annotations

 import json
 import os
-import uuid
 from contextlib import AbstractContextManager
 from contextvars import ContextVar
 from dataclasses import dataclass
@@ -15,7 +14,7 @@ if TYPE_CHECKING:

 from sqlalchemy.orm import Session

-from dbos._utils import GlobalParams
+from dbos._utils import GlobalParams, generate_uuid

 from ._logger import dbos_logger
 from ._tracer import dbos_tracer
@@ -151,7 +150,7 @@ class DBOSContext:
             self.logger.warning(
                 f"Multiple workflows started in the same SetWorkflowID block. Only the first workflow is assigned the specified workflow ID; subsequent workflows will use a generated workflow ID."
             )
-        wfid = str(uuid.uuid4())
+        wfid = generate_uuid()
         return wfid

     def start_workflow(
dbos/_dbos.py
CHANGED

@@ -7,7 +7,6 @@ import os
 import sys
 import threading
 import time
-import uuid
 from concurrent.futures import ThreadPoolExecutor
 from logging import Logger
 from typing import (
@@ -33,7 +32,7 @@ from dbos._conductor.conductor import ConductorWebsocket
 from dbos._debouncer import debouncer_workflow
 from dbos._serialization import DefaultSerializer, Serializer
 from dbos._sys_db import SystemDatabase, WorkflowStatus
-from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
+from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams, generate_uuid
 from dbos._workflow_commands import fork_workflow, list_queued_workflows, list_workflows

 from ._classproperty import classproperty
@@ -444,7 +443,7 @@ class DBOS:
         if GlobalParams.app_version == "":
             GlobalParams.app_version = self._registry.compute_app_version()
         if self.conductor_key is not None:
-            GlobalParams.executor_id = str(uuid.uuid4())
+            GlobalParams.executor_id = generate_uuid()
         dbos_logger.info(f"Executor ID: {GlobalParams.executor_id}")
         dbos_logger.info(f"Application version: {GlobalParams.app_version}")
         self._executor_field = ThreadPoolExecutor(max_workers=sys.maxsize)
@@ -460,6 +459,7 @@ class DBOS:
             debug_mode=debug_mode,
             schema=schema,
             serializer=self._serializer,
+            executor_id=GlobalParams.executor_id,
         )
         assert self._config["database"]["db_engine_kwargs"] is not None
         if self._config["database_url"]:
@@ -495,20 +495,21 @@ class DBOS:
         except Exception as e:
             dbos_logger.warning(f"Failed to start admin server: {e}")

-
-
-
-
-
-            self.logger.info(
-                f"Recovering {len(workflow_ids)} workflows from application version {GlobalParams.app_version}"
-            )
-        else:
-            self.logger.info(
-                f"No workflows to recover from application version {GlobalParams.app_version}"
+        # Recover local workflows if not using a recovery service
+        if not self.conductor_key and not GlobalParams.dbos_cloud:
+            dbos_logger.debug("Retrieving local pending workflows for recovery")
+            workflow_ids = self._sys_db.get_pending_workflows(
+                GlobalParams.executor_id, GlobalParams.app_version
             )
-
-
+            if (len(workflow_ids)) > 0:
+                self.logger.info(
+                    f"Recovering {len(workflow_ids)} workflows from application version {GlobalParams.app_version}"
+                )
+            else:
+                self.logger.info(
+                    f"No workflows to recover from application version {GlobalParams.app_version}"
+                )
+            self._executor.submit(startup_recovery_thread, self, workflow_ids)

         # Listen to notifications
         dbos_logger.debug("Starting notifications listener thread")
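The launch path now self-recovers pending workflows only when no external service owns recovery: Conductor-managed processes (conductor_key set) and DBOS Cloud deployments (DBOS__CLOUD=true, surfaced as GlobalParams.dbos_cloud in dbos/_utils.py below) skip the local recovery pass. A condensed sketch of the gate, with an illustrative class standing in for the inline logic:

import os
from typing import Optional

class RecoveryGate:
    def __init__(self, conductor_key: Optional[str]) -> None:
        self.conductor_key = conductor_key
        # Mirrors GlobalParams.dbos_cloud from dbos/_utils.py
        self.dbos_cloud = os.environ.get("DBOS__CLOUD") == "true"

    def should_self_recover(self) -> bool:
        # Recover locally only if neither Conductor nor DBOS Cloud is responsible
        return not self.conductor_key and not self.dbos_cloud

print(RecoveryGate(conductor_key=None).should_self_recover())  # True unless DBOS__CLOUD=true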
dbos/_dbos_config.py
CHANGED
dbos/_debouncer.py
CHANGED

@@ -2,7 +2,6 @@ import asyncio
 import math
 import time
 import types
-import uuid
 from typing import (
     TYPE_CHECKING,
     Any,
@@ -39,7 +38,7 @@ from dbos._error import DBOSQueueDeduplicatedError
 from dbos._queue import Queue
 from dbos._registrations import get_dbos_func_name
 from dbos._serialization import WorkflowInputs
-from dbos._utils import INTERNAL_QUEUE_NAME
+from dbos._utils import INTERNAL_QUEUE_NAME, generate_uuid

 if TYPE_CHECKING:
     from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
@@ -209,7 +208,7 @@ class Debouncer(Generic[P, R]):

         # Deterministically generate the user workflow ID and message ID
         def assign_debounce_ids() -> tuple[str, str]:
-            return str(uuid.uuid4()), ctx.assign_workflow_id()
+            return generate_uuid(), ctx.assign_workflow_id()

         message_id, user_workflow_id = dbos._sys_db.call_function_as_step(
             assign_debounce_ids, "DBOS.assign_debounce_ids"
@@ -320,14 +319,14 @@ class DebouncerClient:
             "workflow_id": (
                 self.workflow_options["workflow_id"]
                 if self.workflow_options.get("workflow_id")
-                else str(uuid.uuid4())
+                else generate_uuid()
             ),
             "app_version": self.workflow_options.get("app_version"),
             "deduplication_id": self.workflow_options.get("deduplication_id"),
             "priority": self.workflow_options.get("priority"),
             "workflow_timeout_sec": self.workflow_options.get("workflow_timeout"),
         }
-        message_id = str(uuid.uuid4())
+        message_id = generate_uuid()
         while True:
             try:
                 # Attempt to enqueue a debouncer for this workflow.
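The assign_debounce_ids change keeps an important invariant visible: the nondeterministic ID generation stays wrapped in call_function_as_step, so the first execution records the generated IDs and a recovering workflow replays the recorded values rather than minting new ones. A toy sketch of that record-then-replay contract (the in-memory recorder is hypothetical; the real implementation persists step outputs in the system database):

import uuid
from typing import Callable

_recorded: dict = {}  # stand-in for the system database's step-output table

def call_function_as_step(func: Callable[[], str], name: str) -> str:
    if name not in _recorded:
        _recorded[name] = func()  # first execution: run the function and record it
    return _recorded[name]        # replay after a crash: return the recorded output

first = call_function_as_step(lambda: str(uuid.uuid4()), "DBOS.assign_debounce_ids")
replay = call_function_as_step(lambda: str(uuid.uuid4()), "DBOS.assign_debounce_ids")
assert first == replay  # deterministic across recovery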
dbos/_fastapi.py
CHANGED

@@ -1,4 +1,3 @@
-import uuid
 from typing import Any, Callable, MutableMapping, cast

 from fastapi import FastAPI
@@ -9,7 +8,7 @@ from starlette.types import ASGIApp, Receive, Scope, Send
 from . import DBOS
 from ._context import EnterDBOSHandler, OperationType, SetWorkflowID, TracedAttributes
 from ._error import DBOSException
-from ._utils import request_id_header
+from ._utils import generate_uuid, request_id_header


 def _get_or_generate_request_id(request: FastAPIRequest) -> str:
@@ -17,7 +16,7 @@ def _get_or_generate_request_id(request: FastAPIRequest) -> str:
     if request_id is not None:
         return request_id
     else:
-        return str(uuid.uuid4())
+        return generate_uuid()


 async def _dbos_error_handler(request: FastAPIRequest, gexc: Exception) -> JSONResponse:
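The FastAPI and Flask middlewares (next section) share the same fallback: honor an incoming x-request-id header when present, otherwise mint a fresh ID. A framework-free sketch of the pattern (header name from dbos/_utils.py; uuid4 stands in for generate_uuid):

import uuid

request_id_header = "x-request-id"

def get_or_generate_request_id(headers: dict) -> str:
    request_id = headers.get(request_id_header)
    return request_id if request_id is not None else str(uuid.uuid4())

assert get_or_generate_request_id({"x-request-id": "req-1"}) == "req-1"
assert len(get_or_generate_request_id({})) == 36  # a generated UUID string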
dbos/_flask.py
CHANGED

@@ -1,4 +1,3 @@
-import uuid
 from typing import Any
 from urllib.parse import urlparse

@@ -6,7 +5,7 @@ from flask import Flask
 from werkzeug.wrappers import Request as WRequest

 from ._context import EnterDBOSHandler, OperationType, SetWorkflowID, TracedAttributes
-from ._utils import request_id_header
+from ._utils import generate_uuid, request_id_header


 class FlaskMiddleware:
@@ -41,7 +40,7 @@ def _get_or_generate_request_id(request: WRequest) -> str:
     if request_id is not None:
         return request_id
     else:
-        return str(uuid.uuid4())
+        return generate_uuid()


 def setup_flask_middleware(app: Flask) -> None:
dbos/_logger.py
CHANGED

@@ -79,7 +79,7 @@ def config_logger(config: "ConfigFile") -> None:

     # Only set up OTLP provider and exporter if endpoints are provided
     log_provider = get_logger_provider()
-    if otlp_logs_endpoints is not None:
+    if otlp_logs_endpoints is not None and len(otlp_logs_endpoints) > 0:
         if not isinstance(log_provider, LoggerProvider):
             log_provider = LoggerProvider(
                 Resource.create(
@@ -100,10 +100,11 @@ def config_logger(config: "ConfigFile") -> None:

     # Even if no endpoints are provided, we still need a LoggerProvider to create the LoggingHandler
     global _otlp_handler
-    _otlp_handler = LoggingHandler(logger_provider=log_provider)
+    if _otlp_handler is None:
+        _otlp_handler = LoggingHandler(logger_provider=log_provider)

-
-
+    # Direct DBOS logs to OTLP
+    dbos_logger.addHandler(_otlp_handler)

     # Attach DBOS-specific attributes to all log entries.
     global _dbos_log_transformer
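Guarding the handler creation with `if _otlp_handler is None` makes config_logger idempotent: repeated configuration reuses one LoggingHandler object, and Logger.addHandler ignores a handler that is already attached, so logs are not emitted twice. A standalone demonstration of why reusing the handler object matters:

import logging

noisy = logging.getLogger("demo-noisy")
noisy.addHandler(logging.StreamHandler())
noisy.addHandler(logging.StreamHandler())  # a second, distinct handler object
noisy.warning("emitted twice")             # once per attached handler

quiet = logging.getLogger("demo-quiet")
handler = logging.StreamHandler()
quiet.addHandler(handler)
quiet.addHandler(handler)  # same object: Logger.addHandler makes this a no-op
quiet.warning("emitted once")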
dbos/_sys_db.py
CHANGED

@@ -351,6 +351,7 @@ class SystemDatabase(ABC):
         engine: Optional[sa.Engine],
         schema: Optional[str],
         serializer: Serializer,
+        executor_id: Optional[str],
         debug_mode: bool = False,
     ) -> "SystemDatabase":
         """Factory method to create the appropriate SystemDatabase implementation based on URL."""
@@ -363,6 +364,7 @@ class SystemDatabase(ABC):
                 engine=engine,
                 schema=schema,
                 serializer=serializer,
+                executor_id=executor_id,
                 debug_mode=debug_mode,
             )
         else:
@@ -374,6 +376,7 @@ class SystemDatabase(ABC):
                 engine=engine,
                 schema=schema,
                 serializer=serializer,
+                executor_id=executor_id,
                 debug_mode=debug_mode,
             )

@@ -385,6 +388,7 @@ class SystemDatabase(ABC):
         engine: Optional[sa.Engine],
         schema: Optional[str],
         serializer: Serializer,
+        executor_id: Optional[str],
         debug_mode: bool = False,
     ):
         import sqlalchemy.dialects.postgresql as pg
@@ -410,6 +414,8 @@ class SystemDatabase(ABC):

         self.notifications_map = ThreadSafeConditionDict()
         self.workflow_events_map = ThreadSafeConditionDict()
+        self.executor_id = executor_id
+
         self._listener_thread_lock = threading.Lock()

         # Now we can run background processes
@@ -1069,6 +1075,27 @@ class SystemDatabase(ABC):
         error = result["error"]
         output = result["output"]
         assert error is None or output is None, "Only one of error or output can be set"
+        wf_executor_id_row = conn.execute(
+            sa.select(
+                SystemSchema.workflow_status.c.executor_id,
+            ).where(
+                SystemSchema.workflow_status.c.workflow_uuid == result["workflow_uuid"]
+            )
+        ).fetchone()
+        assert wf_executor_id_row is not None
+        wf_executor_id = wf_executor_id_row[0]
+        if self.executor_id is not None and wf_executor_id != self.executor_id:
+            dbos_logger.debug(
+                f'Resetting executor_id from {wf_executor_id} to {self.executor_id} for workflow {result["workflow_uuid"]}'
+            )
+            conn.execute(
+                sa.update(SystemSchema.workflow_status)
+                .values(executor_id=self.executor_id)
+                .where(
+                    SystemSchema.workflow_status.c.workflow_uuid
+                    == result["workflow_uuid"]
+                )
+            )
         sql = sa.insert(SystemSchema.operation_outputs).values(
             workflow_uuid=result["workflow_uuid"],
             function_id=result["function_id"],
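The new block runs before a step's output is recorded: it reads the workflow row's executor_id and, if another executor currently owns the row, reassigns it to this process inside the same transaction, so a recovering executor formally adopts the workflows it resumes. A distilled, runnable restatement of that read-compare-update pattern (simplified table; the real code targets SystemSchema.workflow_status):

import sqlalchemy as sa

metadata = sa.MetaData()
workflow_status = sa.Table(
    "workflow_status", metadata,
    sa.Column("workflow_uuid", sa.String, primary_key=True),
    sa.Column("executor_id", sa.String),
)

engine = sa.create_engine("sqlite://")
metadata.create_all(engine)
my_executor_id = "executor-B"

with engine.begin() as conn:
    conn.execute(sa.insert(workflow_status).values(
        workflow_uuid="wf-1", executor_id="executor-A"))
    # Read the row's current owner...
    row = conn.execute(
        sa.select(workflow_status.c.executor_id)
        .where(workflow_status.c.workflow_uuid == "wf-1")
    ).fetchone()
    assert row is not None
    # ...and adopt the workflow if this process is not the recorded owner.
    if row[0] != my_executor_id:
        conn.execute(
            sa.update(workflow_status)
            .values(executor_id=my_executor_id)
            .where(workflow_status.c.workflow_uuid == "wf-1")
        )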
dbos/_tracer.py
CHANGED

@@ -45,7 +45,7 @@ class DBOSTracer:
         tracer_provider = trace.get_tracer_provider()

         # Only set up OTLP provider and exporter if endpoints are provided
-        if otlp_traces_endpoints is not None:
+        if otlp_traces_endpoints is not None and len(otlp_traces_endpoints) > 0:
             if not isinstance(tracer_provider, TracerProvider):
                 resource = Resource(
                     attributes={
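Same guard as in the logger: `is not None` alone lets a present-but-empty endpoints list through and would configure an exporter pipeline with nowhere to send spans. The distinction in two lines:

endpoints = []  # configured, but empty

print(endpoints is not None)                         # True: empty list slips through
print(endpoints is not None and len(endpoints) > 0)  # False: correctly skipped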
dbos/_utils.py
CHANGED

@@ -1,5 +1,7 @@
 import importlib.metadata
 import os
+import sys
+import uuid

 import psycopg
 from sqlalchemy.exc import DBAPIError
@@ -12,6 +14,7 @@ request_id_header = "x-request-id"
 class GlobalParams:
     app_version: str = os.environ.get("DBOS__APPVERSION", "")
     executor_id: str = os.environ.get("DBOS__VMID", "local")
+    dbos_cloud: bool = os.environ.get("DBOS__CLOUD") == "true"
     try:
         # Only works on Python >= 3.8
         dbos_version = importlib.metadata.version("dbos")
@@ -57,3 +60,10 @@ def retriable_sqlite_exception(e: Exception) -> bool:
         return True
     else:
         return False
+
+
+def generate_uuid() -> str:
+    if sys.version_info >= (3, 14):
+        return str(uuid.uuid7())
+    else:
+        return str(uuid.uuid4())
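generate_uuid is the thread running through this whole release: every call site that previously inlined str(uuid.uuid4()) now routes through it, and on Python 3.14+ (which adds uuid.uuid7 per RFC 9562) it returns time-ordered UUIDv7 strings instead of fully random UUIDv4. Because a v7 value leads with a millisecond timestamp, later IDs sort after earlier ones, which tends to improve index locality for workflow tables. A usage sketch of the helper as defined above:

import sys
import uuid

def generate_uuid() -> str:
    # uuid.uuid7() only exists on Python 3.14+; older interpreters fall back to v4.
    if sys.version_info >= (3, 14):
        return str(uuid.uuid7())
    return str(uuid.uuid4())

ids = [generate_uuid() for _ in range(3)]
# On 3.14+ these are time-ordered at millisecond granularity; within the same
# millisecond the remaining bits are random, so ordering is monotonic only
# across timestamp ticks.
print(ids)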
dbos/_workflow_commands.py
CHANGED

@@ -1,8 +1,8 @@
-import uuid
 from datetime import datetime
 from typing import TYPE_CHECKING, List, Optional, Union

 from dbos._context import get_local_dbos_context
+from dbos._utils import generate_uuid

 from ._app_db import ApplicationDatabase
 from ._sys_db import (
@@ -125,7 +125,7 @@ def fork_workflow(
         forked_workflow_id = ctx.id_assigned_for_next_workflow
         ctx.id_assigned_for_next_workflow = ""
     else:
-        forked_workflow_id = str(uuid.uuid4())
+        forked_workflow_id = generate_uuid()
     if app_db:
         app_db.clone_workflow_transactions(workflow_id, forked_workflow_id, start_step)
     sys_db.fork_workflow(
dbos/cli/migration.py
CHANGED

{dbos-2.4.0a3.dist-info → dbos-2.4.0a7.dist-info}/RECORD
CHANGED

@@ -1,30 +1,30 @@
-dbos-2.4.0a3.dist-info/METADATA,sha256=
-dbos-2.4.0a3.dist-info/WHEEL,sha256=
-dbos-2.4.0a3.dist-info/entry_points.txt,sha256=
-dbos-2.4.0a3.dist-info/licenses/LICENSE,sha256=
+dbos-2.4.0a7.dist-info/METADATA,sha256=QfQAxKiyjgeLossCW9-CWaqwuGYS0Oo3YXCqUMPybB4,14532
+dbos-2.4.0a7.dist-info/WHEEL,sha256=tsUv_t7BDeJeRHaSrczbGeuK-TtDpGsWi_JfpzD255I,90
+dbos-2.4.0a7.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-2.4.0a7.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=M7FdFSBGhcvaLIXrNw_0eR68ijwMWV7_UEyimHMP_F4,1039
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=Kce_Cv6JXZBABzfOcNJdVHOwEukmp7SvO24TSa-gLIM,16371
 dbos/_app_db.py,sha256=3XHvTePe1JaAI42rO3waWGoEeDyXkFKGzTFwJxQHUmo,16464
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=
-dbos/_conductor/conductor.py,sha256=
+dbos/_client.py,sha256=Upl9tqrMKWQKDv8L13zaGbI03uWF2Cw3c7W_Cg-Bsls,20018
+dbos/_conductor/conductor.py,sha256=8QNK2kXH2fG_YUg6E2fKGEhtIwFMcZIeelgP9b0kwrQ,24283
 dbos/_conductor/protocol.py,sha256=nVjpcSw_OPoCM7NBU_IRWnk9dFQjOgAkg0ufhj8lFzI,8901
-dbos/_context.py,sha256=
+dbos/_context.py,sha256=GOoSpv1BE4jAXtMzTvPfy2VR6Rr4fSqTb0IjgCR_ER4,28165
 dbos/_core.py,sha256=FCspRQFRMFyHpkl4vqR8IEw3aitD-VWB77CMVQrlyy8,50257
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=
-dbos/_dbos_config.py,sha256=
-dbos/_debouncer.py,sha256=
+dbos/_dbos.py,sha256=KUD5YJUYhPasx0yLZHpqjLVqcnE8UYWIpK7OZJlNR6M,60050
+dbos/_dbos_config.py,sha256=eV8_jBRLotWwx0_aGO7VoqbeB0vyN9dmIgG4G22tIeE,24814
+dbos/_debouncer.py,sha256=G3QwqNCmdWC0zDy3LMV4kfK771aI-Kx8ULBgU7NXAwE,15294
 dbos/_debug.py,sha256=0MfgNqutCUhI4PEmmra9x7f3DiFE_0nscfUCHdLimEY,1415
 dbos/_docker_pg_helper.py,sha256=xySum4hTA8TVMBODoG19u4cXQAB1vCock-jwM2pnmSI,7791
 dbos/_error.py,sha256=GwO0Ng4d4iB52brY09-Ss6Cz_V28Xc0D0cRCzZ6XmNM,8688
 dbos/_event_loop.py,sha256=cvaFN9-II3MsHEOq8QoICc_8qSKrjikMlLfuhC3Y8Dk,2923
-dbos/_fastapi.py,sha256=
-dbos/_flask.py,sha256=
+dbos/_fastapi.py,sha256=2Dss2CpAbiGo2dNTrcfMHufUTdj_gUukJ_qZHbwBQao,3422
+dbos/_flask.py,sha256=_ST-eLLrg_KVj1eUvbeT2XeDIlECyvG9uOp3KCSjb2A,1653
 dbos/_kafka.py,sha256=cA3hXyT-FR4LQZnaBMVLTZn7oko76rcTUC_kOo6aSis,4352
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
-dbos/_logger.py,sha256=
+dbos/_logger.py,sha256=vJhmTltDGFSZCpfcnj5EGJJWYgf-b-7taV2thwM67Rc,5160
 dbos/_migration.py,sha256=GJdxHhMUnsr3pjrGwi1f4PT76ABnn9kFUyqWp4Hakmw,11701
 dbos/_outcome.py,sha256=7HvosMfEHTh1U5P6xok7kFTGLwa2lPaul0YApb3UnN4,8191
 dbos/_queue.py,sha256=GmqZHl9smES1KSmpauhSdsnZFJHDyfvRArmC-jBibhw,6228
@@ -36,7 +36,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=tQAFCnEyZ7bEXZm3FbGIYk5SNGk-AHA3R_vuR0hfH8s,5717
 dbos/_serialization.py,sha256=ZGrkN5UclSLOqMVZgYpT72pw1l888ZXRoYuu3pIg3PA,2957
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=nxxIgwtzoHCRWXydwQHQ65YOTMFmUVX_p8EDZftUXXg,86259
 dbos/_sys_db_postgres.py,sha256=_3m3hF6Pc23iZfUlIFYtDuC1Tw6KsjYqnDQE0HZpjt4,6965
 dbos/_sys_db_sqlite.py,sha256=ifjKdy-Z9vlVIBf5L6XnSaNjiBdvqPE73asVHim4A5Q,6998
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
@@ -46,14 +46,14 @@ dbos/_templates/dbos-db-starter/__package/schema.py,sha256=7Z27JGC8yy7Z44cbVXIRE
 dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos,sha256=0wPktElM7kMB3OPHTXw4xBk9bgGKMqOHrrr7x_R23Z8,446
 dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos,sha256=pVm2Q0AsxS8pg85llbrXFD6jMccMqGjhGRjTEvS-hXk,942
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
-dbos/_tracer.py,sha256=
-dbos/_utils.py,sha256=
-dbos/_workflow_commands.py,sha256=
+dbos/_tracer.py,sha256=jVCn4EiPl8Lo21oN39yELAhS79JQilqSDLLFhkFEPCc,3993
+dbos/_utils.py,sha256=OHFOQ1r5goT75eRRiddJd-s0-ik3zt3jSOIMUoElEhQ,2007
+dbos/_workflow_commands.py,sha256=qQFFb4iQ1qReC4iwCTipDBVff0pjxtmeHM9uDdNg9Xg,5184
 dbos/cli/_github_init.py,sha256=R_94Fnn40CAmPy-zM00lwHi0ndyfv57TmIooADjmag4,3378
 dbos/cli/_template_init.py,sha256=AltKk256VocgvxLpuTxpjJyACrdHFjbGoqYhHzeLae4,2649
 dbos/cli/cli.py,sha256=AHz_JJj_qWCTRV8yT1RSA-hISFVIJrE9eUalApw9sxg,27149
-dbos/cli/migration.py,sha256=
+dbos/cli/migration.py,sha256=jRAr4H7Bpf-PfmLoOA6qXk9i15Cl1TehUz-cphjvYJM,3805
 dbos/dbos-config.schema.json,sha256=47wofTZ5jlFynec7bG0L369tAXbRQQ2euBxBXvg4m9c,1730
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-2.4.0a3.dist-info/RECORD,,
+dbos-2.4.0a7.dist-info/RECORD,,

{dbos-2.4.0a3.dist-info → dbos-2.4.0a7.dist-info}/entry_points.txt
File without changes

{dbos-2.4.0a3.dist-info → dbos-2.4.0a7.dist-info}/licenses/LICENSE
File without changes