dbos 0.28.0a1__py3-none-any.whl → 0.28.0a6__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. It is provided for informational purposes only and reflects the package contents exactly as they appear in their respective public registries.
dbos/_admin_server.py CHANGED
@@ -66,11 +66,11 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
66
66
  elif self.path == _deactivate_path:
67
67
  if not AdminRequestHandler.is_deactivated:
68
68
  dbos_logger.info(
69
- f"Deactivating DBOS executor {GlobalParams.executor_id} with version {GlobalParams.app_version}. This executor will complete existing workflows but will not start new workflows."
69
+ f"Deactivating DBOS executor {GlobalParams.executor_id} with version {GlobalParams.app_version}. This executor will complete existing workflows but will not create new workflows."
70
70
  )
71
71
  AdminRequestHandler.is_deactivated = True
72
- # Stop all scheduled workflows, queues, and kafka loops
73
- for event in self.dbos.stop_events:
72
+ # Stop all event receivers (scheduler and Kafka threads)
73
+ for event in self.dbos.poller_stop_events:
74
74
  event.set()
75
75
  self.send_response(200)
76
76
  self._end_headers()
dbos/_client.py CHANGED
@@ -6,6 +6,7 @@ from typing import Any, Generic, List, Optional, TypedDict, TypeVar
6
6
  from sqlalchemy import URL
7
7
 
8
8
  from dbos._app_db import ApplicationDatabase
9
+ from dbos._context import MaxPriority, MinPriority
9
10
 
10
11
  if sys.version_info < (3, 11):
11
12
  from typing_extensions import NotRequired
@@ -15,7 +16,7 @@ else:
15
16
  from dbos import _serialization
16
17
  from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
17
18
  from dbos._dbos_config import parse_database_url_to_dbconfig
18
- from dbos._error import DBOSNonExistentWorkflowError
19
+ from dbos._error import DBOSException, DBOSNonExistentWorkflowError
19
20
  from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
20
21
  from dbos._serialization import WorkflowInputs
21
22
  from dbos._sys_db import (
@@ -44,6 +45,15 @@ class EnqueueOptions(TypedDict):
44
45
  app_version: NotRequired[str]
45
46
  workflow_timeout: NotRequired[float]
46
47
  deduplication_id: NotRequired[str]
48
+ priority: NotRequired[int]
49
+
50
+
51
+ def validate_enqueue_options(options: EnqueueOptions) -> None:
52
+ priority = options.get("priority")
53
+ if priority is not None and (priority < MinPriority or priority > MaxPriority):
54
+ raise DBOSException(
55
+ f"Invalid priority {priority}. Priority must be between {MinPriority}~{MaxPriority}."
56
+ )
47
57
 
48
58
 
49
59
  class WorkflowHandleClientPolling(Generic[R]):
@@ -60,7 +70,7 @@ class WorkflowHandleClientPolling(Generic[R]):
60
70
  return res
61
71
 
62
72
  def get_status(self) -> WorkflowStatus:
63
- status = get_workflow(self._sys_db, self.workflow_id, False)
73
+ status = get_workflow(self._sys_db, self.workflow_id)
64
74
  if status is None:
65
75
  raise DBOSNonExistentWorkflowError(self.workflow_id)
66
76
  return status
@@ -82,9 +92,7 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):
82
92
  return res
83
93
 
84
94
  async def get_status(self) -> WorkflowStatus:
85
- status = await asyncio.to_thread(
86
- get_workflow, self._sys_db, self.workflow_id, False
87
- )
95
+ status = await asyncio.to_thread(get_workflow, self._sys_db, self.workflow_id)
88
96
  if status is None:
89
97
  raise DBOSNonExistentWorkflowError(self.workflow_id)
90
98
  return status
@@ -103,6 +111,7 @@ class DBOSClient:
103
111
  self._sys_db.destroy()
104
112
 
105
113
  def _enqueue(self, options: EnqueueOptions, *args: Any, **kwargs: Any) -> str:
114
+ validate_enqueue_options(options)
106
115
  workflow_name = options["workflow_name"]
107
116
  queue_name = options["queue_name"]
108
117
 
@@ -116,6 +125,7 @@ class DBOSClient:
116
125
  workflow_timeout = options.get("workflow_timeout", None)
117
126
  enqueue_options_internal: EnqueueOptionsInternal = {
118
127
  "deduplication_id": options.get("deduplication_id"),
128
+ "priority": options.get("priority"),
119
129
  }
120
130
 
121
131
  status: WorkflowStatusInternal = {
@@ -129,7 +139,6 @@ class DBOSClient:
129
139
  "authenticated_user": None,
130
140
  "assumed_role": None,
131
141
  "authenticated_roles": None,
132
- "request": None,
133
142
  "output": None,
134
143
  "error": None,
135
144
  "created_at": None,
@@ -169,13 +178,13 @@ class DBOSClient:
169
178
  return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)
170
179
 
171
180
  def retrieve_workflow(self, workflow_id: str) -> WorkflowHandle[R]:
172
- status = get_workflow(self._sys_db, workflow_id, False)
181
+ status = get_workflow(self._sys_db, workflow_id)
173
182
  if status is None:
174
183
  raise DBOSNonExistentWorkflowError(workflow_id)
175
184
  return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)
176
185
 
177
186
  async def retrieve_workflow_async(self, workflow_id: str) -> WorkflowHandleAsync[R]:
178
- status = asyncio.to_thread(get_workflow, self._sys_db, workflow_id, False)
187
+ status = asyncio.to_thread(get_workflow, self._sys_db, workflow_id)
179
188
  if status is None:
180
189
  raise DBOSNonExistentWorkflowError(workflow_id)
181
190
  return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)
@@ -198,7 +207,6 @@ class DBOSClient:
198
207
  "authenticated_user": None,
199
208
  "assumed_role": None,
200
209
  "authenticated_roles": None,
201
- "request": None,
202
210
  "output": None,
203
211
  "error": None,
204
212
  "created_at": None,
@@ -143,7 +143,6 @@ class ConductorWebsocket(threading.Thread):
143
143
  start_time=body["start_time"],
144
144
  end_time=body["end_time"],
145
145
  status=body["status"],
146
- request=False,
147
146
  app_version=body["application_version"],
148
147
  name=body["workflow_name"],
149
148
  limit=body["limit"],
@@ -176,7 +175,6 @@ class ConductorWebsocket(threading.Thread):
176
175
  start_time=q_body["start_time"],
177
176
  end_time=q_body["end_time"],
178
177
  status=q_body["status"],
179
- request=False,
180
178
  name=q_body["workflow_name"],
181
179
  limit=q_body["limit"],
182
180
  offset=q_body["offset"],
@@ -206,9 +204,7 @@ class ConductorWebsocket(threading.Thread):
206
204
  info = None
207
205
  try:
208
206
  info = get_workflow(
209
- self.dbos._sys_db,
210
- get_workflow_message.workflow_id,
211
- get_request=False,
207
+ self.dbos._sys_db, get_workflow_message.workflow_id
212
208
  )
213
209
  except Exception as e:
214
210
  error_message = f"Exception encountered when getting workflow {get_workflow_message.workflow_id}: {traceback.format_exc()}"
@@ -149,7 +149,7 @@ class WorkflowsOutput:
149
149
  inputs_str = str(info.input) if info.input is not None else None
150
150
  outputs_str = str(info.output) if info.output is not None else None
151
151
  error_str = str(info.error) if info.error is not None else None
152
- request_str = str(info.request) if info.request is not None else None
152
+ request_str = None
153
153
  roles_str = (
154
154
  str(info.authenticated_roles)
155
155
  if info.authenticated_roles is not None
dbos/_context.py CHANGED
@@ -16,7 +16,6 @@ from sqlalchemy.orm import Session
16
16
  from dbos._utils import GlobalParams
17
17
 
18
18
  from ._logger import dbos_logger
19
- from ._request import Request
20
19
  from ._tracer import dbos_tracer
21
20
 
22
21
 
@@ -31,6 +30,9 @@ class OperationType(Enum):
31
30
 
32
31
  OperationTypes = Literal["handler", "workflow", "transaction", "step", "procedure"]
33
32
 
33
+ MaxPriority = 2**31 - 1 # 2,147,483,647
34
+ MinPriority = 1
35
+
34
36
 
35
37
  # Keys must be the same as in TypeScript Transact
36
38
  class TracedAttributes(TypedDict, total=False):
@@ -73,8 +75,6 @@ class DBOSContext:
73
75
 
74
76
  self.logger = dbos_logger
75
77
 
76
- self.request: Optional["Request"] = None
77
-
78
78
  self.id_assigned_for_next_workflow: str = ""
79
79
  self.is_within_set_workflow_id_block: bool = False
80
80
 
@@ -100,6 +100,8 @@ class DBOSContext:
100
100
 
101
101
  # A user-specified deduplication ID for the enqueuing workflow.
102
102
  self.deduplication_id: Optional[str] = None
103
+ # A user-specified priority for the enqueuing workflow.
104
+ self.priority: Optional[int] = None
103
105
 
104
106
  def create_child(self) -> DBOSContext:
105
107
  rv = DBOSContext()
@@ -115,7 +117,6 @@ class DBOSContext:
115
117
  if self.authenticated_roles is not None
116
118
  else None
117
119
  )
118
- rv.request = self.request
119
120
  rv.assumed_role = self.assumed_role
120
121
  return rv
121
122
 
@@ -422,15 +423,23 @@ class SetEnqueueOptions:
422
423
 
423
424
  Usage:
424
425
  ```
425
- with SetEnqueueOptions(deduplication_id=<deduplication id>):
426
+ with SetEnqueueOptions(deduplication_id=<deduplication id>, priority=<priority>):
426
427
  queue.enqueue(...)
427
428
  ```
428
429
  """
429
430
 
430
- def __init__(self, *, deduplication_id: Optional[str] = None) -> None:
431
+ def __init__(
432
+ self, *, deduplication_id: Optional[str] = None, priority: Optional[int] = None
433
+ ) -> None:
431
434
  self.created_ctx = False
432
435
  self.deduplication_id: Optional[str] = deduplication_id
433
436
  self.saved_deduplication_id: Optional[str] = None
437
+ if priority is not None and (priority < MinPriority or priority > MaxPriority):
438
+ raise Exception(
439
+ f"Invalid priority {priority}. Priority must be between {MinPriority}~{MaxPriority}."
440
+ )
441
+ self.priority: Optional[int] = priority
442
+ self.saved_priority: Optional[int] = None
434
443
 
435
444
  def __enter__(self) -> SetEnqueueOptions:
436
445
  # Code to create a basic context
@@ -441,6 +450,8 @@ class SetEnqueueOptions:
441
450
  ctx = assert_current_dbos_context()
442
451
  self.saved_deduplication_id = ctx.deduplication_id
443
452
  ctx.deduplication_id = self.deduplication_id
453
+ self.saved_priority = ctx.priority
454
+ ctx.priority = self.priority
444
455
  return self
445
456
 
446
457
  def __exit__(
@@ -449,7 +460,9 @@ class SetEnqueueOptions:
449
460
  exc_value: Optional[BaseException],
450
461
  traceback: Optional[TracebackType],
451
462
  ) -> Literal[False]:
452
- assert_current_dbos_context().deduplication_id = self.saved_deduplication_id
463
+ curr_ctx = assert_current_dbos_context()
464
+ curr_ctx.deduplication_id = self.saved_deduplication_id
465
+ curr_ctx.priority = self.saved_priority
453
466
  # Code to clean up the basic context if we created it
454
467
  if self.created_ctx:
455
468
  _clear_local_dbos_context()
@@ -463,6 +476,7 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
463
476
  self.is_temp_workflow = attributes["name"] == "temp_wf"
464
477
  self.saved_workflow_timeout: Optional[int] = None
465
478
  self.saved_deduplication_id: Optional[str] = None
479
+ self.saved_priority: Optional[int] = None
466
480
 
467
481
  def __enter__(self) -> DBOSContext:
468
482
  # Code to create a basic context
@@ -476,10 +490,12 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
476
490
  # workflow's children (instead we propagate the deadline)
477
491
  self.saved_workflow_timeout = ctx.workflow_timeout_ms
478
492
  ctx.workflow_timeout_ms = None
479
- # Unset the deduplication_id context var so it is not applied to this
493
+ # Unset the deduplication_id and priority context var so it is not applied to this
480
494
  # workflow's children
481
495
  self.saved_deduplication_id = ctx.deduplication_id
482
496
  ctx.deduplication_id = None
497
+ self.saved_priority = ctx.priority
498
+ ctx.priority = None
483
499
  ctx.start_workflow(
484
500
  None, self.attributes, self.is_temp_workflow
485
501
  ) # Will get from the context's next workflow ID
@@ -498,7 +514,8 @@ class EnterDBOSWorkflow(AbstractContextManager[DBOSContext, Literal[False]]):
498
514
  ctx.workflow_timeout_ms = self.saved_workflow_timeout
499
515
  # Clear any propagating timeout
500
516
  ctx.workflow_deadline_epoch_ms = None
501
- # Restore the saved deduplication ID
517
+ # Restore the saved deduplication ID and priority
518
+ ctx.priority = self.saved_priority
502
519
  ctx.deduplication_id = self.saved_deduplication_id
503
520
  # Code to clean up the basic context if we created it
504
521
  if self.created_ctx:
dbos/_core.py CHANGED
@@ -266,9 +266,6 @@ def _init_workflow(
266
266
  "app_id": ctx.app_id,
267
267
  "app_version": GlobalParams.app_version,
268
268
  "executor_id": ctx.executor_id,
269
- "request": (
270
- _serialization.serialize(ctx.request) if ctx.request is not None else None
271
- ),
272
269
  "recovery_attempts": None,
273
270
  "authenticated_user": ctx.authenticated_user,
274
271
  "authenticated_roles": (
@@ -296,7 +293,7 @@ def _init_workflow(
296
293
 
297
294
  if workflow_deadline_epoch_ms is not None:
298
295
  evt = threading.Event()
299
- dbos.stop_events.append(evt)
296
+ dbos.background_thread_stop_events.append(evt)
300
297
 
301
298
  def timeout_func() -> None:
302
299
  try:
@@ -443,10 +440,6 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
443
440
  )
444
441
  with DBOSContextEnsure():
445
442
  ctx = assert_current_dbos_context()
446
- request = status["request"]
447
- ctx.request = (
448
- _serialization.deserialize(request) if request is not None else None
449
- )
450
443
  # If this function belongs to a configured class, add that class instance as its first argument
451
444
  if status["config_name"] is not None:
452
445
  config_name = status["config_name"]
@@ -544,6 +537,7 @@ def start_workflow(
544
537
  )
545
538
  enqueue_options = EnqueueOptionsInternal(
546
539
  deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
540
+ priority=local_ctx.priority if local_ctx is not None else None,
547
541
  )
548
542
  new_wf_id, new_wf_ctx = _get_new_wf()
549
543
 
@@ -635,6 +629,7 @@ async def start_workflow_async(
635
629
  )
636
630
  enqueue_options = EnqueueOptionsInternal(
637
631
  deduplication_id=local_ctx.deduplication_id if local_ctx is not None else None,
632
+ priority=local_ctx.priority if local_ctx is not None else None,
638
633
  )
639
634
  new_wf_id, new_wf_ctx = _get_new_wf()
640
635
 
dbos/_dbos.py CHANGED
@@ -71,14 +71,11 @@ from ._tracer import DBOSTracer, dbos_tracer
71
71
  if TYPE_CHECKING:
72
72
  from fastapi import FastAPI
73
73
  from ._kafka import _KafkaConsumerWorkflow
74
- from ._request import Request
75
74
  from flask import Flask
76
75
 
77
76
  from sqlalchemy import URL
78
77
  from sqlalchemy.orm import Session
79
78
 
80
- from ._request import Request
81
-
82
79
  if sys.version_info < (3, 10):
83
80
  from typing_extensions import ParamSpec
84
81
  else:
@@ -197,7 +194,7 @@ class DBOSRegistry:
197
194
  self, evt: threading.Event, func: Callable[..., Any], *args: Any, **kwargs: Any
198
195
  ) -> None:
199
196
  if self.dbos and self.dbos._launched:
200
- self.dbos.stop_events.append(evt)
197
+ self.dbos.poller_stop_events.append(evt)
201
198
  self.dbos._executor.submit(func, *args, **kwargs)
202
199
  else:
203
200
  self.pollers.append((evt, func, args, kwargs))
@@ -247,7 +244,7 @@ class DBOS:
247
244
  2. Starting workflow functions
248
245
  3. Retrieving workflow status information
249
246
  4. Interacting with workflows via events and messages
250
- 5. Accessing context, including the current user, request, SQL session, logger, and tracer
247
+ 5. Accessing context, including the current user, SQL session, logger, and tracer
251
248
 
252
249
  """
253
250
 
@@ -330,7 +327,10 @@ class DBOS:
330
327
  self._registry: DBOSRegistry = _get_or_create_dbos_registry()
331
328
  self._registry.dbos = self
332
329
  self._admin_server_field: Optional[AdminServer] = None
333
- self.stop_events: List[threading.Event] = []
330
+ # Stop internal background threads (queue thread, timeout threads, etc.)
331
+ self.background_thread_stop_events: List[threading.Event] = []
332
+ # Stop pollers (event receivers) that can create new workflows (scheduler, Kafka)
333
+ self.poller_stop_events: List[threading.Event] = []
334
334
  self.fastapi: Optional["FastAPI"] = fastapi
335
335
  self.flask: Optional["Flask"] = flask
336
336
  self._executor_field: Optional[ThreadPoolExecutor] = None
@@ -371,7 +371,7 @@ class DBOS:
371
371
  set_env_vars(self._config)
372
372
  config_logger(self._config)
373
373
  dbos_tracer.config(self._config)
374
- dbos_logger.info("Initializing DBOS")
374
+ dbos_logger.info(f"Initializing DBOS (v{GlobalParams.dbos_version})")
375
375
 
376
376
  # If using FastAPI, set up middleware and lifecycle events
377
377
  if self.fastapi is not None:
@@ -502,7 +502,7 @@ class DBOS:
502
502
 
503
503
  # Start the queue thread
504
504
  evt = threading.Event()
505
- self.stop_events.append(evt)
505
+ self.background_thread_stop_events.append(evt)
506
506
  bg_queue_thread = threading.Thread(
507
507
  target=queue_thread, args=(evt, self), daemon=True
508
508
  )
@@ -515,7 +515,7 @@ class DBOS:
515
515
  dbos_domain = os.environ.get("DBOS_DOMAIN", "cloud.dbos.dev")
516
516
  self.conductor_url = f"wss://{dbos_domain}/conductor/v1alpha1"
517
517
  evt = threading.Event()
518
- self.stop_events.append(evt)
518
+ self.background_thread_stop_events.append(evt)
519
519
  self.conductor_websocket = ConductorWebsocket(
520
520
  self,
521
521
  conductor_url=self.conductor_url,
@@ -527,7 +527,7 @@ class DBOS:
527
527
 
528
528
  # Grab any pollers that were deferred and start them
529
529
  for evt, func, args, kwargs in self._registry.pollers:
530
- self.stop_events.append(evt)
530
+ self.poller_stop_events.append(evt)
531
531
  poller_thread = threading.Thread(
532
532
  target=func, args=args, kwargs=kwargs, daemon=True
533
533
  )
@@ -583,7 +583,9 @@ class DBOS:
583
583
 
584
584
  def _destroy(self) -> None:
585
585
  self._initialized = False
586
- for event in self.stop_events:
586
+ for event in self.poller_stop_events:
587
+ event.set()
588
+ for event in self.background_thread_stop_events:
587
589
  event.set()
588
590
  self._background_event_loop.stop()
589
591
  if self._sys_db_field is not None:
@@ -760,7 +762,7 @@ class DBOS:
760
762
  """Return the status of a workflow execution."""
761
763
 
762
764
  def fn() -> Optional[WorkflowStatus]:
763
- return get_workflow(_get_dbos_instance()._sys_db, workflow_id, True)
765
+ return get_workflow(_get_dbos_instance()._sys_db, workflow_id)
764
766
 
765
767
  return _get_dbos_instance()._sys_db.call_function_as_step(fn, "DBOS.getStatus")
766
768
 
@@ -1156,12 +1158,6 @@ class DBOS:
1156
1158
  assert span
1157
1159
  return span
1158
1160
 
1159
- @classproperty
1160
- def request(cls) -> Optional["Request"]:
1161
- """Return the HTTP `Request`, if any, associated with the current context."""
1162
- ctx = assert_current_dbos_context()
1163
- return ctx.request
1164
-
1165
1161
  @classproperty
1166
1162
  def authenticated_user(cls) -> Optional[str]:
1167
1163
  """Return the current authenticated user, if any, associated with the current context."""
dbos/_fastapi.py CHANGED
@@ -7,15 +7,9 @@ from fastapi.responses import JSONResponse
7
7
  from starlette.types import ASGIApp, Receive, Scope, Send
8
8
 
9
9
  from . import DBOS
10
- from ._context import (
11
- EnterDBOSHandler,
12
- OperationType,
13
- SetWorkflowID,
14
- TracedAttributes,
15
- assert_current_dbos_context,
16
- )
10
+ from ._context import EnterDBOSHandler, OperationType, SetWorkflowID, TracedAttributes
17
11
  from ._error import DBOSException
18
- from ._request import Address, Request, request_id_header
12
+ from ._utils import request_id_header
19
13
 
20
14
 
21
15
  def _get_or_generate_request_id(request: FastAPIRequest) -> str:
@@ -26,19 +20,6 @@ def _get_or_generate_request_id(request: FastAPIRequest) -> str:
26
20
  return str(uuid.uuid4())
27
21
 
28
22
 
29
- def _make_request(request: FastAPIRequest) -> Request:
30
- return Request(
31
- headers=request.headers,
32
- path_params=request.path_params,
33
- query_params=request.query_params,
34
- url=str(request.url),
35
- base_url=str(request.base_url),
36
- client=Address(*request.client) if request.client is not None else None,
37
- cookies=request.cookies,
38
- method=request.method,
39
- )
40
-
41
-
42
23
  async def _dbos_error_handler(request: FastAPIRequest, gexc: Exception) -> JSONResponse:
43
24
  exc: DBOSException = cast(DBOSException, gexc)
44
25
  status_code = 500
@@ -96,8 +77,6 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
96
77
  "operationType": OperationType.HANDLER.value,
97
78
  }
98
79
  with EnterDBOSHandler(attributes):
99
- ctx = assert_current_dbos_context()
100
- ctx.request = _make_request(request)
101
80
  workflow_id = request.headers.get("dbos-idempotency-key")
102
81
  if workflow_id is not None:
103
82
  # Set the workflow ID for the handler
dbos/_flask.py CHANGED
@@ -2,17 +2,11 @@ import uuid
2
2
  from typing import Any
3
3
  from urllib.parse import urlparse
4
4
 
5
- from flask import Flask, request
5
+ from flask import Flask
6
6
  from werkzeug.wrappers import Request as WRequest
7
7
 
8
- from ._context import (
9
- EnterDBOSHandler,
10
- OperationType,
11
- SetWorkflowID,
12
- TracedAttributes,
13
- assert_current_dbos_context,
14
- )
15
- from ._request import Address, Request, request_id_header
8
+ from ._context import EnterDBOSHandler, OperationType, SetWorkflowID, TracedAttributes
9
+ from ._utils import request_id_header
16
10
 
17
11
 
18
12
  class FlaskMiddleware:
@@ -32,8 +26,6 @@ class FlaskMiddleware:
32
26
  "operationType": OperationType.HANDLER.value,
33
27
  }
34
28
  with EnterDBOSHandler(attributes):
35
- ctx = assert_current_dbos_context()
36
- ctx.request = _make_request(request)
37
29
  workflow_id = request.headers.get("dbos-idempotency-key")
38
30
  if workflow_id is not None:
39
31
  # Set the workflow ID for the handler
@@ -52,31 +44,5 @@ def _get_or_generate_request_id(request: WRequest) -> str:
52
44
  return str(uuid.uuid4())
53
45
 
54
46
 
55
- def _make_request(request: WRequest) -> Request:
56
- parsed_url = urlparse(request.url)
57
- base_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
58
-
59
- client = None
60
- if request.remote_addr:
61
- hostname = request.remote_addr
62
- port = request.environ.get("REMOTE_PORT")
63
- if port:
64
- client = Address(hostname=hostname, port=int(port))
65
- else:
66
- # If port is not available, use 0 as a placeholder
67
- client = Address(hostname=hostname, port=0)
68
-
69
- return Request(
70
- headers=dict(request.headers),
71
- path_params={},
72
- query_params=dict(request.args),
73
- url=request.url,
74
- base_url=base_url,
75
- client=client,
76
- cookies=dict(request.cookies),
77
- method=request.method,
78
- )
79
-
80
-
81
47
  def setup_flask_middleware(app: Flask) -> None:
82
48
  app.wsgi_app = FlaskMiddleware(app.wsgi_app) # type: ignore
@@ -0,0 +1,35 @@
1
+ """add queue priority
2
+
3
+ Revision ID: 933e86bdac6a
4
+ Revises: 27ac6900c6ad
5
+ Create Date: 2025-04-25 18:17:40.462737
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ import sqlalchemy as sa
12
+ from alembic import op
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = "933e86bdac6a"
16
+ down_revision: Union[str, None] = "27ac6900c6ad"
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ op.add_column(
23
+ "workflow_queue",
24
+ sa.Column(
25
+ "priority",
26
+ sa.Integer(),
27
+ nullable=False,
28
+ server_default=sa.text("'0'::int"),
29
+ ),
30
+ schema="dbos",
31
+ )
32
+
33
+
34
+ def downgrade() -> None:
35
+ op.drop_column("workflow_queue", "priority", schema="dbos")
dbos/_recovery.py CHANGED
@@ -29,7 +29,7 @@ def startup_recovery_thread(
29
29
  ) -> None:
30
30
  """Attempt to recover local pending workflows on startup using a background thread."""
31
31
  stop_event = threading.Event()
32
- dbos.stop_events.append(stop_event)
32
+ dbos.background_thread_stop_events.append(stop_event)
33
33
  while not stop_event.is_set() and len(pending_workflows) > 0:
34
34
  try:
35
35
  for pending_workflow in list(pending_workflows):
@@ -29,7 +29,6 @@ class SystemSchema:
29
29
  Column("authenticated_user", Text, nullable=True),
30
30
  Column("assumed_role", Text, nullable=True),
31
31
  Column("authenticated_roles", Text, nullable=True),
32
- Column("request", Text, nullable=True),
33
32
  Column("output", Text, nullable=True),
34
33
  Column("error", Text, nullable=True),
35
34
  Column("executor_id", Text, nullable=True),
@@ -180,6 +179,12 @@ class SystemSchema:
180
179
  Text,
181
180
  nullable=True,
182
181
  ),
182
+ Column(
183
+ "priority",
184
+ Integer,
185
+ nullable=False,
186
+ server_default=text("'0'::int"),
187
+ ),
183
188
  UniqueConstraint(
184
189
  "queue_name", "deduplication_id", name="uq_workflow_queue_name_dedup_id"
185
190
  ),