dbos 0.28.0a4__py3-none-any.whl → 0.28.0a7__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of dbos might be problematic.
- dbos/_admin_server.py +3 -3
- dbos/_client.py +4 -8
- dbos/_conductor/conductor.py +71 -8
- dbos/_conductor/protocol.py +1 -1
- dbos/_context.py +0 -4
- dbos/_core.py +1 -8
- dbos/_dbos.py +13 -17
- dbos/_fastapi.py +2 -23
- dbos/_flask.py +3 -37
- dbos/_recovery.py +1 -1
- dbos/_schemas/system_database.py +0 -1
- dbos/_sys_db.py +46 -60
- dbos/_utils.py +2 -0
- dbos/_workflow_commands.py +4 -8
- {dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/METADATA +2 -2
- {dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/RECORD +19 -20
- dbos/_request.py +0 -35
- {dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/WHEEL +0 -0
- {dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/entry_points.txt +0 -0
- {dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/licenses/LICENSE +0 -0
dbos/_admin_server.py
CHANGED
@@ -66,11 +66,11 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         elif self.path == _deactivate_path:
             if not AdminRequestHandler.is_deactivated:
                 dbos_logger.info(
-                    f"Deactivating DBOS executor {GlobalParams.executor_id} with version {GlobalParams.app_version}. This executor will complete existing workflows but will not
+                    f"Deactivating DBOS executor {GlobalParams.executor_id} with version {GlobalParams.app_version}. This executor will complete existing workflows but will not create new workflows."
                 )
                 AdminRequestHandler.is_deactivated = True
-            # Stop all
-            for event in self.dbos.
+            # Stop all event receivers (scheduler and Kafka threads)
+            for event in self.dbos.poller_stop_events:
                 event.set()
             self.send_response(200)
             self._end_headers()
dbos/_client.py
CHANGED
@@ -70,7 +70,7 @@ class WorkflowHandleClientPolling(Generic[R]):
         return res

     def get_status(self) -> WorkflowStatus:
-        status = get_workflow(self._sys_db, self.workflow_id
+        status = get_workflow(self._sys_db, self.workflow_id)
         if status is None:
             raise DBOSNonExistentWorkflowError(self.workflow_id)
         return status
@@ -92,9 +92,7 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):
         return res

     async def get_status(self) -> WorkflowStatus:
-        status = await asyncio.to_thread(
-            get_workflow, self._sys_db, self.workflow_id, False
-        )
+        status = await asyncio.to_thread(get_workflow, self._sys_db, self.workflow_id)
         if status is None:
             raise DBOSNonExistentWorkflowError(self.workflow_id)
         return status
@@ -141,7 +139,6 @@ class DBOSClient:
             "authenticated_user": None,
             "assumed_role": None,
             "authenticated_roles": None,
-            "request": None,
             "output": None,
             "error": None,
             "created_at": None,
@@ -181,13 +178,13 @@ class DBOSClient:
         return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)

     def retrieve_workflow(self, workflow_id: str) -> WorkflowHandle[R]:
-        status = get_workflow(self._sys_db, workflow_id
+        status = get_workflow(self._sys_db, workflow_id)
         if status is None:
             raise DBOSNonExistentWorkflowError(workflow_id)
         return WorkflowHandleClientPolling[R](workflow_id, self._sys_db)

     async def retrieve_workflow_async(self, workflow_id: str) -> WorkflowHandleAsync[R]:
-        status = asyncio.to_thread(get_workflow, self._sys_db, workflow_id
+        status = asyncio.to_thread(get_workflow, self._sys_db, workflow_id)
         if status is None:
             raise DBOSNonExistentWorkflowError(workflow_id)
         return WorkflowHandleClientAsyncPolling[R](workflow_id, self._sys_db)
@@ -210,7 +207,6 @@ class DBOSClient:
             "authenticated_user": None,
             "assumed_role": None,
             "authenticated_roles": None,
-            "request": None,
             "output": None,
             "error": None,
             "created_at": None,
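Context for the change above: the async polling handle wraps the synchronous get_workflow lookup in asyncio.to_thread, which runs a blocking callable on a worker thread and yields its result when awaited. A minimal, self-contained sketch of that pattern; the blocking function below is a stand-in for illustration, not the DBOS implementation:

import asyncio
import time


def blocking_lookup(workflow_id: str) -> str:
    # Stand-in for a synchronous database call such as get_workflow(sys_db, workflow_id)
    time.sleep(0.1)
    return f"status-of-{workflow_id}"


async def get_status(workflow_id: str) -> str:
    # asyncio.to_thread returns a coroutine; awaiting it yields the function's return value
    return await asyncio.to_thread(blocking_lookup, workflow_id)


print(asyncio.run(get_status("wf-123")))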
dbos/_conductor/conductor.py
CHANGED
@@ -2,6 +2,7 @@ import socket
 import threading
 import time
 import traceback
+from importlib.metadata import version
 from typing import TYPE_CHECKING, Optional

 from websockets import ConnectionClosed, ConnectionClosedOK, InvalidStatus
@@ -21,6 +22,9 @@ from . import protocol as p
 if TYPE_CHECKING:
     from dbos import DBOS

+ws_version = version("websockets")
+use_keepalive = ws_version < "15.0"
+

 class ConductorWebsocket(threading.Thread):

@@ -35,14 +39,64 @@ class ConductorWebsocket(threading.Thread):
         self.url = (
             conductor_url.rstrip("/") + f"/websocket/{self.app_name}/{conductor_key}"
         )
+        # TODO: once we can upgrade to websockets>=15.0, we can always use the built-in keepalive
+        self.ping_interval = 20  # Time between pings in seconds
+        self.ping_timeout = 15  # Time to wait for a pong response in seconds
+        self.keepalive_thread: Optional[threading.Thread] = None
+        self.pong_event: Optional[threading.Event] = None
+        self.last_ping_time = -1.0
+
+        self.dbos.logger.debug(
+            f"Connecting to conductor at {self.url} using websockets version {ws_version}"
+        )
+
+    def keepalive(self) -> None:
+        self.dbos.logger.debug("Starting keepalive thread")
+        while not self.evt.is_set():
+            if self.websocket is None or self.websocket.close_code is not None:
+                time.sleep(1)
+                continue
+            try:
+                self.last_ping_time = time.time()
+                self.pong_event = self.websocket.ping()
+                self.dbos.logger.debug("> Sent ping to conductor")
+                pong_result = self.pong_event.wait(self.ping_timeout)
+                elapsed_time = time.time() - self.last_ping_time
+                if not pong_result:
+                    self.dbos.logger.warning(
+                        f"Failed to receive pong from conductor after {elapsed_time:.2f} seconds. Reconnecting."
+                    )
+                    self.websocket.close()
+                    continue
+                # Wait for the next ping interval
+                self.dbos.logger.debug(
+                    f"< Received pong from conductor after {elapsed_time:.2f} seconds"
+                )
+                wait_time = self.ping_interval - elapsed_time
+                self.evt.wait(max(0, wait_time))
+            except ConnectionClosed:
+                # The main loop will try to reconnect
+                self.dbos.logger.debug("Connection to conductor closed.")
+            except Exception as e:
+                self.dbos.logger.warning(f"Failed to send ping to conductor: {e}.")
+                self.websocket.close()

     def run(self) -> None:
         while not self.evt.is_set():
             try:
                 with connect(
-                    self.url,
+                    self.url,
+                    open_timeout=5,
+                    close_timeout=5,
+                    logger=self.dbos.logger,
                 ) as websocket:
                     self.websocket = websocket
+                    if use_keepalive and self.keepalive_thread is None:
+                        self.keepalive_thread = threading.Thread(
+                            target=self.keepalive,
+                            daemon=True,
+                        )
+                        self.keepalive_thread.start()
                     while not self.evt.is_set():
                         message = websocket.recv()
                         if not isinstance(message, str):
@@ -143,7 +197,6 @@ class ConductorWebsocket(threading.Thread):
                             start_time=body["start_time"],
                             end_time=body["end_time"],
                             status=body["status"],
-                            request=False,
                             app_version=body["application_version"],
                             name=body["workflow_name"],
                             limit=body["limit"],
@@ -176,7 +229,6 @@ class ConductorWebsocket(threading.Thread):
                             start_time=q_body["start_time"],
                             end_time=q_body["end_time"],
                             status=q_body["status"],
-                            request=False,
                             name=q_body["workflow_name"],
                             limit=q_body["limit"],
                             offset=q_body["offset"],
@@ -206,9 +258,7 @@ class ConductorWebsocket(threading.Thread):
                         info = None
                         try:
                             info = get_workflow(
-                                self.dbos._sys_db,
-                                get_workflow_message.workflow_id,
-                                get_request=False,
+                                self.dbos._sys_db, get_workflow_message.workflow_id
                             )
                         except Exception as e:
                             error_message = f"Exception encountered when getting workflow {get_workflow_message.workflow_id}: {traceback.format_exc()}"
@@ -287,8 +337,15 @@ class ConductorWebsocket(threading.Thread):
                         # Still need to send a response to the conductor
                         websocket.send(unknown_message.to_json())
             except ConnectionClosedOK:
-                self.
-
+                if self.evt.is_set():
+                    self.dbos.logger.info("Conductor connection terminated")
+                    break
+                # Otherwise, we are trying to reconnect
+                self.dbos.logger.warning(
+                    "Connection to conductor lost. Reconnecting..."
+                )
+                time.sleep(1)
+                continue
             except ConnectionClosed as e:
                 self.dbos.logger.warning(
                     f"Connection to conductor lost. Reconnecting: {e}"
@@ -309,3 +366,9 @@ class ConductorWebsocket(threading.Thread):
                 )
                 time.sleep(1)
                 continue
+
+        # Wait for the keepalive thread to finish
+        if self.keepalive_thread is not None:
+            if self.pong_event is not None:
+                self.pong_event.set()
+            self.keepalive_thread.join()
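The new keepalive() thread above compensates for the lack of a built-in ping/pong loop in websockets older than 15.0: it pings the conductor every ping_interval seconds and closes the socket if no pong arrives within ping_timeout, forcing the main loop to reconnect. A stripped-down sketch of the same pattern, assuming the synchronous websockets client (websockets.sync.client), whose ping() returns a threading.Event that is set when the matching pong arrives; the URL and function names are illustrative only, not DBOS code:

import threading
import time

from websockets.sync.client import connect


def keepalive(ws, stop: threading.Event, interval: float = 20.0, timeout: float = 15.0) -> None:
    # Ping until asked to stop; close the socket if a pong does not arrive in time.
    while not stop.is_set():
        sent = time.time()
        pong = ws.ping()              # threading.Event, set when the pong is received
        if not pong.wait(timeout):
            ws.close()                # force the receive loop to exit and reconnect
            return
        stop.wait(max(0.0, interval - (time.time() - sent)))


def run_with_keepalive(url: str) -> None:
    stop = threading.Event()
    with connect(url, open_timeout=5, close_timeout=5) as ws:
        threading.Thread(target=keepalive, args=(ws, stop), daemon=True).start()
        try:
            for message in ws:        # receive loop; ends when the connection closes
                print(message)
        finally:
            stop.set()


# run_with_keepalive("wss://example.com/websocket")  # hypothetical endpoint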
dbos/_conductor/protocol.py
CHANGED
@@ -149,7 +149,7 @@ class WorkflowsOutput:
         inputs_str = str(info.input) if info.input is not None else None
         outputs_str = str(info.output) if info.output is not None else None
         error_str = str(info.error) if info.error is not None else None
-        request_str =
+        request_str = None
         roles_str = (
             str(info.authenticated_roles)
             if info.authenticated_roles is not None
dbos/_context.py
CHANGED
@@ -16,7 +16,6 @@ from sqlalchemy.orm import Session
 from dbos._utils import GlobalParams

 from ._logger import dbos_logger
-from ._request import Request
 from ._tracer import dbos_tracer


@@ -76,8 +75,6 @@ class DBOSContext:

         self.logger = dbos_logger

-        self.request: Optional["Request"] = None
-
         self.id_assigned_for_next_workflow: str = ""
         self.is_within_set_workflow_id_block: bool = False

@@ -120,7 +117,6 @@ class DBOSContext:
             if self.authenticated_roles is not None
             else None
         )
-        rv.request = self.request
         rv.assumed_role = self.assumed_role
         return rv

dbos/_core.py
CHANGED
@@ -266,9 +266,6 @@ def _init_workflow(
         "app_id": ctx.app_id,
         "app_version": GlobalParams.app_version,
         "executor_id": ctx.executor_id,
-        "request": (
-            _serialization.serialize(ctx.request) if ctx.request is not None else None
-        ),
         "recovery_attempts": None,
         "authenticated_user": ctx.authenticated_user,
         "authenticated_roles": (
@@ -296,7 +293,7 @@ def _init_workflow(

     if workflow_deadline_epoch_ms is not None:
         evt = threading.Event()
-        dbos.
+        dbos.background_thread_stop_events.append(evt)

         def timeout_func() -> None:
             try:
@@ -443,10 +440,6 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
     )
     with DBOSContextEnsure():
         ctx = assert_current_dbos_context()
-        request = status["request"]
-        ctx.request = (
-            _serialization.deserialize(request) if request is not None else None
-        )
         # If this function belongs to a configured class, add that class instance as its first argument
         if status["config_name"] is not None:
             config_name = status["config_name"]
dbos/_dbos.py
CHANGED
@@ -71,14 +71,11 @@ from ._tracer import DBOSTracer, dbos_tracer
 if TYPE_CHECKING:
     from fastapi import FastAPI
     from ._kafka import _KafkaConsumerWorkflow
-    from ._request import Request
     from flask import Flask

     from sqlalchemy import URL
     from sqlalchemy.orm import Session

-    from ._request import Request
-
 if sys.version_info < (3, 10):
     from typing_extensions import ParamSpec
 else:
@@ -197,7 +194,7 @@ class DBOSRegistry:
         self, evt: threading.Event, func: Callable[..., Any], *args: Any, **kwargs: Any
     ) -> None:
         if self.dbos and self.dbos._launched:
-            self.dbos.
+            self.dbos.poller_stop_events.append(evt)
             self.dbos._executor.submit(func, *args, **kwargs)
         else:
             self.pollers.append((evt, func, args, kwargs))
@@ -247,7 +244,7 @@ class DBOS:
     2. Starting workflow functions
     3. Retrieving workflow status information
     4. Interacting with workflows via events and messages
-    5. Accessing context, including the current user,
+    5. Accessing context, including the current user, SQL session, logger, and tracer

     """

@@ -330,7 +327,10 @@ class DBOS:
         self._registry: DBOSRegistry = _get_or_create_dbos_registry()
         self._registry.dbos = self
         self._admin_server_field: Optional[AdminServer] = None
-
+        # Stop internal background threads (queue thread, timeout threads, etc.)
+        self.background_thread_stop_events: List[threading.Event] = []
+        # Stop pollers (event receivers) that can create new workflows (scheduler, Kafka)
+        self.poller_stop_events: List[threading.Event] = []
         self.fastapi: Optional["FastAPI"] = fastapi
         self.flask: Optional["Flask"] = flask
         self._executor_field: Optional[ThreadPoolExecutor] = None
@@ -502,7 +502,7 @@ class DBOS:

         # Start the queue thread
         evt = threading.Event()
-        self.
+        self.background_thread_stop_events.append(evt)
         bg_queue_thread = threading.Thread(
             target=queue_thread, args=(evt, self), daemon=True
         )
@@ -515,7 +515,7 @@ class DBOS:
             dbos_domain = os.environ.get("DBOS_DOMAIN", "cloud.dbos.dev")
             self.conductor_url = f"wss://{dbos_domain}/conductor/v1alpha1"
             evt = threading.Event()
-            self.
+            self.background_thread_stop_events.append(evt)
             self.conductor_websocket = ConductorWebsocket(
                 self,
                 conductor_url=self.conductor_url,
@@ -527,7 +527,7 @@ class DBOS:

         # Grab any pollers that were deferred and start them
         for evt, func, args, kwargs in self._registry.pollers:
-            self.
+            self.poller_stop_events.append(evt)
             poller_thread = threading.Thread(
                 target=func, args=args, kwargs=kwargs, daemon=True
             )
@@ -583,7 +583,9 @@ class DBOS:

     def _destroy(self) -> None:
         self._initialized = False
-        for event in self.
+        for event in self.poller_stop_events:
+            event.set()
+        for event in self.background_thread_stop_events:
             event.set()
         self._background_event_loop.stop()
         if self._sys_db_field is not None:
@@ -760,7 +762,7 @@ class DBOS:
         """Return the status of a workflow execution."""

         def fn() -> Optional[WorkflowStatus]:
-            return get_workflow(_get_dbos_instance()._sys_db, workflow_id
+            return get_workflow(_get_dbos_instance()._sys_db, workflow_id)

         return _get_dbos_instance()._sys_db.call_function_as_step(fn, "DBOS.getStatus")

@@ -1156,12 +1158,6 @@ class DBOS:
         assert span
         return span

-    @classproperty
-    def request(cls) -> Optional["Request"]:
-        """Return the HTTP `Request`, if any, associated with the current context."""
-        ctx = assert_current_dbos_context()
-        return ctx.request
-
     @classproperty
     def authenticated_user(cls) -> Optional[str]:
         """Return the current authenticated user, if any, associated with the current context."""
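The diff above splits the previous single list of stop events into poller_stop_events (scheduler and Kafka event receivers, which can start new workflows) and background_thread_stop_events (queue, timeout, recovery, and conductor threads). A sketch of the resulting deactivate/shutdown behavior; the class and method names below are illustrative only:

import threading
from typing import List


class ExecutorShutdownSketch:
    """Illustrative only: mirrors the two stop-event lists introduced in the diff."""

    def __init__(self) -> None:
        self.poller_stop_events: List[threading.Event] = []
        self.background_thread_stop_events: List[threading.Event] = []

    def deactivate(self) -> None:
        # The admin "deactivate" endpoint only signals pollers, so in-flight workflows
        # keep running but no new workflows are created (see _admin_server.py above).
        for event in self.poller_stop_events:
            event.set()

    def destroy(self) -> None:
        # Full shutdown signals pollers first, then the internal background threads.
        for event in self.poller_stop_events:
            event.set()
        for event in self.background_thread_stop_events:
            event.set()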
dbos/_fastapi.py
CHANGED
@@ -7,15 +7,9 @@ from fastapi.responses import JSONResponse
 from starlette.types import ASGIApp, Receive, Scope, Send

 from . import DBOS
-from ._context import
-    EnterDBOSHandler,
-    OperationType,
-    SetWorkflowID,
-    TracedAttributes,
-    assert_current_dbos_context,
-)
+from ._context import EnterDBOSHandler, OperationType, SetWorkflowID, TracedAttributes
 from ._error import DBOSException
-from .
+from ._utils import request_id_header


 def _get_or_generate_request_id(request: FastAPIRequest) -> str:
@@ -26,19 +20,6 @@ def _get_or_generate_request_id(request: FastAPIRequest) -> str:
     return str(uuid.uuid4())


-def _make_request(request: FastAPIRequest) -> Request:
-    return Request(
-        headers=request.headers,
-        path_params=request.path_params,
-        query_params=request.query_params,
-        url=str(request.url),
-        base_url=str(request.base_url),
-        client=Address(*request.client) if request.client is not None else None,
-        cookies=request.cookies,
-        method=request.method,
-    )
-
-
 async def _dbos_error_handler(request: FastAPIRequest, gexc: Exception) -> JSONResponse:
     exc: DBOSException = cast(DBOSException, gexc)
     status_code = 500
@@ -96,8 +77,6 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
                 "operationType": OperationType.HANDLER.value,
             }
             with EnterDBOSHandler(attributes):
-                ctx = assert_current_dbos_context()
-                ctx.request = _make_request(request)
                 workflow_id = request.headers.get("dbos-idempotency-key")
                 if workflow_id is not None:
                     # Set the workflow ID for the handler
dbos/_flask.py
CHANGED
@@ -2,17 +2,11 @@ import uuid
 from typing import Any
 from urllib.parse import urlparse

-from flask import Flask
+from flask import Flask
 from werkzeug.wrappers import Request as WRequest

-from ._context import
-
-    OperationType,
-    SetWorkflowID,
-    TracedAttributes,
-    assert_current_dbos_context,
-)
-from ._request import Address, Request, request_id_header
+from ._context import EnterDBOSHandler, OperationType, SetWorkflowID, TracedAttributes
+from ._utils import request_id_header


 class FlaskMiddleware:
@@ -32,8 +26,6 @@ class FlaskMiddleware:
             "operationType": OperationType.HANDLER.value,
         }
         with EnterDBOSHandler(attributes):
-            ctx = assert_current_dbos_context()
-            ctx.request = _make_request(request)
             workflow_id = request.headers.get("dbos-idempotency-key")
             if workflow_id is not None:
                 # Set the workflow ID for the handler
@@ -52,31 +44,5 @@ def _get_or_generate_request_id(request: WRequest) -> str:
     return str(uuid.uuid4())


-def _make_request(request: WRequest) -> Request:
-    parsed_url = urlparse(request.url)
-    base_url = f"{parsed_url.scheme}://{parsed_url.netloc}"
-
-    client = None
-    if request.remote_addr:
-        hostname = request.remote_addr
-        port = request.environ.get("REMOTE_PORT")
-        if port:
-            client = Address(hostname=hostname, port=int(port))
-        else:
-            # If port is not available, use 0 as a placeholder
-            client = Address(hostname=hostname, port=0)
-
-    return Request(
-        headers=dict(request.headers),
-        path_params={},
-        query_params=dict(request.args),
-        url=request.url,
-        base_url=base_url,
-        client=client,
-        cookies=dict(request.cookies),
-        method=request.method,
-    )
-
-
 def setup_flask_middleware(app: Flask) -> None:
     app.wsgi_app = FlaskMiddleware(app.wsgi_app)  # type: ignore
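Both the FastAPI and Flask middlewares now import request_id_header from dbos/_utils instead of the deleted dbos/_request module, and they keep their request-id lookup logic. A hedged sketch of that lookup, assuming the header name keeps the value "x-request-id" it had in the removed module; the function below is illustrative, not the package's code:

import uuid
from typing import Mapping

# Assumed to match the constant now exported from dbos/_utils (formerly dbos/_request).
request_id_header = "x-request-id"


def get_or_generate_request_id(headers: Mapping[str, str]) -> str:
    # Reuse the caller-supplied request id when present; otherwise mint a fresh one.
    request_id = headers.get(request_id_header)
    return request_id if request_id else str(uuid.uuid4())


print(get_or_generate_request_id({"x-request-id": "abc-123"}))  # -> "abc-123"
print(get_or_generate_request_id({}))                           # -> a random UUID4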
dbos/_recovery.py
CHANGED
@@ -29,7 +29,7 @@ def startup_recovery_thread(
 ) -> None:
     """Attempt to recover local pending workflows on startup using a background thread."""
     stop_event = threading.Event()
-    dbos.
+    dbos.background_thread_stop_events.append(stop_event)
     while not stop_event.is_set() and len(pending_workflows) > 0:
         try:
             for pending_workflow in list(pending_workflows):
dbos/_schemas/system_database.py
CHANGED
@@ -29,7 +29,6 @@ class SystemSchema:
         Column("authenticated_user", Text, nullable=True),
         Column("assumed_role", Text, nullable=True),
         Column("authenticated_roles", Text, nullable=True),
-        Column("request", Text, nullable=True),
         Column("output", Text, nullable=True),
         Column("error", Text, nullable=True),
         Column("executor_id", Text, nullable=True),
dbos/_sys_db.py
CHANGED
@@ -106,8 +106,6 @@ class WorkflowStatus:
     app_id: Optional[str]
     # The number of times this workflow's execution has been attempted
     recovery_attempts: Optional[int]
-    # The HTTP request that triggered the workflow, if known
-    request: Optional[str]


 class WorkflowStatusInternal(TypedDict):
@@ -120,7 +118,6 @@ class WorkflowStatusInternal(TypedDict):
     assumed_role: Optional[str]
     authenticated_roles: Optional[str]  # JSON list of roles
     output: Optional[str]  # JSON (jsonpickle)
-    request: Optional[str]  # JSON (jsonpickle)
     error: Optional[str]  # JSON (jsonpickle)
     created_at: Optional[int]  # Unix epoch timestamp in ms
     updated_at: Optional[int]  # Unix epoch timestamp in ms
@@ -372,7 +369,6 @@ class SystemDatabase:
             executor_id=status["executor_id"],
             application_version=status["app_version"],
             application_id=status["app_id"],
-            request=status["request"],
             authenticated_user=status["authenticated_user"],
             authenticated_roles=status["authenticated_roles"],
             assumed_role=status["assumed_role"],
@@ -480,7 +476,6 @@ class SystemDatabase:
             executor_id=status["executor_id"],
             application_version=status["app_version"],
             application_id=status["app_id"],
-            request=status["request"],
             authenticated_user=status["authenticated_user"],
             authenticated_roles=status["authenticated_roles"],
             assumed_role=status["assumed_role"],
@@ -628,7 +623,6 @@ class SystemDatabase:
                 else status["app_version"]
             ),
             application_id=status["app_id"],
-            request=status["request"],
             authenticated_user=status["authenticated_user"],
             authenticated_roles=status["authenticated_roles"],
             assumed_role=status["assumed_role"],
@@ -690,7 +684,6 @@ class SystemDatabase:
            sa.select(
                SystemSchema.workflow_status.c.status,
                SystemSchema.workflow_status.c.name,
-               SystemSchema.workflow_status.c.request,
                SystemSchema.workflow_status.c.recovery_attempts,
                SystemSchema.workflow_status.c.config_name,
                SystemSchema.workflow_status.c.class_name,
@@ -715,21 +708,20 @@ class SystemDatabase:
                "error": None,
                "status": row[0],
                "name": row[1],
-               "
-               "
-               "
-               "
-               "
-               "
-               "
-               "
-               "
-               "
-               "
-               "
-               "
-               "
-               "workflow_timeout_ms": row[16],
+               "recovery_attempts": row[2],
+               "config_name": row[3],
+               "class_name": row[4],
+               "authenticated_user": row[5],
+               "authenticated_roles": row[6],
+               "assumed_role": row[7],
+               "queue_name": row[8],
+               "executor_id": row[9],
+               "created_at": row[10],
+               "updated_at": row[11],
+               "app_version": row[12],
+               "app_id": row[13],
+               "workflow_deadline_epoch_ms": row[14],
+               "workflow_timeout_ms": row[15],
            }
            return status

@@ -805,9 +797,7 @@ class SystemDatabase:
        )
        return inputs

-   def get_workflows(
-       self, input: GetWorkflowsInput, get_request: bool = False
-   ) -> List[WorkflowStatus]:
+   def get_workflows(self, input: GetWorkflowsInput) -> List[WorkflowStatus]:
        """
        Retrieve a list of workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
        """
@@ -815,7 +805,6 @@ class SystemDatabase:
            SystemSchema.workflow_status.c.workflow_uuid,
            SystemSchema.workflow_status.c.status,
            SystemSchema.workflow_status.c.name,
-           SystemSchema.workflow_status.c.request,
            SystemSchema.workflow_status.c.recovery_attempts,
            SystemSchema.workflow_status.c.config_name,
            SystemSchema.workflow_status.c.class_name,
@@ -888,27 +877,26 @@ class SystemDatabase:
            info.workflow_id = row[0]
            info.status = row[1]
            info.name = row[2]
-           info.
-           info.
-           info.
-           info.
-           info.authenticated_user = row[7]
+           info.recovery_attempts = row[3]
+           info.config_name = row[4]
+           info.class_name = row[5]
+           info.authenticated_user = row[6]
            info.authenticated_roles = (
-               json.loads(row[
+               json.loads(row[7]) if row[7] is not None else None
            )
-           info.assumed_role = row[
-           info.queue_name = row[
-           info.executor_id = row[
-           info.created_at = row[
-           info.updated_at = row[
-           info.app_version = row[
-           info.app_id = row[
+           info.assumed_role = row[8]
+           info.queue_name = row[9]
+           info.executor_id = row[10]
+           info.created_at = row[11]
+           info.updated_at = row[12]
+           info.app_version = row[13]
+           info.app_id = row[14]

            inputs, output, exception = _serialization.safe_deserialize(
                info.workflow_id,
-               serialized_input=row[
-               serialized_output=row[
-               serialized_exception=row[
+               serialized_input=row[15],
+               serialized_output=row[16],
+               serialized_exception=row[17],
            )
            info.input = inputs
            info.output = output
@@ -918,7 +906,7 @@ class SystemDatabase:
        return infos

    def get_queued_workflows(
-       self, input: GetQueuedWorkflowsInput
+       self, input: GetQueuedWorkflowsInput
    ) -> List[WorkflowStatus]:
        """
        Retrieve a list of queued workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
@@ -927,7 +915,6 @@ class SystemDatabase:
            SystemSchema.workflow_status.c.workflow_uuid,
            SystemSchema.workflow_status.c.status,
            SystemSchema.workflow_status.c.name,
-           SystemSchema.workflow_status.c.request,
            SystemSchema.workflow_status.c.recovery_attempts,
            SystemSchema.workflow_status.c.config_name,
            SystemSchema.workflow_status.c.class_name,
@@ -996,27 +983,26 @@ class SystemDatabase:
            info.workflow_id = row[0]
            info.status = row[1]
            info.name = row[2]
-           info.
-           info.
-           info.
-           info.
-           info.authenticated_user = row[7]
+           info.recovery_attempts = row[3]
+           info.config_name = row[4]
+           info.class_name = row[5]
+           info.authenticated_user = row[6]
            info.authenticated_roles = (
-               json.loads(row[
+               json.loads(row[7]) if row[7] is not None else None
            )
-           info.assumed_role = row[
-           info.queue_name = row[
-           info.executor_id = row[
-           info.created_at = row[
-           info.updated_at = row[
-           info.app_version = row[
-           info.app_id = row[
+           info.assumed_role = row[8]
+           info.queue_name = row[9]
+           info.executor_id = row[10]
+           info.created_at = row[11]
+           info.updated_at = row[12]
+           info.app_version = row[13]
+           info.app_id = row[14]

            inputs, output, exception = _serialization.safe_deserialize(
                info.workflow_id,
-               serialized_input=row[
-               serialized_output=row[
-               serialized_exception=row[
+               serialized_input=row[15],
+               serialized_output=row[16],
+               serialized_exception=row[17],
            )
            info.input = inputs
            info.output = output
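Because the request column was dropped from the SELECT lists above, every later positional index (row[3] onward) shifts down by one, which is why the row[...] assignments were renumbered throughout this file. A hedged, self-contained illustration of that coupling, using a toy SQLAlchemy table (not the DBOS schema) and showing name-based access via Row._mapping as the index-stable alternative:

import sqlalchemy as sa

engine = sa.create_engine("sqlite+pysqlite:///:memory:")
meta = sa.MetaData()
status_table = sa.Table(
    "workflow_status_demo",
    meta,
    sa.Column("workflow_uuid", sa.Text),
    sa.Column("status", sa.Text),
    sa.Column("name", sa.Text),
    sa.Column("recovery_attempts", sa.Integer),
)
meta.create_all(engine)

with engine.begin() as conn:
    conn.execute(status_table.insert().values(
        workflow_uuid="wf-1", status="SUCCESS", name="example", recovery_attempts=1
    ))
    row = conn.execute(
        sa.select(status_table.c.status, status_table.c.name, status_table.c.recovery_attempts)
    ).one()
    print(row[2])                             # positional: shifts whenever the SELECT list changes
    print(row._mapping["recovery_attempts"])  # name-based: unaffected by column removal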
dbos/_utils.py
CHANGED
dbos/_workflow_commands.py
CHANGED
@@ -27,7 +27,6 @@ def list_workflows(
     limit: Optional[int] = None,
     offset: Optional[int] = None,
     sort_desc: bool = False,
-    request: bool = False,
     workflow_id_prefix: Optional[str] = None,
 ) -> List[WorkflowStatus]:
     input = GetWorkflowsInput()
@@ -43,7 +42,7 @@ def list_workflows(
     input.sort_desc = sort_desc
     input.workflow_id_prefix = workflow_id_prefix

-    infos: List[WorkflowStatus] = sys_db.get_workflows(input
+    infos: List[WorkflowStatus] = sys_db.get_workflows(input)

     return infos

@@ -59,7 +58,6 @@ def list_queued_workflows(
     limit: Optional[int] = None,
     offset: Optional[int] = None,
     sort_desc: bool = False,
-    request: bool = False,
 ) -> List[WorkflowStatus]:
     input: GetQueuedWorkflowsInput = {
         "queue_name": queue_name,
@@ -72,17 +70,15 @@ def list_queued_workflows(
         "sort_desc": sort_desc,
     }

-    infos: List[WorkflowStatus] = sys_db.get_queued_workflows(input
+    infos: List[WorkflowStatus] = sys_db.get_queued_workflows(input)
     return infos


-def get_workflow(
-    sys_db: SystemDatabase, workflow_id: str, get_request: bool
-) -> Optional[WorkflowStatus]:
+def get_workflow(sys_db: SystemDatabase, workflow_id: str) -> Optional[WorkflowStatus]:
     input = GetWorkflowsInput()
     input.workflow_ids = [workflow_id]

-    infos: List[WorkflowStatus] = sys_db.get_workflows(input
+    infos: List[WorkflowStatus] = sys_db.get_workflows(input)
     if not infos:
         return None

{dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 0.28.
+Version: 0.28.0a7
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
@@ -22,7 +22,7 @@ Requires-Dist: docker>=7.1.0
 Requires-Dist: cryptography>=43.0.3
 Requires-Dist: rich>=13.9.4
 Requires-Dist: pyjwt>=2.10.1
-Requires-Dist: websockets>=
+Requires-Dist: websockets>=14.0
 Description-Content-Type: text/markdown


{dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/RECORD
CHANGED
@@ -1,26 +1,26 @@
-dbos-0.28.
-dbos-0.28.
-dbos-0.28.
-dbos-0.28.
+dbos-0.28.0a7.dist-info/METADATA,sha256=qUA4yCy56WYU15e1V8V2Pb_xNM9ccKpov9vvnfMGYYM,13268
+dbos-0.28.0a7.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-0.28.0a7.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.28.0a7.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=-FdBlOlr-f2tY__C23J4v22MoCAXqcDN_-zXsJXdoZ0,1005
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=
+dbos/_admin_server.py,sha256=CM02jyC9H21fM7Pjn1BhPxNwAOV7CXmMJd0SdaNq8dQ,9062
 dbos/_app_db.py,sha256=3j8_5-MlSDY0otLRszFE-GfenU6JC20fcfSL-drSNYk,11800
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=
-dbos/_conductor/conductor.py,sha256=
-dbos/_conductor/protocol.py,sha256=
-dbos/_context.py,sha256=
-dbos/_core.py,sha256=
+dbos/_client.py,sha256=mGrricoU6437QM4SWV-6Vm806AgVru8ygKgGgK1LZGA,13823
+dbos/_conductor/conductor.py,sha256=L8va6dGae0BlH43AHWAAW8_z04QUYPWPhjUK64sn3P4,19692
+dbos/_conductor/protocol.py,sha256=jwX8ZjmAIlXu1vw9R3b5PfHSNdwofeYOKj8rkfAFVg0,6630
+dbos/_context.py,sha256=Ly1CXF1nWxICQgIpDZSaONGlz1yERBs63gqmR-yqCzM,24476
+dbos/_core.py,sha256=UDpSgRA9m_YuViNXR9tVgNFLC-zxKZPxjlkj2a-Kj00,48317
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=nne4oaIEC0tR2OV766y7mjTLjGaqKz7EnuY-T2CbTtc,48431
 dbos/_dbos_config.py,sha256=L0Z0OOB5FoPM9g-joZqXGeJnlxWQsEUtgPtgtg9Uf48,21732
 dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
 dbos/_docker_pg_helper.py,sha256=NmcgqmR5rQA_4igfeqh8ugNT2z3YmoOvuep_MEtxTiY,5854
 dbos/_error.py,sha256=EN4eVBjMT3k7O7hfqJl6mIf4sxWPsiAOM086yhcGH_g,8012
 dbos/_event_loop.py,sha256=NmaLbEQFfEK36S_0KhVD39YdYrGce3qSKCTJ-5RqKQ0,2136
-dbos/_fastapi.py,sha256=
-dbos/_flask.py,sha256=
+dbos/_fastapi.py,sha256=m4SL3H9P-NBQ_ZrbFxAWMOqNyIi3HGEn2ODR7xAK038,3118
+dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
 dbos/_kafka.py,sha256=pz0xZ9F3X9Ky1k-VSbeF3tfPhP3UPr3lUUhUfE41__U,4198
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
 dbos/_logger.py,sha256=qv2srteCF2rSRjCK1VGOck3ieIkwUe9Lvbv60mJc16E,4069
@@ -39,16 +39,15 @@ dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT
 dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
 dbos/_outcome.py,sha256=EXxBg4jXCVJsByDQ1VOCIedmbeq_03S6d-p1vqQrLFU,6810
 dbos/_queue.py,sha256=aKCGahWBGJOLOv5PCOOId96Va3YQ4ICuHWXy-eQXohE,3526
-dbos/_recovery.py,sha256=
+dbos/_recovery.py,sha256=jVMexjfCCNopzyn8gVQzJCmGJaP9G3C1EFaoCQ_Nh7g,2564
 dbos/_registrations.py,sha256=EZzG3ZfYmWA2bHX2hpnSIQ3PTi3-cXsvbcmXjyOusMk,7302
-dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
 dbos/_roles.py,sha256=iOsgmIAf1XVzxs3gYWdGRe1B880YfOw5fpU7Jwx8_A8,2271
 dbos/_scheduler.py,sha256=SR1oRZRcVzYsj-JauV2LA8JtwTkt8mru7qf6H1AzQ1U,2027
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
-dbos/_schemas/system_database.py,sha256=
+dbos/_schemas/system_database.py,sha256=3Z0L72bOgHnusK1hBaETWU9RfiLBP0QnS-fdu41i0yY,5835
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=GjRTrN64jUMNUnEhLSkqGMiwJWOZhB9WFeuihJs35aM,82163
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -60,12 +59,12 @@ dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TA
 dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=MpS7LGaJS0CpvsjhfDkp9EJqvMvVCjRPfUp4c0aE2ys,941
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=yN6GRDKu_1p-EqtQLNarMocPfga2ZuqpzStzzSPYhzo,2732
-dbos/_utils.py,sha256=
-dbos/_workflow_commands.py,sha256=
+dbos/_utils.py,sha256=UbpMYRBSyvJqdXeWAnfSw8xXM1R1mfnyl1oTunhEjJM,513
+dbos/_workflow_commands.py,sha256=2E8FRUv_nLYkpBTwfhh_ELhySYpMrm8qGB9J44g6DSE,3872
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
 dbos/cli/cli.py,sha256=gXKELYAK9_CTejQ-WbNEIqnEYByJndXHDYSX4naFg8g,19106
 dbos/dbos-config.schema.json,sha256=8KcwJb_sQc4-6tQG2TLmjE_nratfrQa0qVLl9XPsvWE,6367
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.28.
+dbos-0.28.0a7.dist-info/RECORD,,
dbos/_request.py
DELETED
@@ -1,35 +0,0 @@
-from dataclasses import dataclass
-from typing import Any, Mapping, NamedTuple, Optional
-
-request_id_header = "x-request-id"
-
-
-class Address(NamedTuple):
-    hostname: str
-    port: int
-
-
-@dataclass
-class Request:
-    """
-    Serializable HTTP Request object.
-
-    Attributes:
-        base_url(str): Base of URL requested, as in application code
-        client(Optional[Address]): HTTP Client
-        cookies(Mapping[str, str]): HTTP Cookies
-        headers(Mapping[str, str]): HTTP headers
-        method(str): HTTP verb
-        path_params(Mapping[str, Any]): Parameters extracted from URL path sections
-        query_params(Mapping[str, str]): URL query string parameters
-        url(str): Full URL accessed
-    """
-
-    headers: Mapping[str, str]
-    path_params: Mapping[str, Any]
-    query_params: Mapping[str, str]
-    url: str
-    base_url: str
-    client: Optional[Address]
-    cookies: Mapping[str, str]
-    method: str
{dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/WHEEL
File without changes
{dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/entry_points.txt
File without changes
{dbos-0.28.0a4.dist-info → dbos-0.28.0a7.dist-info}/licenses/LICENSE
File without changes