dbos 0.26.0a13-py3-none-any.whl → 0.26.0a15-py3-none-any.whl
This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- dbos/_admin_server.py +49 -5
- dbos/_app_db.py +56 -0
- dbos/_core.py +3 -1
- dbos/_dbos.py +40 -7
- dbos/_event_loop.py +67 -0
- dbos/_sys_db.py +48 -3
- dbos/_workflow_commands.py +1 -0
- dbos/cli/cli.py +49 -2
- {dbos-0.26.0a13.dist-info → dbos-0.26.0a15.dist-info}/METADATA +1 -1
- {dbos-0.26.0a13.dist-info → dbos-0.26.0a15.dist-info}/RECORD +13 -12
- {dbos-0.26.0a13.dist-info → dbos-0.26.0a15.dist-info}/WHEEL +0 -0
- {dbos-0.26.0a13.dist-info → dbos-0.26.0a15.dist-info}/entry_points.txt +0 -0
- {dbos-0.26.0a13.dist-info → dbos-0.26.0a15.dist-info}/licenses/LICENSE +0 -0
dbos/_admin_server.py
CHANGED
@@ -7,6 +7,7 @@ from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
 from typing import TYPE_CHECKING, Any, List, TypedDict
 
+from ._error import DBOSException
 from ._logger import dbos_logger
 from ._recovery import recover_pending_workflows
 from ._utils import GlobalParams
@@ -22,6 +23,7 @@ _workflow_queues_metadata_path = "/dbos-workflow-queues-metadata"
 # /workflows/:workflow_id/resume
 # /workflows/:workflow_id/restart
 # /workflows/:workflow_id/steps
+# /workflows/:workflow_id/fork
 
 
 class AdminServer:
@@ -123,6 +125,9 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
         restart_match = re.match(
             r"^/workflows/(?P<workflow_id>[^/]+)/restart$", self.path
         )
+        fork_match = re.match(
+            r"^/workflows/(?P<workflow_id>[^/]+)/fork$", self.path
+        )
         resume_match = re.match(
             r"^/workflows/(?P<workflow_id>[^/]+)/resume$", self.path
         )
@@ -130,7 +135,23 @@
             r"^/workflows/(?P<workflow_id>[^/]+)/cancel$", self.path
         )
 
-        if restart_match:
+        if fork_match:
+            workflow_id = fork_match.group("workflow_id")
+            try:
+                data = json.loads(post_data.decode("utf-8"))
+                start_step: int = data.get("start_step", 1)
+                self._handle_fork(workflow_id, start_step)
+            except (json.JSONDecodeError, AttributeError) as e:
+                self.send_response(500)
+                self.send_header("Content-Type", "application/json")
+                self.end_headers()
+                self.wfile.write(
+                    json.dumps({"error": f"Invalid JSON input: {str(e)}"}).encode(
+                        "utf-8"
+                    )
+                )
+                return
+        elif restart_match:
             workflow_id = restart_match.group("workflow_id")
             self._handle_restart(workflow_id)
         elif resume_match:
@@ -147,10 +168,33 @@
             return  # Disable admin server request logging
 
     def _handle_restart(self, workflow_id: str) -> None:
-        print(f"Restarting workflow {workflow_id}")
-        self.dbos.restart_workflow(workflow_id)
-        self.send_response(204)
-        self._end_headers()
+        try:
+            print(f"Restarting workflow {workflow_id}")
+            self.dbos.restart_workflow(workflow_id)
+            self.send_response(204)
+            self._end_headers()
+        except DBOSException as e:
+            print(f"Error restarting workflow: {e}")
+            self.send_response(500)
+            response_body = json.dumps({"error": str(e)}).encode("utf-8")
+            self.send_header("Content-Type", "application/json")
+            self.send_header("Content-Length", str(len(response_body)))
+            self.end_headers()
+            self.wfile.write(response_body)
+
+    def _handle_fork(self, workflow_id: str, start_step: int) -> None:
+        try:
+            self.dbos.fork_workflow(workflow_id, start_step)
+            self.send_response(204)
+            self._end_headers()
+        except DBOSException as e:
+            print(f"Error forking workflow: {e}")
+            self.send_response(500)
+            response_body = json.dumps({"error": str(e)}).encode("utf-8")
+            self.send_header("Content-Type", "application/json")
+            self.send_header("Content-Length", str(len(response_body)))
+            self.end_headers()
+            self.wfile.write(response_body)
 
     def _handle_resume(self, workflow_id: str) -> None:
         print("Resuming workflow", workflow_id)
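The new fork endpoint accepts a JSON body with an optional start_step (defaulting to 1) and answers 204 on success or 500 with a JSON error otherwise. A minimal sketch of calling it, assuming an admin server on the default localhost:3001 and a hypothetical workflow ID:

    import requests

    # POST /workflows/<id>/fork; "start_step" is optional and defaults to 1.
    resp = requests.post(
        "http://localhost:3001/workflows/my-workflow-id/fork",
        json={"start_step": 2},
        timeout=5,
    )
    if resp.status_code == 204:
        print("Workflow forked")
    else:
        # On failure the handler writes {"error": ...} as application/json.
        print("Fork failed:", resp.json().get("error", "Unknown error"))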
dbos/_app_db.py
CHANGED
@@ -228,3 +228,59 @@ class ApplicationDatabase:
             )
             for row in rows
         ]
+
+    def get_max_function_id(self, workflow_uuid: str) -> Optional[int]:
+        with self.engine.begin() as conn:
+            max_function_id_row = conn.execute(
+                sa.select(
+                    sa.func.max(ApplicationSchema.transaction_outputs.c.function_id)
+                ).where(
+                    ApplicationSchema.transaction_outputs.c.workflow_uuid
+                    == workflow_uuid
+                )
+            ).fetchone()
+
+            max_function_id = max_function_id_row[0] if max_function_id_row else None
+
+            return max_function_id
+
+    def clone_workflow_transactions(
+        self, src_workflow_id: str, forked_workflow_id: str, start_step: int
+    ) -> None:
+        """
+        Copies all steps from dbos.transctions_outputs where function_id < input function_id
+        into a new workflow_uuid. Returns the new workflow_uuid.
+        """
+
+        with self.engine.begin() as conn:
+
+            insert_stmt = sa.insert(ApplicationSchema.transaction_outputs).from_select(
+                [
+                    "workflow_uuid",
+                    "function_id",
+                    "output",
+                    "error",
+                    "txn_id",
+                    "txn_snapshot",
+                    "executor_id",
+                    "function_name",
+                ],
+                sa.select(
+                    sa.literal(forked_workflow_id).label("workflow_uuid"),
+                    ApplicationSchema.transaction_outputs.c.function_id,
+                    ApplicationSchema.transaction_outputs.c.output,
+                    ApplicationSchema.transaction_outputs.c.error,
+                    ApplicationSchema.transaction_outputs.c.txn_id,
+                    ApplicationSchema.transaction_outputs.c.txn_snapshot,
+                    ApplicationSchema.transaction_outputs.c.executor_id,
+                    ApplicationSchema.transaction_outputs.c.function_name,
+                ).where(
+                    (
+                        ApplicationSchema.transaction_outputs.c.workflow_uuid
+                        == src_workflow_id
+                    )
+                    & (ApplicationSchema.transaction_outputs.c.function_id < start_step)
+                ),
+            )
+
+            conn.execute(insert_stmt)
dbos/_core.py
CHANGED
@@ -365,7 +365,9 @@ def _execute_workflow_wthread(
         if isinstance(result, Immediate):
             return cast(Immediate[R], result)()
         else:
-            return asyncio.run(cast(Pending[R], result)())
+            return dbos._background_event_loop.submit_coroutine(
+                cast(Pending[R], result)()
+            )
     except Exception:
         dbos.logger.error(
             f"Exception encountered in asynchronous workflow: {traceback.format_exc()}"
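With this change, a coroutine workflow result is no longer run on a fresh event loop inside the worker thread (previously via asyncio.run, as the removed line appears to show); it is instead submitted to the shared background loop, and submit_coroutine blocks the worker thread until the coroutine finishes there (see dbos/_event_loop.py below).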
dbos/_dbos.py
CHANGED
@@ -4,7 +4,6 @@ import asyncio
 import atexit
 import hashlib
 import inspect
-import json
 import os
 import sys
 import threading
@@ -31,7 +30,6 @@ from typing import (
 
 from opentelemetry.trace import Span
 
-from dbos import _serialization
 from dbos._conductor.conductor import ConductorWebsocket
 from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 from dbos._workflow_commands import (
@@ -112,6 +110,7 @@ from ._error import (
     DBOSException,
     DBOSNonExistentWorkflowError,
 )
+from ._event_loop import BackgroundEventLoop
 from ._logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
 from ._sys_db import SystemDatabase
 from ._workflow_commands import WorkflowStatus, get_workflow
@@ -341,6 +340,7 @@ class DBOS:
         self.conductor_url: Optional[str] = conductor_url
         self.conductor_key: Optional[str] = conductor_key
         self.conductor_websocket: Optional[ConductorWebsocket] = None
+        self._background_event_loop: BackgroundEventLoop = BackgroundEventLoop()
 
         init_logger()
 
@@ -451,6 +451,7 @@ class DBOS:
         dbos_logger.info(f"Executor ID: {GlobalParams.executor_id}")
         dbos_logger.info(f"Application version: {GlobalParams.app_version}")
         self._executor_field = ThreadPoolExecutor(max_workers=64)
+        self._background_event_loop.start()
         self._sys_db_field = SystemDatabase(
             self.config["database"], debug_mode=debug_mode
         )
@@ -568,6 +569,7 @@ class DBOS:
         self._initialized = False
         for event in self.stop_events:
             event.set()
+        self._background_event_loop.stop()
         if self._sys_db_field is not None:
             self._sys_db_field.destroy()
             self._sys_db_field = None
@@ -958,14 +960,45 @@
     def restart_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
         """Restart a workflow with a new workflow ID"""
 
+        return cls.fork_workflow(workflow_id, 1)
+
+    @classmethod
+    def fork_workflow(
+        cls, workflow_id: str, start_step: int = 1
+    ) -> WorkflowHandle[Any]:
+        """Restart a workflow with a new workflow ID"""
+
+        def get_max_function_id(workflow_uuid: str) -> int:
+            max_transactions = (
+                _get_dbos_instance()._app_db.get_max_function_id(workflow_uuid) or 0
+            )
+            max_operations = (
+                _get_dbos_instance()._sys_db.get_max_function_id(workflow_uuid) or 0
+            )
+            return max(max_transactions, max_operations)
+
+        max_function_id = get_max_function_id(workflow_id)
+        if max_function_id > 0 and start_step > max_function_id:
+            raise DBOSException(
+                f"Cannot fork workflow {workflow_id} at step {start_step}. The workflow has {max_function_id} steps."
+            )
+
         def fn() -> str:
-
-
+            forked_workflow_id = str(uuid.uuid4())
+            dbos_logger.info(f"Forking workflow: {workflow_id} from step {start_step}")
+
+            _get_dbos_instance()._app_db.clone_workflow_transactions(
+                workflow_id, forked_workflow_id, start_step
+            )
+
+            return _get_dbos_instance()._sys_db.fork_workflow(
+                workflow_id, forked_workflow_id, start_step
+            )
 
-
-            fn, "DBOS.
+        new_id = _get_dbos_instance()._sys_db.call_function_as_step(
+            fn, "DBOS.forkWorkflow"
+        )
-        return cls.retrieve_workflow(
+        return cls.retrieve_workflow(new_id)
 
     @classmethod
     def list_workflows(
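A sketch of how the new API might be called from application code (the workflow ID and step are hypothetical); note that restart_workflow is now simply fork_workflow(workflow_id, 1):

    from dbos import DBOS

    # Re-execute an existing workflow from step 3 onward under a new workflow ID;
    # steps 1 and 2 are replayed from the recorded outputs of the original run.
    handle = DBOS.fork_workflow("original-workflow-id", start_step=3)
    result = handle.get_result()  # block until the forked run completes

Requesting a start_step beyond the highest recorded function_id raises a DBOSException before anything is cloned.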
dbos/_event_loop.py
ADDED
@@ -0,0 +1,67 @@
+import asyncio
+import threading
+from typing import Any, Coroutine, Optional, TypeVar
+
+
+class BackgroundEventLoop:
+    """
+    This is the event loop to which DBOS submits any coroutines that are not started from within an event loop.
+    In particular, coroutines submitted to queues (such as from scheduled workflows) run on this event loop.
+    """
+
+    def __init__(self) -> None:
+        self._loop: Optional[asyncio.AbstractEventLoop] = None
+        self._thread: Optional[threading.Thread] = None
+        self._running = False
+        self._ready = threading.Event()
+
+    def start(self) -> None:
+        if self._running:
+            return
+
+        self._thread = threading.Thread(target=self._run_event_loop, daemon=True)
+        self._thread.start()
+        self._ready.wait()  # Wait until the loop is running
+
+    def stop(self) -> None:
+        if not self._running or self._loop is None or self._thread is None:
+            return
+
+        asyncio.run_coroutine_threadsafe(self._shutdown(), self._loop)
+        self._thread.join()
+        self._running = False
+
+    def _run_event_loop(self) -> None:
+        self._loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(self._loop)
+
+        self._running = True
+        self._ready.set()  # Signal that the loop is ready
+
+        try:
+            self._loop.run_forever()
+        finally:
+            self._loop.close()
+
+    async def _shutdown(self) -> None:
+        if self._loop is None:
+            raise RuntimeError("Event loop not started")
+        tasks = [
+            task
+            for task in asyncio.all_tasks(self._loop)
+            if task is not asyncio.current_task(self._loop)
+        ]
+
+        for task in tasks:
+            task.cancel()
+
+        await asyncio.gather(*tasks, return_exceptions=True)
+        self._loop.stop()
+
+    T = TypeVar("T")
+
+    def submit_coroutine(self, coro: Coroutine[Any, Any, T]) -> T:
+        """Submit a coroutine to the background event loop"""
+        if self._loop is None:
+            raise RuntimeError("Event loop not started")
+        return asyncio.run_coroutine_threadsafe(coro, self._loop).result()
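A minimal standalone sketch of using the new class outside of DBOS; submit_coroutine schedules the coroutine onto the loop thread and blocks the caller until the result is ready:

    import asyncio

    from dbos._event_loop import BackgroundEventLoop

    async def add(a: int, b: int) -> int:
        await asyncio.sleep(0.1)  # stand-in for real async work
        return a + b

    loop = BackgroundEventLoop()
    loop.start()  # spawns the daemon thread and waits until the loop runs
    try:
        print(loop.submit_coroutine(add(2, 3)))  # prints 5
    finally:
        loop.stop()  # cancels outstanding tasks and joins the thread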
dbos/_sys_db.py
CHANGED
@@ -489,15 +489,29 @@ class SystemDatabase:
             .values(status=WorkflowStatusString.ENQUEUED.value, recovery_attempts=0)
         )
 
-    def fork_workflow(self, original_workflow_id: str) -> str:
+    def get_max_function_id(self, workflow_uuid: str) -> Optional[int]:
+        with self.engine.begin() as conn:
+            max_function_id_row = conn.execute(
+                sa.select(
+                    sa.func.max(SystemSchema.operation_outputs.c.function_id)
+                ).where(SystemSchema.operation_outputs.c.workflow_uuid == workflow_uuid)
+            ).fetchone()
+
+            max_function_id = max_function_id_row[0] if max_function_id_row else None
+
+            return max_function_id
+
+    def fork_workflow(
+        self, original_workflow_id: str, forked_workflow_id: str, start_step: int = 1
+    ) -> str:
+
         status = self.get_workflow_status(original_workflow_id)
         if status is None:
             raise Exception(f"Workflow {original_workflow_id} not found")
         inputs = self.get_workflow_inputs(original_workflow_id)
         if inputs is None:
             raise Exception(f"Workflow {original_workflow_id} not found")
-
-        forked_workflow_id = str(uuid.uuid4())
+
         with self.engine.begin() as c:
             # Create an entry for the forked workflow with the same
             # initial values as the original.
@@ -524,6 +538,37 @@
                     inputs=_serialization.serialize_args(inputs),
                 )
             )
+
+            if start_step > 1:
+
+                # Copy the original workflow's outputs into the forked workflow
+                insert_stmt = sa.insert(SystemSchema.operation_outputs).from_select(
+                    [
+                        "workflow_uuid",
+                        "function_id",
+                        "output",
+                        "error",
+                        "function_name",
+                        "child_workflow_id",
+                    ],
+                    sa.select(
+                        sa.literal(forked_workflow_id).label("workflow_uuid"),
+                        SystemSchema.operation_outputs.c.function_id,
+                        SystemSchema.operation_outputs.c.output,
+                        SystemSchema.operation_outputs.c.error,
+                        SystemSchema.operation_outputs.c.function_name,
+                        SystemSchema.operation_outputs.c.child_workflow_id,
+                    ).where(
+                        (
+                            SystemSchema.operation_outputs.c.workflow_uuid
+                            == original_workflow_id
+                        )
+                        & (SystemSchema.operation_outputs.c.function_id < start_step)
+                    ),
+                )
+
+                c.execute(insert_stmt)
+
             # Enqueue the forked workflow on the internal queue
             c.execute(
                 pg.insert(SystemSchema.workflow_queue).values(
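A worked illustration of the copy semantics, with hypothetical step numbers: forking at start_step=2 copies only rows with function_id < 2, so step 1's recorded output is reused while steps 2 and onward re-execute in the forked run:

    # Hypothetical recorded outputs for a three-step workflow.
    recorded = {1: "step-1-output", 2: "step-2-output", 3: "step-3-output"}
    start_step = 2

    # fork_workflow copies rows with function_id < start_step into the fork...
    copied = {fid: out for fid, out in recorded.items() if fid < start_step}
    assert copied == {1: "step-1-output"}
    # ...then enqueues the forked workflow on the internal queue, where the
    # remaining steps (2 and 3 here) run again.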
dbos/_workflow_commands.py
CHANGED
dbos/cli/cli.py
CHANGED
@@ -433,13 +433,60 @@ def restart(
     ] = 3001,
 ) -> None:
     response = requests.post(
-        f"http://{host}:{port}/workflows/{uuid}/restart"
+        f"http://{host}:{port}/workflows/{uuid}/restart",
+        json=[],
+        timeout=5,
     )
 
     if response.status_code == 204:
         print(f"Workflow {uuid} has been restarted")
     else:
-
+        error_message = response.json().get("error", "Unknown error")
+        print(
+            f"Failed to restart workflow {uuid}. "
+            f"Status code: {response.status_code}. "
+            f"Error: {error_message}"
+        )
+
+
+@workflow.command(
+    help="fork a workflow from the beginning with a new id and from a step"
+)
+def fork(
+    uuid: Annotated[str, typer.Argument()],
+    host: Annotated[
+        typing.Optional[str],
+        typer.Option("--host", "-H", help="Specify the admin host"),
+    ] = "localhost",
+    port: Annotated[
+        typing.Optional[int],
+        typer.Option("--port", "-p", help="Specify the admin port"),
+    ] = 3001,
+    step: Annotated[
+        typing.Optional[int],
+        typer.Option(
+            "--step",
+            "-s",
+            help="Restart from this step (default: first step)",
+        ),
+    ] = 1,
+) -> None:
+    print(f"Forking workflow {uuid} from step {step}")
+    response = requests.post(
+        f"http://{host}:{port}/workflows/{uuid}/fork",
+        json={"start_step": step},
+        timeout=5,
+    )
+
+    if response.status_code == 204:
+        print(f"Workflow {uuid} has been forked")
+    else:
+        error_message = response.json().get("error", "Unknown error")
+        print(
+            f"Failed to fork workflow {uuid}. "
+            f"Status code: {response.status_code}. "
+            f"Error: {error_message}"
+        )
 
 
 @queue.command(name="list", help="List enqueued functions for your application")
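Given the command group shown above, forking from the command line presumably looks like "dbos workflow fork <workflow-id> --step 2", which POSTs {"start_step": 2} to the admin server (default localhost:3001) and prints the returned JSON error on any non-204 response.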
{dbos-0.26.0a13.dist-info → dbos-0.26.0a15.dist-info}/RECORD
CHANGED
@@ -1,23 +1,24 @@
-dbos-0.26.
-dbos-0.26.
-dbos-0.26.
-dbos-0.26.
+dbos-0.26.0a15.dist-info/METADATA,sha256=-vvSiXF-3_KrI9sYYJ7xD6HQXDVGyI0c0F1Jo6rZQoM,5554
+dbos-0.26.0a15.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-0.26.0a15.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-0.26.0a15.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=3NQfGlBiiUSM_v88STdVP3rNZvGkUL_9WbSotKb8Voo,873
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=
-dbos/_app_db.py,sha256=
+dbos/_admin_server.py,sha256=RrbABfR1D3p9c_QLrCSrgFuYce6FKi0fjMRIYLjO_Y8,9038
+dbos/_app_db.py,sha256=Q9lEyCJFoZMTlnjMO8Pj8bczVmVWyDOP8qPQ6l5PpEU,11241
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_client.py,sha256=5iaoFsu5wAqwjjj3EWusZ1eDbBAW8FwYazhokdCJ9h4,10964
 dbos/_conductor/conductor.py,sha256=HYzVL29IMMrs2Mnms_7cHJynCnmmEN5SDQOMjzn3UoU,16840
 dbos/_conductor/protocol.py,sha256=xN7pmooyF1pqbH1b6WhllU5718P7zSb_b0KCwA6bzcs,6716
 dbos/_context.py,sha256=I8sLkdKTTkZEz7wG-MjynaQB6XEF2bLXuwNksiauP7w,19430
-dbos/_core.py,sha256=
+dbos/_core.py,sha256=de8GecFmW5DNf5dYfnpSX3IDO24Wc6pBpCC1VZ1iVyI,45505
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=
+dbos/_dbos.py,sha256=xl5swjxBSRfCPn_8_ZagOdmjPYf1SDjtla2sAH4v0dY,47390
 dbos/_dbos_config.py,sha256=m05IFjM0jSwZBsnFMF_4qP2JkjVFc0gqyM2tnotXq20,20636
 dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
 dbos/_docker_pg_helper.py,sha256=NmcgqmR5rQA_4igfeqh8ugNT2z3YmoOvuep_MEtxTiY,5854
 dbos/_error.py,sha256=9ITvFsN_Udpx0xXtYQHXXXb6PjPr3TmMondGmprV-L0,7003
+dbos/_event_loop.py,sha256=NmaLbEQFfEK36S_0KhVD39YdYrGce3qSKCTJ-5RqKQ0,2136
 dbos/_fastapi.py,sha256=PhaKftbApHnjtYEOw0EYna_3K0cmz__J9of7mRJWzu4,3704
 dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
 dbos/_kafka.py,sha256=pz0xZ9F3X9Ky1k-VSbeF3tfPhP3UPr3lUUhUfE41__U,4198
@@ -44,7 +45,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=W9eSpL7SZzQkxcEZ4W07BOcwkkDr35b9oCjUOgfHWek,5336
 dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=uQO45HVR6bS1Fa_iH8uQKnFPFuNTLM0obPXtLuKd_vc,70117
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -57,11 +58,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=dFDSFlta-rfA3-ahIRLYwnnoAOmlavdxAGllqwFgnCA,2440
 dbos/_utils.py,sha256=nFRUHzVjXG5AusF85AlYHikj63Tzi-kQm992ihsrAxA,201
-dbos/_workflow_commands.py,sha256=
+dbos/_workflow_commands.py,sha256=YJamxSQqI0pQMKo-G1aGJkLHB2JMXaijlBaHabq11kg,6165
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
-dbos/cli/cli.py,sha256=
+dbos/cli/cli.py,sha256=1qCTs__A9LOEfU44XZ6TufwmRwe68ZEwbWEPli3vnVM,17873
 dbos/dbos-config.schema.json,sha256=i7jcxXqByKq0Jzv3nAUavONtj03vTwj6vWP4ylmBr8o,5694
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-0.26.
+dbos-0.26.0a15.dist-info/RECORD,,
{dbos-0.26.0a13.dist-info → dbos-0.26.0a15.dist-info}/WHEEL: file without changes
{dbos-0.26.0a13.dist-info → dbos-0.26.0a15.dist-info}/entry_points.txt: file without changes
{dbos-0.26.0a13.dist-info → dbos-0.26.0a15.dist-info}/licenses/LICENSE: file without changes