dbos 1.4.1__py3-none-any.whl → 1.5.0a2__py3-none-any.whl
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- dbos/_admin_server.py +21 -0
- dbos/_app_db.py +18 -0
- dbos/_sys_db.py +56 -0
- dbos/_workflow_commands.py +36 -2
- {dbos-1.4.1.dist-info → dbos-1.5.0a2.dist-info}/METADATA +1 -1
- {dbos-1.4.1.dist-info → dbos-1.5.0a2.dist-info}/RECORD +9 -9
- {dbos-1.4.1.dist-info → dbos-1.5.0a2.dist-info}/WHEEL +0 -0
- {dbos-1.4.1.dist-info → dbos-1.5.0a2.dist-info}/entry_points.txt +0 -0
- {dbos-1.4.1.dist-info → dbos-1.5.0a2.dist-info}/licenses/LICENSE +0 -0
dbos/_admin_server.py
CHANGED
@@ -7,6 +7,8 @@ from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
 from typing import TYPE_CHECKING, Any, List, Optional, TypedDict
 
+from dbos._workflow_commands import garbage_collect, global_timeout
+
 from ._context import SetWorkflowID
 from ._error import DBOSException
 from ._logger import dbos_logger
@@ -20,6 +22,8 @@ _health_check_path = "/dbos-healthz"
 _workflow_recovery_path = "/dbos-workflow-recovery"
 _deactivate_path = "/deactivate"
 _workflow_queues_metadata_path = "/dbos-workflow-queues-metadata"
+_garbage_collect_path = "/dbos-garbage-collect"
+_global_timeout_path = "/dbos-global-timeout"
 # /workflows/:workflow_id/cancel
 # /workflows/:workflow_id/resume
 # /workflows/:workflow_id/restart
@@ -122,6 +126,23 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self.send_response(200)
             self._end_headers()
             self.wfile.write(json.dumps(workflow_ids).encode("utf-8"))
+        elif self.path == _garbage_collect_path:
+            inputs = json.loads(post_data.decode("utf-8"))
+            cutoff_epoch_timestamp_ms = inputs.get("cutoff_epoch_timestamp_ms", None)
+            rows_threshold = inputs.get("rows_threshold", None)
+            garbage_collect(
+                self.dbos,
+                cutoff_epoch_timestamp_ms=cutoff_epoch_timestamp_ms,
+                rows_threshold=rows_threshold,
+            )
+            self.send_response(204)
+            self._end_headers()
+        elif self.path == _global_timeout_path:
+            inputs = json.loads(post_data.decode("utf-8"))
+            timeout_ms = inputs.get("timeout_ms", None)
+            global_timeout(self.dbos, timeout_ms)
+            self.send_response(204)
+            self._end_headers()
         else:
 
             restart_match = re.match(
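The two new admin routes accept small JSON bodies and reply with 204 on success. Below is a minimal client-side sketch; the admin server address (localhost:3001) and the use of the standard library are assumptions for illustration, not part of this diff.

```python
# Hypothetical client for the new admin endpoints. Assumes the DBOS admin
# server is reachable at localhost:3001; adjust the base URL for your setup.
import json
from urllib.request import Request, urlopen

ADMIN_BASE = "http://localhost:3001"


def post_json(path: str, body: dict) -> int:
    req = Request(
        ADMIN_BASE + path,
        data=json.dumps(body).encode("utf-8"),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with urlopen(req) as resp:
        return resp.status  # the handler replies 204 No Content on success


# Garbage-collect by timestamp and/or row count; either field may be omitted.
post_json(
    "/dbos-garbage-collect",
    {"cutoff_epoch_timestamp_ms": 1735689600000, "rows_threshold": 10000},
)

# Cancel PENDING/ENQUEUED workflows older than one hour.
post_json("/dbos-global-timeout", {"timeout_ms": 60 * 60 * 1000})
```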
dbos/_app_db.py
CHANGED
@@ -256,3 +256,21 @@ class ApplicationDatabase:
             )
 
             conn.execute(insert_stmt)
+
+    def garbage_collect(
+        self, cutoff_epoch_timestamp_ms: int, pending_workflow_ids: list[str]
+    ) -> None:
+        with self.engine.begin() as c:
+            delete_query = sa.delete(ApplicationSchema.transaction_outputs).where(
+                ApplicationSchema.transaction_outputs.c.created_at
+                < cutoff_epoch_timestamp_ms
+            )
+
+            if len(pending_workflow_ids) > 0:
+                delete_query = delete_query.where(
+                    ~ApplicationSchema.transaction_outputs.c.workflow_uuid.in_(
+                        pending_workflow_ids
+                    )
+                )
+
+            c.execute(delete_query)
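In plain terms, this removes transaction outputs recorded before the cutoff unless they belong to a workflow the system database still considers pending or enqueued. A small pure-Python illustration of that filter follows; the row shape and names are illustrative only, not the actual table schema.

```python
# Illustrative only: mirrors the WHERE clause above over an in-memory list.
def rows_kept(rows, cutoff_ms, pending_workflow_ids):
    return [
        r
        for r in rows
        if r["created_at"] >= cutoff_ms or r["workflow_uuid"] in pending_workflow_ids
    ]


rows = [
    {"workflow_uuid": "a", "created_at": 100},  # old, finished -> deleted
    {"workflow_uuid": "b", "created_at": 100},  # old, still pending -> kept
    {"workflow_uuid": "c", "created_at": 300},  # newer than cutoff -> kept
]
assert [r["workflow_uuid"] for r in rows_kept(rows, 200, ["b"])] == ["b", "c"]
```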
dbos/_sys_db.py
CHANGED
@@ -1852,6 +1852,62 @@ class SystemDatabase:
             dbos_logger.error(f"Error connecting to the DBOS system database: {e}")
             raise
 
+    def garbage_collect(
+        self, cutoff_epoch_timestamp_ms: Optional[int], rows_threshold: Optional[int]
+    ) -> Optional[tuple[int, list[str]]]:
+        if rows_threshold is not None:
+            with self.engine.begin() as c:
+                # Get the created_at timestamp of the rows_threshold newest row
+                result = c.execute(
+                    sa.select(SystemSchema.workflow_status.c.created_at)
+                    .order_by(SystemSchema.workflow_status.c.created_at.desc())
+                    .limit(1)
+                    .offset(rows_threshold - 1)
+                ).fetchone()
+
+                if result is not None:
+                    rows_based_cutoff = result[0]
+                    # Use the more restrictive cutoff (higher timestamp = more recent = more deletion)
+                    if (
+                        cutoff_epoch_timestamp_ms is None
+                        or rows_based_cutoff > cutoff_epoch_timestamp_ms
+                    ):
+                        cutoff_epoch_timestamp_ms = rows_based_cutoff
+
+        if cutoff_epoch_timestamp_ms is None:
+            return None
+
+        with self.engine.begin() as c:
+            # Delete all workflows older than cutoff that are NOT PENDING or ENQUEUED
+            c.execute(
+                sa.delete(SystemSchema.workflow_status)
+                .where(
+                    SystemSchema.workflow_status.c.created_at
+                    < cutoff_epoch_timestamp_ms
+                )
+                .where(
+                    ~SystemSchema.workflow_status.c.status.in_(
+                        [
+                            WorkflowStatusString.PENDING.value,
+                            WorkflowStatusString.ENQUEUED.value,
+                        ]
+                    )
+                )
+            )
+
+            # Then, get the IDs of all remaining old workflows
+            pending_enqueued_result = c.execute(
+                sa.select(SystemSchema.workflow_status.c.workflow_uuid).where(
+                    SystemSchema.workflow_status.c.created_at
+                    < cutoff_epoch_timestamp_ms
+                )
+            ).fetchall()
+
+            # Return the final cutoff and workflow IDs
+            return cutoff_epoch_timestamp_ms, [
+                row[0] for row in pending_enqueued_result
+            ]
+
 
 def reset_system_database(postgres_db_url: sa.URL, sysdb_name: str) -> None:
     try:
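The interplay of the two parameters is easiest to see outside SQL: `rows_threshold` yields a second candidate cutoff (the `created_at` of the N-th newest workflow), and the larger of the two timestamps wins because it deletes more. The following standalone sketch mirrors that selection logic; it is not the library's API.

```python
# Standalone sketch of the cutoff selection performed above.
from typing import Optional


def effective_cutoff(
    created_at_desc: list[int],  # workflow created_at values, newest first
    cutoff_epoch_timestamp_ms: Optional[int],
    rows_threshold: Optional[int],
) -> Optional[int]:
    if rows_threshold is not None and len(created_at_desc) >= rows_threshold:
        rows_based_cutoff = created_at_desc[rows_threshold - 1]
        # The larger timestamp is the more aggressive cutoff.
        if (
            cutoff_epoch_timestamp_ms is None
            or rows_based_cutoff > cutoff_epoch_timestamp_ms
        ):
            cutoff_epoch_timestamp_ms = rows_based_cutoff
    return cutoff_epoch_timestamp_ms


# Five workflows, newest first. Keep at most 3 rows or anything newer than
# t=250: the rows-based cutoff (300) wins, so the two oldest rows are eligible
# for deletion (unless PENDING or ENQUEUED).
assert effective_cutoff([500, 400, 300, 200, 100], 250, 3) == 300
```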
dbos/_workflow_commands.py
CHANGED
@@ -1,8 +1,9 @@
+import time
 import uuid
-from
+from datetime import datetime
+from typing import TYPE_CHECKING, List, Optional
 
 from dbos._context import get_local_dbos_context
-from dbos._error import DBOSException
 
 from ._app_db import ApplicationDatabase
 from ._sys_db import (
@@ -11,8 +12,12 @@ from ._sys_db import (
     StepInfo,
     SystemDatabase,
     WorkflowStatus,
+    WorkflowStatusString,
 )
 
+if TYPE_CHECKING:
+    from ._dbos import DBOS
+
 
 def list_workflows(
     sys_db: SystemDatabase,
@@ -118,3 +123,32 @@ def fork_workflow(
         application_version=application_version,
     )
     return forked_workflow_id
+
+
+def garbage_collect(
+    dbos: "DBOS",
+    cutoff_epoch_timestamp_ms: Optional[int],
+    rows_threshold: Optional[int],
+) -> None:
+    if cutoff_epoch_timestamp_ms is None and rows_threshold is None:
+        return
+    result = dbos._sys_db.garbage_collect(
+        cutoff_epoch_timestamp_ms=cutoff_epoch_timestamp_ms,
+        rows_threshold=rows_threshold,
+    )
+    if result is not None:
+        cutoff_epoch_timestamp_ms, pending_workflow_ids = result
+        dbos._app_db.garbage_collect(cutoff_epoch_timestamp_ms, pending_workflow_ids)
+
+
+def global_timeout(dbos: "DBOS", timeout_ms: int) -> None:
+    cutoff_epoch_timestamp_ms = int(time.time() * 1000) - timeout_ms
+    cutoff_iso = datetime.fromtimestamp(cutoff_epoch_timestamp_ms / 1000).isoformat()
+    for workflow in dbos.list_workflows(
+        status=WorkflowStatusString.PENDING.value, end_time=cutoff_iso
+    ):
+        dbos.cancel_workflow(workflow.workflow_id)
+    for workflow in dbos.list_workflows(
+        status=WorkflowStatusString.ENQUEUED.value, end_time=cutoff_iso
+    ):
+        dbos.cancel_workflow(workflow.workflow_id)
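Both helpers take a live DBOS instance and reach into its system and application databases, so they are primarily meant to be driven by the admin endpoints above. A hedged usage sketch follows; the direct import of these internal helpers and the minimal setup shown are assumptions, and configuration details are omitted.

```python
# Usage sketch only: these helpers live in an internal module and their
# signatures may change between releases.
import time

from dbos import DBOS
from dbos._workflow_commands import garbage_collect, global_timeout

dbos = DBOS()  # configuration (app name, database URL, ...) omitted here
DBOS.launch()

# Delete workflow history older than 30 days, and additionally cap retained
# history at roughly the newest million rows (whichever cutoff deletes more).
garbage_collect(
    dbos,
    cutoff_epoch_timestamp_ms=int(time.time() * 1000) - 30 * 24 * 3600 * 1000,
    rows_threshold=1_000_000,
)

# Cancel anything still PENDING or ENQUEUED after 24 hours.
global_timeout(dbos, timeout_ms=24 * 3600 * 1000)
```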
{dbos-1.4.1.dist-info → dbos-1.5.0a2.dist-info}/RECORD
CHANGED
@@ -1,11 +1,11 @@
-dbos-1.
-dbos-1.
-dbos-1.
-dbos-1.
+dbos-1.5.0a2.dist-info/METADATA,sha256=1_SM7qqqbdht1SS-0o1s9YSUSJ2ZIC2Hm7oyCZFU64w,13267
+dbos-1.5.0a2.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-1.5.0a2.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.5.0a2.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=
-dbos/_app_db.py,sha256=
+dbos/_admin_server.py,sha256=SVk55SxT07OHi0wHt_VpQsBXOeuJL2017k7_YQI3oeg,11574
+dbos/_app_db.py,sha256=htblDPfqrpb_uZoFcvaud7cgQ-PDyn6Bn-cBidxdCTA,10603
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_client.py,sha256=cQxw1Nbh_vKZ03lONt0EmUhwXBk3B3NczZrmfXXeefY,14667
 dbos/_conductor/conductor.py,sha256=o0IaZjwnZ2TOyHeP2H4iSX6UnXLXQ4uODvWAKD9hHMs,21703
@@ -49,7 +49,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=rbFKggONdvvbb45InvGz0TM6a7c-Ux9dcaL-h_7Z7pU,4438
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=
+dbos/_sys_db.py,sha256=now889o6Mlmcdopp8xF5_0LAE67KeVH9Vm-4svIqo5s,80170
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -62,11 +62,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=yN6GRDKu_1p-EqtQLNarMocPfga2ZuqpzStzzSPYhzo,2732
 dbos/_utils.py,sha256=uywq1QrjMwy17btjxW4bES49povlQwYwYbvKwMT6C2U,1575
-dbos/_workflow_commands.py,sha256=
+dbos/_workflow_commands.py,sha256=2_ubdzKNJZQ32oftiOGFo2JBsfU_koGC1giXRgwwexI,4539
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
 dbos/cli/cli.py,sha256=EemOMqNpzSU2BQhAxV_e59pBRITDLwt49HF6W3uWBZg,20775
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.
+dbos-1.5.0a2.dist-info/RECORD,,
{dbos-1.4.1.dist-info → dbos-1.5.0a2.dist-info}/WHEEL
File without changes
{dbos-1.4.1.dist-info → dbos-1.5.0a2.dist-info}/entry_points.txt
File without changes
{dbos-1.4.1.dist-info → dbos-1.5.0a2.dist-info}/licenses/LICENSE
File without changes