dbos 1.4.0a1__py3-none-any.whl → 1.5.0a2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dbos/_admin_server.py CHANGED
@@ -7,6 +7,8 @@ from functools import partial
 from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
 from typing import TYPE_CHECKING, Any, List, Optional, TypedDict
 
+from dbos._workflow_commands import garbage_collect, global_timeout
+
 from ._context import SetWorkflowID
 from ._error import DBOSException
 from ._logger import dbos_logger
@@ -20,6 +22,8 @@ _health_check_path = "/dbos-healthz"
 _workflow_recovery_path = "/dbos-workflow-recovery"
 _deactivate_path = "/deactivate"
 _workflow_queues_metadata_path = "/dbos-workflow-queues-metadata"
+_garbage_collect_path = "/dbos-garbage-collect"
+_global_timeout_path = "/dbos-global-timeout"
 # /workflows/:workflow_id/cancel
 # /workflows/:workflow_id/resume
 # /workflows/:workflow_id/restart
@@ -122,6 +126,23 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
             self.send_response(200)
             self._end_headers()
             self.wfile.write(json.dumps(workflow_ids).encode("utf-8"))
+        elif self.path == _garbage_collect_path:
+            inputs = json.loads(post_data.decode("utf-8"))
+            cutoff_epoch_timestamp_ms = inputs.get("cutoff_epoch_timestamp_ms", None)
+            rows_threshold = inputs.get("rows_threshold", None)
+            garbage_collect(
+                self.dbos,
+                cutoff_epoch_timestamp_ms=cutoff_epoch_timestamp_ms,
+                rows_threshold=rows_threshold,
+            )
+            self.send_response(204)
+            self._end_headers()
+        elif self.path == _global_timeout_path:
+            inputs = json.loads(post_data.decode("utf-8"))
+            timeout_ms = inputs.get("timeout_ms", None)
+            global_timeout(self.dbos, timeout_ms)
+            self.send_response(204)
+            self._end_headers()
         else:
 
             restart_match = re.match(
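Both new routes accept a small JSON body on POST and reply with 204 No Content on success. A minimal sketch of calling them over HTTP; the localhost:3001 address is an assumption about where the admin server listens in your deployment, not something this diff specifies:

import json
import time
from urllib import request

ADMIN_URL = "http://localhost:3001"  # assumed admin server address


def post_admin(path: str, body: dict) -> int:
    # POST a JSON payload to the admin server and return the HTTP status code.
    req = request.Request(
        ADMIN_URL + path,
        data=json.dumps(body).encode("utf-8"),
        headers={"Content-Type": "application/json"},
        method="POST",
    )
    with request.urlopen(req) as resp:
        return resp.status  # both endpoints reply 204 on success


# Prune workflow history older than seven days, or beyond roughly 100,000 rows.
cutoff_ms = int(time.time() * 1000) - 7 * 24 * 60 * 60 * 1000
post_admin(
    "/dbos-garbage-collect",
    {"cutoff_epoch_timestamp_ms": cutoff_ms, "rows_threshold": 100_000},
)

# Cancel workflows that have been PENDING or ENQUEUED for more than an hour.
post_admin("/dbos-global-timeout", {"timeout_ms": 60 * 60 * 1000})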
dbos/_app_db.py CHANGED
@@ -256,3 +256,21 @@ class ApplicationDatabase:
             )
 
             conn.execute(insert_stmt)
+
+    def garbage_collect(
+        self, cutoff_epoch_timestamp_ms: int, pending_workflow_ids: list[str]
+    ) -> None:
+        with self.engine.begin() as c:
+            delete_query = sa.delete(ApplicationSchema.transaction_outputs).where(
+                ApplicationSchema.transaction_outputs.c.created_at
+                < cutoff_epoch_timestamp_ms
+            )
+
+            if len(pending_workflow_ids) > 0:
+                delete_query = delete_query.where(
+                    ~ApplicationSchema.transaction_outputs.c.workflow_uuid.in_(
+                        pending_workflow_ids
+                    )
+                )
+
+            c.execute(delete_query)
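The application-database half of garbage collection is a single DELETE with an optional NOT IN guard so that rows belonging to still-pending workflows survive. A standalone sketch of the same query shape in SQLAlchemy Core (hypothetical helper and simplified table definition, not the package's own schema module):

import sqlalchemy as sa

metadata = sa.MetaData()
# Simplified stand-in for ApplicationSchema.transaction_outputs.
transaction_outputs = sa.Table(
    "transaction_outputs",
    metadata,
    sa.Column("workflow_uuid", sa.Text),
    sa.Column("created_at", sa.BigInteger),
)


def delete_old_transaction_outputs(
    engine: sa.engine.Engine,
    cutoff_epoch_timestamp_ms: int,
    pending_workflow_ids: list[str],
) -> None:
    # Delete transaction outputs recorded before the cutoff...
    query = sa.delete(transaction_outputs).where(
        transaction_outputs.c.created_at < cutoff_epoch_timestamp_ms
    )
    # ...but never rows that belong to workflows still in flight.
    if pending_workflow_ids:
        query = query.where(
            ~transaction_outputs.c.workflow_uuid.in_(pending_workflow_ids)
        )
    with engine.begin() as conn:
        conn.execute(query)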
dbos/_debug.py CHANGED
@@ -15,11 +15,11 @@ class PythonModule:
 
 
 def debug_workflow(workflow_id: str, entrypoint: Union[str, PythonModule]) -> None:
-    # include the current directory (represented by empty string) in the search path
-    # if it not already included
-    if "" not in sys.path:
-        sys.path.insert(0, "")
     if isinstance(entrypoint, str):
+        # ensure the entrypoint parent directory is in sys.path
+        parent = str(Path(entrypoint).parent)
+        if parent not in sys.path:
+            sys.path.insert(0, parent)
         runpy.run_path(entrypoint)
     elif isinstance(entrypoint, PythonModule):
         runpy.run_module(entrypoint.module_name)
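The practical effect of this change: when the debugger is handed an entrypoint as a file path, modules sitting next to that file become importable, instead of relying on whatever the current working directory happens to be. A small sketch of the same idea outside DBOS (helper name and paths are hypothetical):

import runpy
import sys
from pathlib import Path


def run_script_with_local_imports(entrypoint: str) -> None:
    # Put the script's own directory on sys.path so that, e.g., `import helpers`
    # inside /app/jobs/main.py resolves to /app/jobs/helpers.py no matter where
    # the debugger was launched from.
    parent = str(Path(entrypoint).parent)
    if parent not in sys.path:
        sys.path.insert(0, parent)
    runpy.run_path(entrypoint)


# run_script_with_local_imports("/app/jobs/main.py")  # hypothetical path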
dbos/_sys_db.py CHANGED
@@ -1852,6 +1852,62 @@ class SystemDatabase:
             dbos_logger.error(f"Error connecting to the DBOS system database: {e}")
             raise
 
+    def garbage_collect(
+        self, cutoff_epoch_timestamp_ms: Optional[int], rows_threshold: Optional[int]
+    ) -> Optional[tuple[int, list[str]]]:
+        if rows_threshold is not None:
+            with self.engine.begin() as c:
+                # Get the created_at timestamp of the rows_threshold newest row
+                result = c.execute(
+                    sa.select(SystemSchema.workflow_status.c.created_at)
+                    .order_by(SystemSchema.workflow_status.c.created_at.desc())
+                    .limit(1)
+                    .offset(rows_threshold - 1)
+                ).fetchone()
+
+            if result is not None:
+                rows_based_cutoff = result[0]
+                # Use the more restrictive cutoff (higher timestamp = more recent = more deletion)
+                if (
+                    cutoff_epoch_timestamp_ms is None
+                    or rows_based_cutoff > cutoff_epoch_timestamp_ms
+                ):
+                    cutoff_epoch_timestamp_ms = rows_based_cutoff
+
+        if cutoff_epoch_timestamp_ms is None:
+            return None
+
+        with self.engine.begin() as c:
+            # Delete all workflows older than cutoff that are NOT PENDING or ENQUEUED
+            c.execute(
+                sa.delete(SystemSchema.workflow_status)
+                .where(
+                    SystemSchema.workflow_status.c.created_at
+                    < cutoff_epoch_timestamp_ms
+                )
+                .where(
+                    ~SystemSchema.workflow_status.c.status.in_(
+                        [
+                            WorkflowStatusString.PENDING.value,
+                            WorkflowStatusString.ENQUEUED.value,
+                        ]
+                    )
+                )
+            )
+
+            # Then, get the IDs of all remaining old workflows
+            pending_enqueued_result = c.execute(
+                sa.select(SystemSchema.workflow_status.c.workflow_uuid).where(
+                    SystemSchema.workflow_status.c.created_at
+                    < cutoff_epoch_timestamp_ms
+                )
+            ).fetchall()
+
+            # Return the final cutoff and workflow IDs
+            return cutoff_epoch_timestamp_ms, [
+                row[0] for row in pending_enqueued_result
+            ]
+
 
 def reset_system_database(postgres_db_url: sa.URL, sysdb_name: str) -> None:
     try:
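SystemDatabase.garbage_collect folds its two optional knobs into a single cutoff: when rows_threshold is set, the created_at of the Nth-newest workflow becomes a candidate cutoff, and whichever candidate is later (and therefore deletes more) wins. PENDING and ENQUEUED workflows are never deleted, and the IDs of the surviving old workflows are returned so the application database can spare them too. A standalone sketch of just the cutoff arithmetic (function name is hypothetical; timestamps are epoch milliseconds):

from typing import Optional


def resolve_cutoff(
    cutoff_epoch_timestamp_ms: Optional[int], rows_based_cutoff_ms: Optional[int]
) -> Optional[int]:
    # rows_based_cutoff_ms stands in for the created_at of the rows_threshold-th
    # newest workflow row, or None if the table holds fewer rows than that.
    if rows_based_cutoff_ms is not None and (
        cutoff_epoch_timestamp_ms is None
        or rows_based_cutoff_ms > cutoff_epoch_timestamp_ms
    ):
        return rows_based_cutoff_ms
    return cutoff_epoch_timestamp_ms


# A time-based cutoff of 1_000 and a rows-based cutoff of 5_000 resolve to 5_000,
# so everything older than the newest rows_threshold rows is eligible for deletion.
assert resolve_cutoff(1_000, 5_000) == 5_000
assert resolve_cutoff(None, 5_000) == 5_000
assert resolve_cutoff(1_000, None) == 1_000
assert resolve_cutoff(None, None) is None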
dbos/_workflow_commands.py CHANGED
@@ -1,8 +1,9 @@
+import time
 import uuid
-from typing import List, Optional
+from datetime import datetime
+from typing import TYPE_CHECKING, List, Optional
 
 from dbos._context import get_local_dbos_context
-from dbos._error import DBOSException
 
 from ._app_db import ApplicationDatabase
 from ._sys_db import (
@@ -11,8 +12,12 @@ from ._sys_db import (
     StepInfo,
     SystemDatabase,
     WorkflowStatus,
+    WorkflowStatusString,
 )
 
+if TYPE_CHECKING:
+    from ._dbos import DBOS
+
 
 def list_workflows(
     sys_db: SystemDatabase,
@@ -118,3 +123,32 @@ def fork_workflow(
         application_version=application_version,
     )
     return forked_workflow_id
+
+
+def garbage_collect(
+    dbos: "DBOS",
+    cutoff_epoch_timestamp_ms: Optional[int],
+    rows_threshold: Optional[int],
+) -> None:
+    if cutoff_epoch_timestamp_ms is None and rows_threshold is None:
+        return
+    result = dbos._sys_db.garbage_collect(
+        cutoff_epoch_timestamp_ms=cutoff_epoch_timestamp_ms,
+        rows_threshold=rows_threshold,
+    )
+    if result is not None:
+        cutoff_epoch_timestamp_ms, pending_workflow_ids = result
+        dbos._app_db.garbage_collect(cutoff_epoch_timestamp_ms, pending_workflow_ids)
+
+
+def global_timeout(dbos: "DBOS", timeout_ms: int) -> None:
+    cutoff_epoch_timestamp_ms = int(time.time() * 1000) - timeout_ms
+    cutoff_iso = datetime.fromtimestamp(cutoff_epoch_timestamp_ms / 1000).isoformat()
+    for workflow in dbos.list_workflows(
+        status=WorkflowStatusString.PENDING.value, end_time=cutoff_iso
+    ):
+        dbos.cancel_workflow(workflow.workflow_id)
+    for workflow in dbos.list_workflows(
+        status=WorkflowStatusString.ENQUEUED.value, end_time=cutoff_iso
+    ):
+        dbos.cancel_workflow(workflow.workflow_id)
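Together the two helpers give a simple retention story: garbage_collect prunes workflow and transaction history down to a time or row budget, while global_timeout cancels anything that has sat in PENDING or ENQUEUED beyond a deadline. A hedged usage sketch against these internal helpers (the wrapper function and the specific budgets are illustrative, not part of the package):

import time

from dbos import DBOS
from dbos._workflow_commands import garbage_collect, global_timeout


def prune_history(dbos: DBOS) -> None:
    # Assumes an already-configured and launched DBOS instance.
    one_day_ms = 24 * 60 * 60 * 1000
    # Drop workflow and transaction records older than one day, or beyond roughly
    # 10,000 rows, whichever cutoff removes more; PENDING/ENQUEUED rows survive.
    garbage_collect(
        dbos,
        cutoff_epoch_timestamp_ms=int(time.time() * 1000) - one_day_ms,
        rows_threshold=10_000,
    )
    # Cancel anything that has been PENDING or ENQUEUED for more than a day.
    global_timeout(dbos, timeout_ms=one_day_ms)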
dbos-1.4.0a1.dist-info/METADATA → dbos-1.5.0a2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.4.0a1
+Version: 1.5.0a2
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
dbos-1.4.0a1.dist-info/RECORD → dbos-1.5.0a2.dist-info/RECORD CHANGED
@@ -1,11 +1,11 @@
-dbos-1.4.0a1.dist-info/METADATA,sha256=Snqbd6UknvRQFKSZJUL9tBkcEvfCGnRNXGH--L4nGPc,13267
-dbos-1.4.0a1.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
-dbos-1.4.0a1.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
-dbos-1.4.0a1.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos-1.5.0a2.dist-info/METADATA,sha256=1_SM7qqqbdht1SS-0o1s9YSUSJ2ZIC2Hm7oyCZFU64w,13267
+dbos-1.5.0a2.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+dbos-1.5.0a2.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.5.0a2.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=TWXi4drrzKFpKkUmEJpJkQBZxAtOalnhtYicEn2nDK0,10618
-dbos/_app_db.py,sha256=0PKqpxJ3EbIaak3Wl0lNl3hXvhBfz4EEHaCw1bUOvIM,9937
+dbos/_admin_server.py,sha256=SVk55SxT07OHi0wHt_VpQsBXOeuJL2017k7_YQI3oeg,11574
+dbos/_app_db.py,sha256=htblDPfqrpb_uZoFcvaud7cgQ-PDyn6Bn-cBidxdCTA,10603
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
 dbos/_client.py,sha256=cQxw1Nbh_vKZ03lONt0EmUhwXBk3B3NczZrmfXXeefY,14667
 dbos/_conductor/conductor.py,sha256=o0IaZjwnZ2TOyHeP2H4iSX6UnXLXQ4uODvWAKD9hHMs,21703
@@ -15,7 +15,7 @@ dbos/_core.py,sha256=hvHKi31-3LG5yfWa-KhsnoFrXsV_eT-GeKIZFT4chx8,48533
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
 dbos/_dbos.py,sha256=GVx3NY59tKWW6nlAtH2PvX4Ne_eOHvY012MtXVK_FQA,47265
 dbos/_dbos_config.py,sha256=2CC1YR8lP9W-_NsMUMnTnW-v-70KN4XkbJEeNJ78RlQ,20373
-dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
+dbos/_debug.py,sha256=99j2SChWmCPAlZoDmjsJGe77tpU2LEa8E2TtLAnnh7o,1831
 dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
 dbos/_error.py,sha256=q0OQJZTbR8FFHV9hEpAGpz9oWBT5L509zUhmyff7FJw,8500
 dbos/_event_loop.py,sha256=cvaFN9-II3MsHEOq8QoICc_8qSKrjikMlLfuhC3Y8Dk,2923
@@ -49,7 +49,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=rbFKggONdvvbb45InvGz0TM6a7c-Ux9dcaL-h_7Z7pU,4438
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=67z_K0aKH8M_oRs9c13zhp6skpT-sLAw8nYRBa3JM5w,77844
+dbos/_sys_db.py,sha256=now889o6Mlmcdopp8xF5_0LAE67KeVH9Vm-4svIqo5s,80170
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -62,11 +62,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=yN6GRDKu_1p-EqtQLNarMocPfga2ZuqpzStzzSPYhzo,2732
 dbos/_utils.py,sha256=uywq1QrjMwy17btjxW4bES49povlQwYwYbvKwMT6C2U,1575
-dbos/_workflow_commands.py,sha256=UCpHWvCEXjVZtf5FNanFvtJpgUJDSI1EFBqQP0x_2A0,3346
+dbos/_workflow_commands.py,sha256=2_ubdzKNJZQ32oftiOGFo2JBsfU_koGC1giXRgwwexI,4539
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
 dbos/cli/cli.py,sha256=EemOMqNpzSU2BQhAxV_e59pBRITDLwt49HF6W3uWBZg,20775
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.4.0a1.dist-info/RECORD,,
+dbos-1.5.0a2.dist-info/RECORD,,