dbos 1.12.0a2__py3-none-any.whl → 1.13.0a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dbos might be problematic; see the registry's advisory page for more details.

Files changed (32)
  1. dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py +35 -0
  2. dbos/_context.py +4 -0
  3. dbos/_core.py +3 -0
  4. dbos/_dbos.py +24 -6
  5. dbos/_dbos_config.py +6 -0
  6. dbos/_fastapi.py +3 -1
  7. dbos/_logger.py +3 -1
  8. dbos/_migration.py +233 -0
  9. dbos/_sys_db.py +10 -40
  10. dbos/_tracer.py +5 -1
  11. dbos/cli/cli.py +35 -6
  12. {dbos-1.12.0a2.dist-info → dbos-1.13.0a3.dist-info}/METADATA +1 -1
  13. {dbos-1.12.0a2.dist-info → dbos-1.13.0a3.dist-info}/RECORD +32 -30
  14. /dbos/{_migrations → _alembic_migrations}/env.py +0 -0
  15. /dbos/{_migrations → _alembic_migrations}/script.py.mako +0 -0
  16. /dbos/{_migrations → _alembic_migrations}/versions/01ce9f07bd10_streaming.py +0 -0
  17. /dbos/{_migrations → _alembic_migrations}/versions/04ca4f231047_workflow_queues_executor_id.py +0 -0
  18. /dbos/{_migrations → _alembic_migrations}/versions/27ac6900c6ad_add_queue_dedup.py +0 -0
  19. /dbos/{_migrations → _alembic_migrations}/versions/50f3227f0b4b_fix_job_queue.py +0 -0
  20. /dbos/{_migrations → _alembic_migrations}/versions/5c361fc04708_added_system_tables.py +0 -0
  21. /dbos/{_migrations → _alembic_migrations}/versions/66478e1b95e5_consolidate_queues.py +0 -0
  22. /dbos/{_migrations → _alembic_migrations}/versions/83f3732ae8e7_workflow_timeout.py +0 -0
  23. /dbos/{_migrations → _alembic_migrations}/versions/933e86bdac6a_add_queue_priority.py +0 -0
  24. /dbos/{_migrations → _alembic_migrations}/versions/a3b18ad34abe_added_triggers.py +0 -0
  25. /dbos/{_migrations → _alembic_migrations}/versions/d76646551a6b_job_queue_limiter.py +0 -0
  26. /dbos/{_migrations → _alembic_migrations}/versions/d76646551a6c_workflow_queue.py +0 -0
  27. /dbos/{_migrations → _alembic_migrations}/versions/d994145b47b6_consolidate_inputs.py +0 -0
  28. /dbos/{_migrations → _alembic_migrations}/versions/eab0cc1d9a14_job_queue.py +0 -0
  29. /dbos/{_migrations → _alembic_migrations}/versions/f4b9b32ba814_functionname_childid_op_outputs.py +0 -0
  30. {dbos-1.12.0a2.dist-info → dbos-1.13.0a3.dist-info}/WHEEL +0 -0
  31. {dbos-1.12.0a2.dist-info → dbos-1.13.0a3.dist-info}/entry_points.txt +0 -0
  32. {dbos-1.12.0a2.dist-info → dbos-1.13.0a3.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,35 @@
1
+ """dbos_migrations
2
+
3
+ Revision ID: 471b60d64126
4
+ Revises: 01ce9f07bd10
5
+ Create Date: 2025-08-21 14:22:31.455266
6
+
7
+ """
8
+
9
+ from typing import Sequence, Union
10
+
11
+ import sqlalchemy as sa
12
+ from alembic import op
13
+
14
+ # revision identifiers, used by Alembic.
15
+ revision: str = "471b60d64126"
16
+ down_revision: Union[str, None] = "01ce9f07bd10"
17
+ branch_labels: Union[str, Sequence[str], None] = None
18
+ depends_on: Union[str, Sequence[str], None] = None
19
+
20
+
21
+ def upgrade() -> None:
22
+ # Create dbos_migrations table
23
+ op.create_table(
24
+ "dbos_migrations",
25
+ sa.Column("version", sa.BigInteger(), nullable=False),
26
+ sa.PrimaryKeyConstraint("version"),
27
+ schema="dbos",
28
+ )
29
+
30
+ # Insert initial version 1
31
+ op.execute("INSERT INTO dbos.dbos_migrations (version) VALUES (1)")
32
+
33
+
34
+ def downgrade() -> None:
35
+ op.drop_table("dbos_migrations", schema="dbos")
dbos/_context.py CHANGED
@@ -221,6 +221,8 @@ class DBOSContext:
221
221
  return None
222
222
 
223
223
  def _start_span(self, attributes: TracedAttributes) -> None:
224
+ if dbos_tracer.disable_otlp:
225
+ return
224
226
  attributes["operationUUID"] = (
225
227
  self.workflow_id if len(self.workflow_id) > 0 else None
226
228
  )
@@ -246,6 +248,8 @@ class DBOSContext:
246
248
  cm.__enter__()
247
249
 
248
250
  def _end_span(self, exc_value: Optional[BaseException]) -> None:
251
+ if dbos_tracer.disable_otlp:
252
+ return
249
253
  context_span = self.context_spans.pop()
250
254
  if exc_value is None:
251
255
  context_span.span.set_status(Status(StatusCode.OK))
dbos/_core.py CHANGED
@@ -356,6 +356,7 @@ def _get_wf_invoke_func(
356
356
  )
357
357
  return recorded_result
358
358
  try:
359
+ dbos._active_workflows_set.add(status["workflow_uuid"])
359
360
  output = func()
360
361
  if not dbos.debug_mode:
361
362
  dbos._sys_db.update_workflow_outcome(
@@ -378,6 +379,8 @@ def _get_wf_invoke_func(
378
379
  error=_serialization.serialize_exception(error),
379
380
  )
380
381
  raise
382
+ finally:
383
+ dbos._active_workflows_set.discard(status["workflow_uuid"])
381
384
 
382
385
  return persist
383
386
 
dbos/_dbos.py CHANGED
@@ -293,16 +293,24 @@ class DBOS:
293
293
  return _dbos_global_instance
294
294
 
295
295
  @classmethod
296
- def destroy(cls, *, destroy_registry: bool = False) -> None:
296
+ def destroy(
297
+ cls,
298
+ *,
299
+ destroy_registry: bool = False,
300
+ workflow_completion_timeout_sec: int = 0,
301
+ ) -> None:
297
302
  global _dbos_global_instance
298
303
  if _dbos_global_instance is not None:
299
- _dbos_global_instance._destroy()
304
+ _dbos_global_instance._destroy(
305
+ workflow_completion_timeout_sec=workflow_completion_timeout_sec,
306
+ )
300
307
  _dbos_global_instance = None
301
308
  if destroy_registry:
302
309
  global _dbos_global_registry
303
310
  _dbos_global_registry = None
304
311
  GlobalParams.app_version = os.environ.get("DBOS__APPVERSION", "")
305
312
  GlobalParams.executor_id = os.environ.get("DBOS__VMID", "local")
313
+ dbos_logger.info("DBOS successfully shut down")
306
314
 
307
315
  def __init__(
308
316
  self,
@@ -337,6 +345,7 @@ class DBOS:
337
345
  self.conductor_key: Optional[str] = conductor_key
338
346
  self.conductor_websocket: Optional[ConductorWebsocket] = None
339
347
  self._background_event_loop: BackgroundEventLoop = BackgroundEventLoop()
348
+ self._active_workflows_set: set[str] = set()
340
349
 
341
350
  # Globally set the application version and executor ID.
342
351
  # In DBOS Cloud, instead use the values supplied through environment variables.
@@ -588,12 +597,23 @@ class DBOS:
588
597
 
589
598
  reset_system_database(pg_db_url, sysdb_name)
590
599
 
591
- def _destroy(self) -> None:
600
+ def _destroy(self, *, workflow_completion_timeout_sec: int) -> None:
592
601
  self._initialized = False
593
602
  for event in self.poller_stop_events:
594
603
  event.set()
595
604
  for event in self.background_thread_stop_events:
596
605
  event.set()
606
+ if workflow_completion_timeout_sec > 0:
607
+ deadline = time.time() + workflow_completion_timeout_sec
608
+ while time.time() < deadline:
609
+ time.sleep(1)
610
+ active_workflows = len(self._active_workflows_set)
611
+ if active_workflows > 0:
612
+ dbos_logger.info(
613
+ f"Attempting to shut down DBOS. {active_workflows} workflows remain active. IDs: {self._active_workflows_set}"
614
+ )
615
+ else:
616
+ break
597
617
  self._background_event_loop.stop()
598
618
  if self._sys_db_field is not None:
599
619
  self._sys_db_field.destroy()
@@ -609,10 +629,8 @@ class DBOS:
609
629
  and self.conductor_websocket.websocket is not None
610
630
  ):
611
631
  self.conductor_websocket.websocket.close()
612
- # CB - This needs work, some things ought to stop before DBs are tossed out,
613
- # on the other hand it hangs to move it
614
632
  if self._executor_field is not None:
615
- self._executor_field.shutdown(cancel_futures=True)
633
+ self._executor_field.shutdown(wait=False, cancel_futures=True)
616
634
  self._executor_field = None
617
635
  for bg_thread in self._background_threads:
618
636
  bg_thread.join()
dbos/_dbos_config.py CHANGED
@@ -33,6 +33,9 @@ class DBOSConfig(TypedDict, total=False):
33
33
  admin_port (int): Admin port
34
34
  run_admin_server (bool): Whether to run the DBOS admin server
35
35
  otlp_attributes (dict[str, str]): A set of custom attributes to apply OTLP-exported logs and traces
36
+ application_version (str): Application version
37
+ executor_id (str): Executor ID, used to identify the application instance in distributed environments
38
+ disable_otlp (bool): If True, disables OTLP tracing and logging. Defaults to False.
36
39
  """
37
40
 
38
41
  name: str
@@ -49,6 +52,7 @@ class DBOSConfig(TypedDict, total=False):
49
52
  otlp_attributes: Optional[dict[str, str]]
50
53
  application_version: Optional[str]
51
54
  executor_id: Optional[str]
55
+ disable_otlp: Optional[bool]
52
56
 
53
57
 
54
58
  class RuntimeConfig(TypedDict, total=False):
@@ -91,6 +95,7 @@ class TelemetryConfig(TypedDict, total=False):
91
95
  logs: Optional[LoggerConfig]
92
96
  OTLPExporter: Optional[OTLPExporterConfig]
93
97
  otlp_attributes: Optional[dict[str, str]]
98
+ disable_otlp: Optional[bool]
94
99
 
95
100
 
96
101
  class ConfigFile(TypedDict, total=False):
@@ -157,6 +162,7 @@ def translate_dbos_config_to_config_file(config: DBOSConfig) -> ConfigFile:
157
162
  telemetry: TelemetryConfig = {
158
163
  "OTLPExporter": {"tracesEndpoint": [], "logsEndpoint": []},
159
164
  "otlp_attributes": config.get("otlp_attributes", {}),
165
+ "disable_otlp": config.get("disable_otlp", False),
160
166
  }
161
167
  # For mypy
162
168
  assert telemetry["OTLPExporter"] is not None
dbos/_fastapi.py CHANGED
@@ -49,7 +49,7 @@ class LifespanMiddleware:
49
49
  if not self.dbos._launched:
50
50
  self.dbos._launch()
51
51
  elif message["type"] == "lifespan.shutdown.complete":
52
- self.dbos._destroy()
52
+ self.dbos.destroy()
53
53
  await send(message)
54
54
 
55
55
  # Call the original app with our wrapped functions
@@ -83,4 +83,6 @@ def setup_fastapi_middleware(app: FastAPI, dbos: DBOS) -> None:
83
83
  response = await call_next(request)
84
84
  else:
85
85
  response = await call_next(request)
86
+ if hasattr(response, "status_code"):
87
+ DBOS.span.set_attribute("responseCode", response.status_code)
86
88
  return response
dbos/_logger.py CHANGED
@@ -77,7 +77,9 @@ def config_logger(config: "ConfigFile") -> None:
77
77
  otlp_logs_endpoints = (
78
78
  config.get("telemetry", {}).get("OTLPExporter", {}).get("logsEndpoint") # type: ignore
79
79
  )
80
- if otlp_logs_endpoints:
80
+ disable_otlp = config.get("telemetry", {}).get("disable_otlp", False) # type: ignore
81
+
82
+ if not disable_otlp and otlp_logs_endpoints:
81
83
  log_provider = PatchedOTLPLoggerProvider(
82
84
  Resource.create(
83
85
  attributes={
dbos/_migration.py ADDED
@@ -0,0 +1,233 @@
1
+ import logging
2
+ import os
3
+ import re
4
+
5
+ import sqlalchemy as sa
6
+ from alembic import command
7
+ from alembic.config import Config
8
+
9
+ from ._logger import dbos_logger
10
+
11
+
12
+ def ensure_dbos_schema(engine: sa.Engine) -> bool:
13
+ """
14
+ True if using DBOS migrations (DBOS schema and migrations table already exist or were created)
15
+ False if using Alembic migrations (DBOS schema exists, but dbos_migrations table doesn't)
16
+ """
17
+ with engine.begin() as conn:
18
+ # Check if dbos schema exists
19
+ schema_result = conn.execute(
20
+ sa.text(
21
+ "SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'dbos'"
22
+ )
23
+ )
24
+ schema_existed = schema_result.fetchone() is not None
25
+
26
+ # Create schema if it doesn't exist
27
+ if not schema_existed:
28
+ conn.execute(sa.text("CREATE SCHEMA dbos"))
29
+
30
+ # Check if dbos_migrations table exists
31
+ table_result = conn.execute(
32
+ sa.text(
33
+ "SELECT table_name FROM information_schema.tables WHERE table_schema = 'dbos' AND table_name = 'dbos_migrations'"
34
+ )
35
+ )
36
+ table_exists = table_result.fetchone() is not None
37
+
38
+ if table_exists:
39
+ return True
40
+ elif schema_existed:
41
+ return False
42
+ else:
43
+ conn.execute(
44
+ sa.text(
45
+ "CREATE TABLE dbos.dbos_migrations (version BIGINT NOT NULL PRIMARY KEY)"
46
+ )
47
+ )
48
+ return True
49
+
50
+
51
+ def run_alembic_migrations(engine: sa.Engine) -> None:
52
+ """Run system database schema migrations with Alembic.
53
+ This is DEPRECATED in favor of DBOS-managed migrations.
54
+ It is retained only for backwards compatibility and
55
+ will be removed in the next major version."""
56
+ # Run a schema migration for the system database
57
+ migration_dir = os.path.join(
58
+ os.path.dirname(os.path.realpath(__file__)), "_alembic_migrations"
59
+ )
60
+ alembic_cfg = Config()
61
+ alembic_cfg.set_main_option("script_location", migration_dir)
62
+ logging.getLogger("alembic").setLevel(logging.WARNING)
63
+ # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
64
+ escaped_conn_string = re.sub(
65
+ r"%(?=[0-9A-Fa-f]{2})",
66
+ "%%",
67
+ engine.url.render_as_string(hide_password=False),
68
+ )
69
+ alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
70
+ try:
71
+ command.upgrade(alembic_cfg, "head")
72
+ except Exception as e:
73
+ dbos_logger.warning(
74
+ f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
75
+ )
76
+
77
+
78
+ def run_dbos_migrations(engine: sa.Engine) -> None:
79
+ """Run DBOS-managed migrations by executing each SQL command in dbos_migrations."""
80
+ with engine.begin() as conn:
81
+ # Get current migration version
82
+ result = conn.execute(sa.text("SELECT version FROM dbos.dbos_migrations"))
83
+ current_version = result.fetchone()
84
+ last_applied = current_version[0] if current_version else 0
85
+
86
+ # Apply migrations starting from the next version
87
+ for i, migration_sql in enumerate(dbos_migrations, 1):
88
+ if i <= last_applied:
89
+ continue
90
+
91
+ # Execute the migration
92
+ dbos_logger.info(f"Applying DBOS system database schema migration {i}")
93
+ conn.execute(sa.text(migration_sql))
94
+
95
+ # Update the single row with the new version
96
+ if last_applied == 0:
97
+ conn.execute(
98
+ sa.text(
99
+ "INSERT INTO dbos.dbos_migrations (version) VALUES (:version)"
100
+ ),
101
+ {"version": i},
102
+ )
103
+ else:
104
+ conn.execute(
105
+ sa.text("UPDATE dbos.dbos_migrations SET version = :version"),
106
+ {"version": i},
107
+ )
108
+ last_applied = i
109
+
110
+
111
+ dbos_migration_one = """
112
+ -- Enable uuid extension for generating UUIDs
113
+ CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
114
+
115
+ CREATE TABLE dbos.workflow_status (
116
+ workflow_uuid TEXT PRIMARY KEY,
117
+ status TEXT,
118
+ name TEXT,
119
+ authenticated_user TEXT,
120
+ assumed_role TEXT,
121
+ authenticated_roles TEXT,
122
+ request TEXT,
123
+ output TEXT,
124
+ error TEXT,
125
+ executor_id TEXT,
126
+ created_at BIGINT NOT NULL DEFAULT (EXTRACT(epoch FROM now()) * 1000::numeric)::bigint,
127
+ updated_at BIGINT NOT NULL DEFAULT (EXTRACT(epoch FROM now()) * 1000::numeric)::bigint,
128
+ application_version TEXT,
129
+ application_id TEXT,
130
+ class_name VARCHAR(255) DEFAULT NULL,
131
+ config_name VARCHAR(255) DEFAULT NULL,
132
+ recovery_attempts BIGINT DEFAULT 0,
133
+ queue_name TEXT,
134
+ workflow_timeout_ms BIGINT,
135
+ workflow_deadline_epoch_ms BIGINT,
136
+ inputs TEXT,
137
+ started_at_epoch_ms BIGINT,
138
+ deduplication_id TEXT,
139
+ priority INTEGER NOT NULL DEFAULT 0
140
+ );
141
+
142
+ CREATE INDEX workflow_status_created_at_index ON dbos.workflow_status (created_at);
143
+ CREATE INDEX workflow_status_executor_id_index ON dbos.workflow_status (executor_id);
144
+ CREATE INDEX workflow_status_status_index ON dbos.workflow_status (status);
145
+
146
+ ALTER TABLE dbos.workflow_status
147
+ ADD CONSTRAINT uq_workflow_status_queue_name_dedup_id
148
+ UNIQUE (queue_name, deduplication_id);
149
+
150
+ CREATE TABLE dbos.operation_outputs (
151
+ workflow_uuid TEXT NOT NULL,
152
+ function_id INTEGER NOT NULL,
153
+ function_name TEXT NOT NULL DEFAULT '',
154
+ output TEXT,
155
+ error TEXT,
156
+ child_workflow_id TEXT,
157
+ PRIMARY KEY (workflow_uuid, function_id),
158
+ FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
159
+ ON UPDATE CASCADE ON DELETE CASCADE
160
+ );
161
+
162
+ CREATE TABLE dbos.notifications (
163
+ destination_uuid TEXT NOT NULL,
164
+ topic TEXT,
165
+ message TEXT NOT NULL,
166
+ created_at_epoch_ms BIGINT NOT NULL DEFAULT (EXTRACT(epoch FROM now()) * 1000::numeric)::bigint,
167
+ message_uuid TEXT NOT NULL DEFAULT gen_random_uuid(), -- Built-in function
168
+ FOREIGN KEY (destination_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
169
+ ON UPDATE CASCADE ON DELETE CASCADE
170
+ );
171
+ CREATE INDEX idx_workflow_topic ON dbos.notifications (destination_uuid, topic);
172
+
173
+ -- Create notification function
174
+ CREATE OR REPLACE FUNCTION dbos.notifications_function() RETURNS TRIGGER AS $$
175
+ DECLARE
176
+ payload text := NEW.destination_uuid || '::' || NEW.topic;
177
+ BEGIN
178
+ PERFORM pg_notify('dbos_notifications_channel', payload);
179
+ RETURN NEW;
180
+ END;
181
+ $$ LANGUAGE plpgsql;
182
+
183
+ -- Create notification trigger
184
+ CREATE TRIGGER dbos_notifications_trigger
185
+ AFTER INSERT ON dbos.notifications
186
+ FOR EACH ROW EXECUTE FUNCTION dbos.notifications_function();
187
+
188
+ CREATE TABLE dbos.workflow_events (
189
+ workflow_uuid TEXT NOT NULL,
190
+ key TEXT NOT NULL,
191
+ value TEXT NOT NULL,
192
+ PRIMARY KEY (workflow_uuid, key),
193
+ FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
194
+ ON UPDATE CASCADE ON DELETE CASCADE
195
+ );
196
+
197
+ -- Create events function
198
+ CREATE OR REPLACE FUNCTION dbos.workflow_events_function() RETURNS TRIGGER AS $$
199
+ DECLARE
200
+ payload text := NEW.workflow_uuid || '::' || NEW.key;
201
+ BEGIN
202
+ PERFORM pg_notify('dbos_workflow_events_channel', payload);
203
+ RETURN NEW;
204
+ END;
205
+ $$ LANGUAGE plpgsql;
206
+
207
+ -- Create events trigger
208
+ CREATE TRIGGER dbos_workflow_events_trigger
209
+ AFTER INSERT ON dbos.workflow_events
210
+ FOR EACH ROW EXECUTE FUNCTION dbos.workflow_events_function();
211
+
212
+ CREATE TABLE dbos.streams (
213
+ workflow_uuid TEXT NOT NULL,
214
+ key TEXT NOT NULL,
215
+ value TEXT NOT NULL,
216
+ "offset" INTEGER NOT NULL,
217
+ PRIMARY KEY (workflow_uuid, key, "offset"),
218
+ FOREIGN KEY (workflow_uuid) REFERENCES dbos.workflow_status(workflow_uuid)
219
+ ON UPDATE CASCADE ON DELETE CASCADE
220
+ );
221
+
222
+ CREATE TABLE dbos.event_dispatch_kv (
223
+ service_name TEXT NOT NULL,
224
+ workflow_fn_name TEXT NOT NULL,
225
+ key TEXT NOT NULL,
226
+ value TEXT,
227
+ update_seq NUMERIC(38,0),
228
+ update_time NUMERIC(38,15),
229
+ PRIMARY KEY (service_name, workflow_fn_name, key)
230
+ );
231
+ """
232
+
233
+ dbos_migrations = [dbos_migration_one]
dbos/_sys_db.py CHANGED
@@ -1,10 +1,7 @@
1
1
  import datetime
2
2
  import functools
3
3
  import json
4
- import logging
5
- import os
6
4
  import random
7
- import re
8
5
  import threading
9
6
  import time
10
7
  from enum import Enum
@@ -25,11 +22,14 @@ from typing import (
25
22
  import psycopg
26
23
  import sqlalchemy as sa
27
24
  import sqlalchemy.dialects.postgresql as pg
28
- from alembic import command
29
- from alembic.config import Config
30
25
  from sqlalchemy.exc import DBAPIError
31
26
  from sqlalchemy.sql import func
32
27
 
28
+ from dbos._migration import (
29
+ ensure_dbos_schema,
30
+ run_alembic_migrations,
31
+ run_dbos_migrations,
32
+ )
33
33
  from dbos._utils import INTERNAL_QUEUE_NAME, retriable_postgres_exception
34
34
 
35
35
  from . import _serialization
@@ -386,41 +386,11 @@ class SystemDatabase:
386
386
  conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
387
387
  engine.dispose()
388
388
 
389
- # Run a schema migration for the system database
390
- migration_dir = os.path.join(
391
- os.path.dirname(os.path.realpath(__file__)), "_migrations"
392
- )
393
- alembic_cfg = Config()
394
- alembic_cfg.set_main_option("script_location", migration_dir)
395
- logging.getLogger("alembic").setLevel(logging.WARNING)
396
- # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
397
- escaped_conn_string = re.sub(
398
- r"%(?=[0-9A-Fa-f]{2})",
399
- "%%",
400
- self.engine.url.render_as_string(hide_password=False),
401
- )
402
- alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
403
- try:
404
- command.upgrade(alembic_cfg, "head")
405
- except Exception as e:
406
- dbos_logger.warning(
407
- f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
408
- )
409
- alembic_cfg = Config()
410
- alembic_cfg.set_main_option("script_location", migration_dir)
411
- # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
412
- escaped_conn_string = re.sub(
413
- r"%(?=[0-9A-Fa-f]{2})",
414
- "%%",
415
- self.engine.url.render_as_string(hide_password=False),
416
- )
417
- alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
418
- try:
419
- command.upgrade(alembic_cfg, "head")
420
- except Exception as e:
421
- dbos_logger.warning(
422
- f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
423
- )
389
+ using_dbos_migrations = ensure_dbos_schema(self.engine)
390
+ if not using_dbos_migrations:
391
+ # Complete the Alembic migrations, create the dbos_migrations table
392
+ run_alembic_migrations(self.engine)
393
+ run_dbos_migrations(self.engine)
424
394
 
425
395
  # Destroy the pool when finished
426
396
  def destroy(self) -> None:
dbos/_tracer.py CHANGED
@@ -24,10 +24,14 @@ class DBOSTracer:
24
24
  def __init__(self) -> None:
25
25
  self.app_id = os.environ.get("DBOS__APPID", None)
26
26
  self.provider: Optional[TracerProvider] = None
27
+ self.disable_otlp: bool = False
27
28
 
28
29
  def config(self, config: ConfigFile) -> None:
29
30
  self.otlp_attributes = config.get("telemetry", {}).get("otlp_attributes", {}) # type: ignore
30
- if not isinstance(trace.get_tracer_provider(), TracerProvider):
31
+ self.disable_otlp = config.get("telemetry", {}).get("disable_otlp", False) # type: ignore
32
+ if not self.disable_otlp and not isinstance(
33
+ trace.get_tracer_provider(), TracerProvider
34
+ ):
31
35
  resource = Resource(
32
36
  attributes={
33
37
  ResourceAttributes.SERVICE_NAME: config["name"],
dbos/cli/cli.py CHANGED
@@ -14,6 +14,7 @@ from rich import print as richprint
14
14
  from rich.prompt import IntPrompt
15
15
  from typing_extensions import Annotated, List
16
16
 
17
+ from dbos._context import SetWorkflowID
17
18
  from dbos._debug import debug_workflow, parse_start_command
18
19
  from dbos.cli.migration import grant_dbos_schema_permissions, migrate_dbos_databases
19
20
 
@@ -567,7 +568,9 @@ def resume(
567
568
  start_client(db_url=db_url).resume_workflow(workflow_id=workflow_id)
568
569
 
569
570
 
570
- @workflow.command(help="Restart a workflow from the beginning with a new id")
571
+ @workflow.command(
572
+ help="[DEPRECATED - Use fork instead] Restart a workflow from the beginning with a new id"
573
+ )
571
574
  def restart(
572
575
  workflow_id: Annotated[str, typer.Argument()],
573
576
  db_url: Annotated[
@@ -600,6 +603,22 @@ def fork(
600
603
  help="Restart from this step",
601
604
  ),
602
605
  ] = 1,
606
+ forked_workflow_id: Annotated[
607
+ typing.Optional[str],
608
+ typer.Option(
609
+ "--forked-workflow-id",
610
+ "-f",
611
+ help="Custom ID for the forked workflow",
612
+ ),
613
+ ] = None,
614
+ application_version: Annotated[
615
+ typing.Optional[str],
616
+ typer.Option(
617
+ "--application-version",
618
+ "-v",
619
+ help="Custom application version for the forked workflow",
620
+ ),
621
+ ] = None,
603
622
  db_url: Annotated[
604
623
  typing.Optional[str],
605
624
  typer.Option(
@@ -609,11 +628,21 @@ def fork(
609
628
  ),
610
629
  ] = None,
611
630
  ) -> None:
612
- status = (
613
- start_client(db_url=db_url)
614
- .fork_workflow(workflow_id=workflow_id, start_step=step)
615
- .get_status()
616
- )
631
+ client = start_client(db_url=db_url)
632
+
633
+ if forked_workflow_id is not None:
634
+ with SetWorkflowID(forked_workflow_id):
635
+ status = client.fork_workflow(
636
+ workflow_id=workflow_id,
637
+ start_step=step,
638
+ application_version=application_version,
639
+ ).get_status()
640
+ else:
641
+ status = client.fork_workflow(
642
+ workflow_id=workflow_id,
643
+ start_step=step,
644
+ application_version=application_version,
645
+ ).get_status()
617
646
  print(jsonpickle.encode(status, unpicklable=False))
618
647
 
619
648
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dbos
3
- Version: 1.12.0a2
3
+ Version: 1.13.0a3
4
4
  Summary: Ultra-lightweight durable execution in Python
5
5
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
6
6
  License: MIT
@@ -1,45 +1,47 @@
1
- dbos-1.12.0a2.dist-info/METADATA,sha256=oNte-Ruz9ts7sv1QqQRgCXRHZfi861HiknkNI1LUDJ4,13268
2
- dbos-1.12.0a2.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
3
- dbos-1.12.0a2.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
- dbos-1.12.0a2.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
1
+ dbos-1.13.0a3.dist-info/METADATA,sha256=EkcdC671U6Va-Oi2RP7Zs1v7Ag6ZaN99Gy4lil5JVss,13268
2
+ dbos-1.13.0a3.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
3
+ dbos-1.13.0a3.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
4
+ dbos-1.13.0a3.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
5
5
  dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
6
6
  dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
7
7
  dbos/_admin_server.py,sha256=e8ELhcDWqR3_PNobnNgUvLGh5lzZq0yFSF6dvtzoQRI,16267
8
+ dbos/_alembic_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
9
+ dbos/_alembic_migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
10
+ dbos/_alembic_migrations/versions/01ce9f07bd10_streaming.py,sha256=5F2tCCXbjP3ZrRFVBwJdaf4FHLlWuhQkMQiYmypfSNM,1123
11
+ dbos/_alembic_migrations/versions/04ca4f231047_workflow_queues_executor_id.py,sha256=ICLPl8CN9tQXMsLDsAj8z1TsL831-Z3F8jSBvrR-wyw,736
12
+ dbos/_alembic_migrations/versions/27ac6900c6ad_add_queue_dedup.py,sha256=56w1v6TdofW3V18iwm0MP0SAeSaAUPSS40HIcn6qYIE,1072
13
+ dbos/_alembic_migrations/versions/471b60d64126_dbos_migrations.py,sha256=XHlv_RFDoO3l3hjAVHm4uH2OA67e_BjQyW1D7HQqYLc,851
14
+ dbos/_alembic_migrations/versions/50f3227f0b4b_fix_job_queue.py,sha256=ZBYrtTdxy64HxIAlOes89fVIk2P1gNaJack7wuC_epg,873
15
+ dbos/_alembic_migrations/versions/5c361fc04708_added_system_tables.py,sha256=Xr9hBDJjkAtymlauOmAy00yUHj0VVUaEz7kNwEM9IwE,6403
16
+ dbos/_alembic_migrations/versions/66478e1b95e5_consolidate_queues.py,sha256=Qo9C8pFSdN0GPM0fN-DI5GPRegXq99Mig2me04IXfLI,1894
17
+ dbos/_alembic_migrations/versions/83f3732ae8e7_workflow_timeout.py,sha256=Q_R35pb8AfVI3sg5mzKwyoPfYB88Ychcc8gwxpM9R7A,1035
18
+ dbos/_alembic_migrations/versions/933e86bdac6a_add_queue_priority.py,sha256=yZX2kGF33skpXIBdMXtDNx-Nl_orFatKeHB8c-3K8-c,773
19
+ dbos/_alembic_migrations/versions/a3b18ad34abe_added_triggers.py,sha256=Rv0ZsZYZ_WdgGEULYsPfnp4YzaO5L198gDTgYY39AVA,2022
20
+ dbos/_alembic_migrations/versions/d76646551a6b_job_queue_limiter.py,sha256=8PyFi8rd6CN-mUro43wGhsg5wcQWKZPRHD6jw8R5pVc,986
21
+ dbos/_alembic_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4hGBC02Ptng1715roTjY3xiyzZU4,729
22
+ dbos/_alembic_migrations/versions/d994145b47b6_consolidate_inputs.py,sha256=_J0jP247fuo66fzOmLlKFO9FJ_CRBXlqa2lnLrcXugQ,672
23
+ dbos/_alembic_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
24
+ dbos/_alembic_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
8
25
  dbos/_app_db.py,sha256=bUXQqzc0C9PHh4Zl2tHfBrQWNBURdI7F7XXjCpYirmw,10959
9
26
  dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
10
27
  dbos/_client.py,sha256=_wMe4qnRSwiRZo74xdqTBetbHlIVy3vQifdSd7os1ZY,18213
11
28
  dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
12
29
  dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
13
- dbos/_context.py,sha256=8yZOTM1ehhk6URLa0EP9_20aOd6SZLhXBmcPwFEZDlA,26739
14
- dbos/_core.py,sha256=tKAahVW7StJ_KuW4e1fWnCSE5-2SaI7RZIMWZWcuzm8,48848
30
+ dbos/_context.py,sha256=IMboNgbCqTxfIORqeifE3da-Ce5siMz7MYMLPk5M-AQ,26851
31
+ dbos/_core.py,sha256=bEtIXf0OEzEwiS4sYLJR3vT-bmt5Hg1SBHUcCkaHmMQ,49005
15
32
  dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
16
- dbos/_dbos.py,sha256=bn5S_T6HZnDipYEVqhLq_F2Zo904fjT3J5oO15Frhrs,57715
17
- dbos/_dbos_config.py,sha256=er8oF3e9zGlEG9KntX7uBSXrDuVvROtkzVidzXjOwUU,21746
33
+ dbos/_dbos.py,sha256=kNTJLQn1uLnzbglyRZLSkJYOYhQseoVp84yaV1lAc00,58458
34
+ dbos/_dbos_config.py,sha256=qduDBmrpISNRZcCO-NcARQMeO2AzExyC2h3HCd9lRqU,22128
18
35
  dbos/_debug.py,sha256=99j2SChWmCPAlZoDmjsJGe77tpU2LEa8E2TtLAnnh7o,1831
19
36
  dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
20
37
  dbos/_error.py,sha256=GwO0Ng4d4iB52brY09-Ss6Cz_V28Xc0D0cRCzZ6XmNM,8688
21
38
  dbos/_event_loop.py,sha256=cvaFN9-II3MsHEOq8QoICc_8qSKrjikMlLfuhC3Y8Dk,2923
22
- dbos/_fastapi.py,sha256=T7YlVY77ASqyTqq0aAPclZ9YzlXdGTT0lEYSwSgt1EE,3151
39
+ dbos/_fastapi.py,sha256=D0H6TPYYTJ0LnkKn7t9sfPwPgDx6fO8AZQtvBcH3ibI,3277
23
40
  dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
24
41
  dbos/_kafka.py,sha256=Gm4fHWl7gYb-i5BMvwNwm5Km3z8zQpseqdMgqgFjlGI,4252
25
42
  dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
26
- dbos/_logger.py,sha256=Dp6bHZKUtcm5gWwYHj_HA5Wj5OMuJGUrpl2g2i4xDZg,4620
27
- dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
28
- dbos/_migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
29
- dbos/_migrations/versions/01ce9f07bd10_streaming.py,sha256=5F2tCCXbjP3ZrRFVBwJdaf4FHLlWuhQkMQiYmypfSNM,1123
30
- dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py,sha256=ICLPl8CN9tQXMsLDsAj8z1TsL831-Z3F8jSBvrR-wyw,736
31
- dbos/_migrations/versions/27ac6900c6ad_add_queue_dedup.py,sha256=56w1v6TdofW3V18iwm0MP0SAeSaAUPSS40HIcn6qYIE,1072
32
- dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py,sha256=ZBYrtTdxy64HxIAlOes89fVIk2P1gNaJack7wuC_epg,873
33
- dbos/_migrations/versions/5c361fc04708_added_system_tables.py,sha256=Xr9hBDJjkAtymlauOmAy00yUHj0VVUaEz7kNwEM9IwE,6403
34
- dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py,sha256=Qo9C8pFSdN0GPM0fN-DI5GPRegXq99Mig2me04IXfLI,1894
35
- dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py,sha256=Q_R35pb8AfVI3sg5mzKwyoPfYB88Ychcc8gwxpM9R7A,1035
36
- dbos/_migrations/versions/933e86bdac6a_add_queue_priority.py,sha256=yZX2kGF33skpXIBdMXtDNx-Nl_orFatKeHB8c-3K8-c,773
37
- dbos/_migrations/versions/a3b18ad34abe_added_triggers.py,sha256=Rv0ZsZYZ_WdgGEULYsPfnp4YzaO5L198gDTgYY39AVA,2022
38
- dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py,sha256=8PyFi8rd6CN-mUro43wGhsg5wcQWKZPRHD6jw8R5pVc,986
39
- dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4hGBC02Ptng1715roTjY3xiyzZU4,729
40
- dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py,sha256=_J0jP247fuo66fzOmLlKFO9FJ_CRBXlqa2lnLrcXugQ,672
41
- dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
42
- dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
43
+ dbos/_logger.py,sha256=vvBL4kzJWBPtfcHIuL0nJAq5cgpeo_D3IiIUDicWLOg,4732
44
+ dbos/_migration.py,sha256=C2ODGlZKjnGKS3N8jlokzRDdQIHlQMPHnEtBsSXYX34,8016
43
45
  dbos/_outcome.py,sha256=Kz3aL7517q9UEFTx3Cq9zzztjWyWVOx_08fZyHo9dvg,7035
44
46
  dbos/_queue.py,sha256=0kJTPwXy3nZ4Epzt-lHky9M9S4L31645drPGFR8fIJY,4854
45
47
  dbos/_recovery.py,sha256=K-wlFhdf4yGRm6cUzyhcTjQUS0xp2T5rdNMLiiBErYg,2882
@@ -50,7 +52,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
50
52
  dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
51
53
  dbos/_schemas/system_database.py,sha256=-dAKk-_Y3vzbpLT4ei-sIrBQgFyQiwPj1enZb1TYc8I,4943
52
54
  dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
53
- dbos/_sys_db.py,sha256=MWSrGeCUMgctOMxJ3SViswVzC9URUMy7COdI7vRSLv8,86256
55
+ dbos/_sys_db.py,sha256=YLoiAUiY1-kaGOVnJxoY_dVI2QeR50m_7ijlIdZYWFk,84768
54
56
  dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
55
57
  dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
56
58
  dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -61,14 +63,14 @@ dbos/_templates/dbos-db-starter/migrations/env.py.dbos,sha256=IBB_gz9RjC20HPfOTG
61
63
  dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
62
64
  dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=MpS7LGaJS0CpvsjhfDkp9EJqvMvVCjRPfUp4c0aE2ys,941
63
65
  dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
64
- dbos/_tracer.py,sha256=RnlcaOJEx_58hr2J9L9g6E7gjAHAeEtEGugJZmCwNfQ,2963
66
+ dbos/_tracer.py,sha256=8aOAVTBnj2q9DcOb5KJCfo56CVZ1ZvsWBscaNlIX-7k,3150
65
67
  dbos/_utils.py,sha256=uywq1QrjMwy17btjxW4bES49povlQwYwYbvKwMT6C2U,1575
66
68
  dbos/_workflow_commands.py,sha256=EmmAaQfRWeOZm_WPTznuU-O3he3jiSzzT9VpYrhxugE,4835
67
69
  dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
68
70
  dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
69
- dbos/cli/cli.py,sha256=ey7E-lNFgvUWhsd-mkFwZvTdYorv6hU2zsMOS23n1yQ,22214
71
+ dbos/cli/cli.py,sha256=btUbl0L_1cf46W8z0Hi6nPBCLaSqY9I4c1eZCG7obow,23128
70
72
  dbos/cli/migration.py,sha256=eI0sc0vYq2iUP3cBHPfTa6WHCyDBr8ld9nRxEZZzFrU,3316
71
73
  dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
72
74
  dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
73
75
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
74
- dbos-1.12.0a2.dist-info/RECORD,,
76
+ dbos-1.13.0a3.dist-info/RECORD,,
File without changes