dbos 1.2.0a6__py3-none-any.whl → 1.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
dbos/_client.py CHANGED
@@ -3,8 +3,6 @@ import sys
 import uuid
 from typing import Any, Generic, List, Optional, TypedDict, TypeVar
 
-from sqlalchemy import URL
-
 from dbos._app_db import ApplicationDatabase
 from dbos._context import MaxPriority, MinPriority
 
@@ -15,6 +13,7 @@ else:
 
 from dbos import _serialization
 from dbos._dbos import WorkflowHandle, WorkflowHandleAsync
+from dbos._dbos_config import is_valid_database_url
 from dbos._error import DBOSException, DBOSNonExistentWorkflowError
 from dbos._registrations import DEFAULT_MAX_RECOVERY_ATTEMPTS
 from dbos._serialization import WorkflowInputs
@@ -99,6 +98,7 @@ class WorkflowHandleClientAsyncPolling(Generic[R]):
 
 class DBOSClient:
     def __init__(self, database_url: str, *, system_database: Optional[str] = None):
+        assert is_valid_database_url(database_url)
         # We only create database connections but do not run migrations
         self._sys_db = SystemDatabase(
             database_url=database_url,
@@ -141,6 +141,11 @@ class DBOSClient:
             "priority": options.get("priority"),
         }
 
+        inputs: WorkflowInputs = {
+            "args": args,
+            "kwargs": kwargs,
+        }
+
         status: WorkflowStatusInternal = {
             "workflow_uuid": workflow_id,
             "status": WorkflowStatusString.ENQUEUED.value,
@@ -163,18 +168,18 @@ class DBOSClient:
                 int(workflow_timeout * 1000) if workflow_timeout is not None else None
             ),
             "workflow_deadline_epoch_ms": None,
-        }
-
-        inputs: WorkflowInputs = {
-            "args": args,
-            "kwargs": kwargs,
+            "deduplication_id": enqueue_options_internal["deduplication_id"],
+            "priority": (
+                enqueue_options_internal["priority"]
+                if enqueue_options_internal["priority"] is not None
+                else 0
+            ),
+            "inputs": _serialization.serialize_args(inputs),
         }
 
         self._sys_db.init_workflow(
             status,
-            _serialization.serialize_args(inputs),
             max_recovery_attempts=None,
-            enqueue_options=enqueue_options_internal,
         )
         return workflow_id
 
@@ -230,6 +235,9 @@ class DBOSClient:
             "app_version": None,
             "workflow_timeout_ms": None,
             "workflow_deadline_epoch_ms": None,
+            "deduplication_id": None,
+            "priority": 0,
+            "inputs": _serialization.serialize_args({"args": (), "kwargs": {}}),
         }
         with self._sys_db.engine.begin() as conn:
            self._sys_db._insert_workflow_status(
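DBOSClient now validates its connection URL eagerly and writes a workflow's inputs, deduplication ID, and priority onto the single workflow_status row it inserts. A minimal usage sketch (the connection string is illustrative, and importing DBOSClient from the top-level dbos package assumes its public export):

from dbos import DBOSClient

# Construction now asserts is_valid_database_url(database_url), so a URL
# missing a username, password, host, or database name fails immediately
# rather than on first query.
client = DBOSClient("postgresql://alice:secret@localhost:5432/appdb")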
dbos/_core.py CHANGED
@@ -252,6 +252,10 @@ def _init_workflow(
             raise DBOSNonExistentWorkflowError(wfid)
         return get_status_result
 
+    # If we have a class name, the first arg is the instance and do not serialize
+    if class_name is not None:
+        inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
+
     # Initialize a workflow status object from the context
     status: WorkflowStatusInternal = {
         "workflow_uuid": wfid,
@@ -279,18 +283,25 @@ def _init_workflow(
         "updated_at": None,
         "workflow_timeout_ms": workflow_timeout_ms,
         "workflow_deadline_epoch_ms": workflow_deadline_epoch_ms,
+        "deduplication_id": (
+            enqueue_options["deduplication_id"] if enqueue_options is not None else None
+        ),
+        "priority": (
+            (
+                enqueue_options["priority"]
+                if enqueue_options["priority"] is not None
+                else 0
+            )
+            if enqueue_options is not None
+            else 0
+        ),
+        "inputs": _serialization.serialize_args(inputs),
     }
 
-    # If we have a class name, the first arg is the instance and do not serialize
-    if class_name is not None:
-        inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
-
     # Synchronously record the status and inputs for workflows
     wf_status, workflow_deadline_epoch_ms = dbos._sys_db.init_workflow(
         status,
-        _serialization.serialize_args(inputs),
         max_recovery_attempts=max_recovery_attempts,
-        enqueue_options=enqueue_options,
     )
 
     if workflow_deadline_epoch_ms is not None:
@@ -342,13 +353,12 @@ def _get_wf_invoke_func(
             return recorded_result
         try:
             output = func()
-            status["status"] = "SUCCESS"
-            status["output"] = _serialization.serialize(output)
             if not dbos.debug_mode:
-                if status["queue_name"] is not None:
-                    queue = dbos._registry.queue_info_map[status["queue_name"]]
-                    dbos._sys_db.remove_from_queue(status["workflow_uuid"], queue)
-                dbos._sys_db.update_workflow_status(status)
+                dbos._sys_db.update_workflow_outcome(
+                    status["workflow_uuid"],
+                    "SUCCESS",
+                    output=_serialization.serialize(output),
+                )
             return output
         except DBOSWorkflowConflictIDError:
             # Await the workflow result
@@ -357,13 +367,12 @@ def _get_wf_invoke_func(
         except DBOSWorkflowCancelledError as error:
             raise
         except Exception as error:
-            status["status"] = "ERROR"
-            status["error"] = _serialization.serialize_exception(error)
             if not dbos.debug_mode:
-                if status["queue_name"] is not None:
-                    queue = dbos._registry.queue_info_map[status["queue_name"]]
-                    dbos._sys_db.remove_from_queue(status["workflow_uuid"], queue)
-                dbos._sys_db.update_workflow_status(status)
+                dbos._sys_db.update_workflow_outcome(
+                    status["workflow_uuid"],
+                    "ERROR",
+                    error=_serialization.serialize_exception(error),
+                )
             raise
 
     return persist
@@ -432,16 +441,13 @@ def execute_workflow_by_id(dbos: "DBOS", workflow_id: str) -> "WorkflowHandle[An
     status = dbos._sys_db.get_workflow_status(workflow_id)
     if not status:
         raise DBOSRecoveryError(workflow_id, "Workflow status not found")
-    inputs = dbos._sys_db.get_workflow_inputs(workflow_id)
-    if not inputs:
-        raise DBOSRecoveryError(workflow_id, "Workflow inputs not found")
+    inputs = _serialization.deserialize_args(status["inputs"])
     wf_func = dbos._registry.workflow_info_map.get(status["name"], None)
     if not wf_func:
         raise DBOSWorkflowFunctionNotFoundError(
             workflow_id, "Workflow function not found"
         )
     with DBOSContextEnsure():
-        ctx = assert_current_dbos_context()
         # If this function belongs to a configured class, add that class instance as its first argument
         if status["config_name"] is not None:
            config_name = status["config_name"]
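With inputs folded into the workflow status record, recovery no longer reads a separate workflow_inputs table: execute_workflow_by_id deserializes status["inputs"] directly. A sketch of the round trip these hunks rely on, using the serialization helpers the diff itself calls:

from dbos import _serialization

inputs = {"args": (1, 2), "kwargs": {"retries": 3}}
encoded = _serialization.serialize_args(inputs)      # stored in status["inputs"]
decoded = _serialization.deserialize_args(encoded)   # read back during recovery
assert decoded["kwargs"]["retries"] == 3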
dbos/_dbos.py CHANGED
@@ -92,7 +92,6 @@ from ._dbos_config import (
     DBOSConfig,
     overwrite_config,
     process_config,
-    set_env_vars,
     translate_dbos_config_to_config_file,
 )
 from ._error import (
@@ -101,7 +100,13 @@ from ._error import (
     DBOSNonExistentWorkflowError,
 )
 from ._event_loop import BackgroundEventLoop
-from ._logger import add_otlp_to_all_loggers, config_logger, dbos_logger, init_logger
+from ._logger import (
+    add_otlp_to_all_loggers,
+    add_transformer_to_all_loggers,
+    config_logger,
+    dbos_logger,
+    init_logger,
+)
 from ._workflow_commands import get_workflow, list_workflow_steps
 
 # Most DBOS functions are just any callable F, so decorators / wrappers work on F
@@ -216,6 +221,8 @@ class DBOSRegistry:
         sources = sorted(
             [inspect.getsource(wf) for wf in self.workflow_info_map.values()]
         )
+        # Different DBOS versions should produce different app versions
+        sources.append(GlobalParams.dbos_version)
         for source in sources:
             hasher.update(source.encode("utf-8"))
         return hasher.hexdigest()
@@ -298,7 +305,6 @@ class DBOS:
 
         self._launched: bool = False
         self._debug_mode: bool = False
-        self._configured_threadpool: bool = False
         self._sys_db_field: Optional[SystemDatabase] = None
         self._app_db_field: Optional[ApplicationDatabase] = None
         self._registry: DBOSRegistry = _get_or_create_dbos_registry()
@@ -329,7 +335,6 @@ class DBOS:
         else:
             raise ValueError("No valid configuration was loaded.")
 
-        set_env_vars(self._config)
         config_logger(self._config)
         dbos_tracer.config(self._config)
         dbos_logger.info(f"Initializing DBOS (v{GlobalParams.dbos_version})")
@@ -412,7 +417,7 @@ class DBOS:
             GlobalParams.executor_id = str(uuid.uuid4())
         dbos_logger.info(f"Executor ID: {GlobalParams.executor_id}")
         dbos_logger.info(f"Application version: {GlobalParams.app_version}")
-        self._executor_field = ThreadPoolExecutor(max_workers=64)
+        self._executor_field = ThreadPoolExecutor(max_workers=sys.maxsize)
         self._background_event_loop.start()
         assert self._config["database_url"] is not None
         assert self._config["database"]["sys_db_engine_kwargs"] is not None
@@ -515,6 +520,7 @@ class DBOS:
             for handler in dbos_logger.handlers:
                 handler.flush()
             add_otlp_to_all_loggers()
+            add_transformer_to_all_loggers()
         except Exception:
             dbos_logger.error(f"DBOS failed to launch: {traceback.format_exc()}")
             raise
@@ -943,11 +949,8 @@ class DBOS:
 
         This function is called before the first call to asyncio.to_thread.
         """
-        if _get_dbos_instance()._configured_threadpool:
-            return
         loop = asyncio.get_running_loop()
         loop.set_default_executor(_get_dbos_instance()._executor)
-        _get_dbos_instance()._configured_threadpool = True
 
     @classmethod
     def resume_workflow(cls, workflow_id: str) -> WorkflowHandle[Any]:
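The registry's computed application version now mixes the DBOS library version into the hash of workflow sources, so upgrading DBOS yields a new app version even when no workflow changed. A standalone sketch of that scheme (the function name and SHA-256 digest are assumptions; the hasher is created outside this hunk, and only the sources-plus-version step appears in the diff):

import hashlib
import inspect

def compute_app_version(workflows, dbos_version: str) -> str:
    hasher = hashlib.sha256()  # assumed digest
    sources = sorted(inspect.getsource(wf) for wf in workflows)
    sources.append(dbos_version)  # mirrors sources.append(GlobalParams.dbos_version)
    for source in sources:
        hasher.update(source.encode("utf-8"))
    return hasher.hexdigest()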
dbos/_dbos_config.py CHANGED
@@ -91,7 +91,7 @@ class ConfigFile(TypedDict, total=False):
     Data structure containing the DBOS Configuration.
 
     This configuration data is typically loaded from `dbos-config.yaml`.
-    See `https://docs.dbos.dev/api-reference/configuration`_
+    See `https://docs.dbos.dev/python/reference/configuration#dbos-configuration-file`
 
     Attributes:
         name (str): Application name
@@ -329,17 +329,9 @@ def process_config(
     if data.get("database_url") is not None and data["database_url"] != "":
         # Parse the db string and check required fields
         assert data["database_url"] is not None
+        assert is_valid_database_url(data["database_url"])
+
         url = make_url(data["database_url"])
-        required_fields = [
-            ("username", "Username must be specified in the connection URL"),
-            ("password", "Password must be specified in the connection URL"),
-            ("host", "Host must be specified in the connection URL"),
-            ("database", "Database name must be specified in the connection URL"),
-        ]
-        for field_name, error_message in required_fields:
-            field_value = getattr(url, field_name, None)
-            if not field_value:
-                raise DBOSInitializationError(error_message)
 
         if not data["database"].get("sys_db_name"):
             assert url.database is not None
@@ -385,6 +377,9 @@ def process_config(
     if not silent and logs["logLevel"] == "INFO" or logs["logLevel"] == "DEBUG":
         log_url = make_url(data["database_url"]).render_as_string(hide_password=True)
         print(f"[bold blue]Using database connection string: {log_url}[/bold blue]")
+        print(
+            f"[bold blue]Database engine parameters: {data['database']['db_engine_kwargs']}[/bold blue]"
+        )
 
     # Return data as ConfigFile type
     return data
@@ -432,6 +427,21 @@ def configure_db_engine_parameters(
     data["sys_db_engine_kwargs"] = system_engine_kwargs
 
 
+def is_valid_database_url(database_url: str) -> bool:
+    url = make_url(database_url)
+    required_fields = [
+        ("username", "Username must be specified in the connection URL"),
+        ("password", "Password must be specified in the connection URL"),
+        ("host", "Host must be specified in the connection URL"),
+        ("database", "Database name must be specified in the connection URL"),
+    ]
+    for field_name, error_message in required_fields:
+        field_value = getattr(url, field_name, None)
+        if not field_value:
+            raise DBOSInitializationError(error_message)
+    return True
+
+
 def _is_valid_app_name(name: str) -> bool:
     name_len = len(name)
     if name_len < 3 or name_len > 30:
@@ -445,12 +455,6 @@ def _app_name_to_db_name(app_name: str) -> str:
     return name if not name[0].isdigit() else f"_{name}"
 
 
-def set_env_vars(config: ConfigFile) -> None:
-    for env, value in config.get("env", {}).items():
-        if value is not None:
-            os.environ[env] = str(value)
-
-
 def overwrite_config(provided_config: ConfigFile) -> ConfigFile:
     # Load the DBOS configuration file and force the use of:
     # 1. The database url provided by DBOS_DATABASE_URL
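Note that is_valid_database_url never returns False: it either returns True or raises DBOSInitializationError naming the missing URL component, which is why callers can wrap it in a bare assert. A usage sketch (the URLs are illustrative):

from dbos._dbos_config import is_valid_database_url
from dbos._error import DBOSInitializationError

is_valid_database_url("postgresql://alice:secret@db.example.com:5432/appdb")  # True

try:
    is_valid_database_url("postgresql://db.example.com/appdb")  # no credentials
except DBOSInitializationError as e:
    print(e)  # "Username must be specified in the connection URL"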
dbos/_event_loop.py CHANGED
@@ -1,6 +1,5 @@
 import asyncio
 import threading
-from concurrent.futures import ThreadPoolExecutor
 from typing import Any, Coroutine, Optional, TypeVar
 
 
@@ -34,17 +33,15 @@ class BackgroundEventLoop:
 
     def _run_event_loop(self) -> None:
         self._loop = asyncio.new_event_loop()
-        with ThreadPoolExecutor(max_workers=64) as thread_pool:
-            self._loop.set_default_executor(thread_pool)
-            asyncio.set_event_loop(self._loop)
+        asyncio.set_event_loop(self._loop)
 
-            self._running = True
-            self._ready.set()  # Signal that the loop is ready
+        self._running = True
+        self._ready.set()  # Signal that the loop is ready
 
-            try:
-                self._loop.run_forever()
-            finally:
-                self._loop.close()
+        try:
+            self._loop.run_forever()
+        finally:
+            self._loop.close()
 
     async def _shutdown(self) -> None:
        if self._loop is None:
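With the nested ThreadPoolExecutor gone, the background loop thread reduces to the standard run-forever pattern. A self-contained sketch of that pattern (names here are illustrative, not the BackgroundEventLoop API):

import asyncio
import threading

loop = asyncio.new_event_loop()

def run_event_loop() -> None:
    asyncio.set_event_loop(loop)
    try:
        loop.run_forever()
    finally:
        loop.close()

threading.Thread(target=run_event_loop, daemon=True).start()

# Callers on other threads submit coroutines to the loop:
future = asyncio.run_coroutine_threadsafe(asyncio.sleep(0, result=42), loop)
print(future.result())  # 42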
dbos/_logger.py CHANGED
@@ -16,7 +16,7 @@ if TYPE_CHECKING:
     from ._dbos_config import ConfigFile
 
 dbos_logger = logging.getLogger("dbos")
-_otlp_handler, _otlp_transformer = None, None
+_otlp_handler, _dbos_log_transformer = None, None
 
 
 class DBOSLogTransformer(logging.Filter):
@@ -93,25 +93,31 @@ def config_logger(config: "ConfigFile") -> None:
     global _otlp_handler
     _otlp_handler = LoggingHandler(logger_provider=log_provider)
 
-    # Attach DBOS-specific attributes to all log entries.
-    global _otlp_transformer
-    _otlp_transformer = DBOSLogTransformer()
-
     # Direct DBOS logs to OTLP
     dbos_logger.addHandler(_otlp_handler)
-    dbos_logger.addFilter(_otlp_transformer)
+
+    # Attach DBOS-specific attributes to all log entries.
+    global _dbos_log_transformer
+    _dbos_log_transformer = DBOSLogTransformer()
+    dbos_logger.addFilter(_dbos_log_transformer)
 
 
 def add_otlp_to_all_loggers() -> None:
-    if _otlp_handler is not None and _otlp_transformer is not None:
+    if _otlp_handler is not None:
         root = logging.root
-
         root.addHandler(_otlp_handler)
-        root.addFilter(_otlp_transformer)
-
         for logger_name in root.manager.loggerDict:
             if logger_name != dbos_logger.name:
                 logger = logging.getLogger(logger_name)
                 if not logger.propagate:
                     logger.addHandler(_otlp_handler)
-                    logger.addFilter(_otlp_transformer)
+
+
+def add_transformer_to_all_loggers() -> None:
+    if _dbos_log_transformer is not None:
+        root = logging.root
+        root.addFilter(_dbos_log_transformer)
+        for logger_name in root.manager.loggerDict:
+            if logger_name != dbos_logger.name:
+                logger = logging.getLogger(logger_name)
+                logger.addFilter(_dbos_log_transformer)
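The handler and the filter now propagate separately: the OTLP handler is only attached to loggers that do not propagate (avoiding double export through the root), while the DBOSLogTransformer filter is attached to every logger, since logging.Filter objects on a logger apply only to records logged directly on it, not to propagated ones. A minimal sketch of the same split with a stand-in filter:

import logging

class AddContext(logging.Filter):  # stand-in for DBOSLogTransformer
    def filter(self, record: logging.LogRecord) -> bool:
        record.app_id = "my-app"  # annotate every record that passes through
        return True

transformer = AddContext()
logging.root.addFilter(transformer)
for name in logging.root.manager.loggerDict:
    logging.getLogger(name).addFilter(transformer)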
dbos/_migrations/versions/66478e1b95e5_consolidate_queues.py ADDED
@@ -0,0 +1,71 @@
+"""consolidate_queues
+
+Revision ID: 66478e1b95e5
+Revises: 933e86bdac6a
+Create Date: 2025-05-21 10:14:25.674613
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "66478e1b95e5"
+down_revision: Union[str, None] = "933e86bdac6a"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # Add new columns to workflow_status table
+    op.add_column(
+        "workflow_status",
+        sa.Column("started_at_epoch_ms", sa.BigInteger(), nullable=True),
+        schema="dbos",
+    )
+
+    op.add_column(
+        "workflow_status",
+        sa.Column("deduplication_id", sa.Text(), nullable=True),
+        schema="dbos",
+    )
+
+    op.add_column(
+        "workflow_status",
+        sa.Column(
+            "priority", sa.Integer(), nullable=False, server_default=sa.text("'0'::int")
+        ),
+        schema="dbos",
+    )
+
+    # Add unique constraint for deduplication_id
+    op.create_unique_constraint(
+        "uq_workflow_status_queue_name_dedup_id",
+        "workflow_status",
+        ["queue_name", "deduplication_id"],
+        schema="dbos",
+    )
+
+    # Add index on status field
+    op.create_index(
+        "workflow_status_status_index", "workflow_status", ["status"], schema="dbos"
+    )
+
+
+def downgrade() -> None:
+    # Drop indexes
+    op.drop_index(
+        "workflow_status_status_index", table_name="workflow_status", schema="dbos"
+    )
+
+    # Drop unique constraint
+    op.drop_constraint(
+        "uq_workflow_status_queue_name_dedup_id", "workflow_status", schema="dbos"
+    )
+
+    # Drop columns
+    op.drop_column("workflow_status", "priority", schema="dbos")
+    op.drop_column("workflow_status", "deduplication_id", schema="dbos")
+    op.drop_column("workflow_status", "started_at_epoch_ms", schema="dbos")
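These schema changes can be applied or rolled back like any other Alembic revision. A hedged sketch of driving them programmatically (the alembic.ini path and connection string are assumptions about the deployment, not part of the package):

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # hypothetical: points at the dbos migration scripts
cfg.set_main_option("sqlalchemy.url", "postgresql://alice:secret@localhost/dbos_sys")
command.upgrade(cfg, "66478e1b95e5")    # apply consolidate_queues
command.downgrade(cfg, "933e86bdac6a")  # revert to the prior revision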
dbos/_migrations/versions/d994145b47b6_consolidate_inputs.py ADDED
@@ -0,0 +1,30 @@
+"""consolidate_inputs
+
+Revision ID: d994145b47b6
+Revises: 66478e1b95e5
+Create Date: 2025-05-23 08:09:15.515009
+
+"""
+
+from typing import Sequence, Union
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision: str = "d994145b47b6"
+down_revision: Union[str, None] = "66478e1b95e5"
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    op.add_column(
+        "workflow_status",
+        sa.Column("inputs", sa.Text(), nullable=True),
+        schema="dbos",
+    )
+
+
+def downgrade() -> None:
+    op.drop_column("workflow_status", "inputs", schema="dbos")
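Because the new inputs column is nullable, rows written before this migration carry NULL. A small defensive sketch of reading it (the load_inputs helper is hypothetical; deserialize_args is the helper the diff itself uses):

from typing import Any, Dict, Optional

from dbos import _serialization

def load_inputs(raw: Optional[str]) -> Dict[str, Any]:
    # Hypothetical guard: pre-migration rows have inputs IS NULL.
    if raw is None:
        return {"args": (), "kwargs": {}}
    return _serialization.deserialize_args(raw)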
dbos/_queue.py CHANGED
@@ -5,8 +5,9 @@ from typing import TYPE_CHECKING, Any, Callable, Coroutine, Optional, TypedDict
 from psycopg import errors
 from sqlalchemy.exc import OperationalError
 
+from dbos._context import get_local_dbos_context
 from dbos._logger import dbos_logger
-from dbos._utils import GlobalParams
+from dbos._utils import INTERNAL_QUEUE_NAME, GlobalParams
 
 from ._core import P, R, execute_workflow_by_id, start_workflow, start_workflow_async
 
@@ -41,6 +42,7 @@ class Queue:
         limiter: Optional[QueueRateLimit] = None,
         *,  # Disable positional arguments from here on
         worker_concurrency: Optional[int] = None,
+        priority_enabled: bool = False,
     ) -> None:
         if (
             worker_concurrency is not None
@@ -54,10 +56,11 @@ class Queue:
         self.concurrency = concurrency
         self.worker_concurrency = worker_concurrency
         self.limiter = limiter
+        self.priority_enabled = priority_enabled
         from ._dbos import _get_or_create_dbos_registry
 
         registry = _get_or_create_dbos_registry()
-        if self.name in registry.queue_info_map:
+        if self.name in registry.queue_info_map and self.name != INTERNAL_QUEUE_NAME:
             dbos_logger.warning(f"Queue {name} has already been declared")
         registry.queue_info_map[self.name] = self
 
@@ -66,6 +69,16 @@ class Queue:
     ) -> "WorkflowHandle[R]":
         from ._dbos import _get_dbos_instance
 
+        context = get_local_dbos_context()
+        if (
+            context is not None
+            and context.priority is not None
+            and not self.priority_enabled
+        ):
+            dbos_logger.warning(
+                f"Priority is not enabled for queue {self.name}. Setting priority will not have any effect."
+            )
+
         dbos = _get_dbos_instance()
         return start_workflow(dbos, func, self.name, False, *args, **kwargs)
 
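Priorities are opt-in per queue: enqueueing with a priority set on a queue created without priority_enabled=True now logs the warning above and the priority is ignored. A usage sketch (assuming the SetEnqueueOptions context manager exported by the dbos package for setting enqueue-time options):

from dbos import DBOS, Queue, SetEnqueueOptions

queue = Queue("reports", priority_enabled=True)

@DBOS.workflow()
def generate_report(month: str) -> str:
    return f"report for {month}"

# Lower numbers dequeue first; on a queue without priority_enabled this
# would emit the "Priority is not enabled" warning instead.
with SetEnqueueOptions(priority=1):
    handle = queue.enqueue(generate_report, "2025-05")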
dbos/_schemas/system_database.py CHANGED
@@ -1,6 +1,5 @@
 from sqlalchemy import (
     BigInteger,
-    Boolean,
     Column,
     ForeignKey,
     Index,
@@ -57,8 +56,18 @@ class SystemSchema:
         Column("queue_name", Text, nullable=True),
         Column("workflow_timeout_ms", BigInteger, nullable=True),
         Column("workflow_deadline_epoch_ms", BigInteger, nullable=True),
+        Column("started_at_epoch_ms", BigInteger(), nullable=True),
+        Column("deduplication_id", Text(), nullable=True),
+        Column("inputs", Text()),
+        Column("priority", Integer(), nullable=False, server_default=text("'0'::int")),
         Index("workflow_status_created_at_index", "created_at"),
         Index("workflow_status_executor_id_index", "executor_id"),
+        Index("workflow_status_status_index", "status"),
+        UniqueConstraint(
+            "queue_name",
+            "deduplication_id",
+            name="uq_workflow_status_queue_name_dedup_id",
+        ),
     )
 
     operation_outputs = Table(
@@ -80,21 +89,6 @@ class SystemSchema:
         PrimaryKeyConstraint("workflow_uuid", "function_id"),
     )
 
-    workflow_inputs = Table(
-        "workflow_inputs",
-        metadata_obj,
-        Column(
-            "workflow_uuid",
-            Text,
-            ForeignKey(
-                "workflow_status.workflow_uuid", onupdate="CASCADE", ondelete="CASCADE"
-            ),
-            primary_key=True,
-            nullable=False,
-        ),
-        Column("inputs", Text, nullable=False),
-    )
-
     notifications = Table(
         "notifications",
         metadata_obj,
@@ -138,54 +132,3 @@ class SystemSchema:
         Column("value", Text, nullable=False),
         PrimaryKeyConstraint("workflow_uuid", "key"),
     )
-
-    scheduler_state = Table(
-        "scheduler_state",
-        metadata_obj,
-        Column("workflow_fn_name", Text, primary_key=True, nullable=False),
-        Column("last_run_time", BigInteger, nullable=False),
-    )
-
-    workflow_queue = Table(
-        "workflow_queue",
-        metadata_obj,
-        Column(
-            "workflow_uuid",
-            Text,
-            ForeignKey(
-                "workflow_status.workflow_uuid", onupdate="CASCADE", ondelete="CASCADE"
-            ),
-            nullable=False,
-            primary_key=True,
-        ),
-        # Column("executor_id", Text), # This column is deprecated. Do *not* use it.
-        Column("queue_name", Text, nullable=False),
-        Column(
-            "created_at_epoch_ms",
-            BigInteger,
-            nullable=False,
-            server_default=text("(EXTRACT(epoch FROM now()) * 1000::numeric)::bigint"),
-        ),
-        Column(
-            "started_at_epoch_ms",
-            BigInteger(),
-        ),
-        Column(
-            "completed_at_epoch_ms",
-            BigInteger(),
-        ),
-        Column(
-            "deduplication_id",
-            Text,
-            nullable=True,
-        ),
-        Column(
-            "priority",
-            Integer,
-            nullable=False,
-            server_default=text("'0'::int"),
-        ),
-        UniqueConstraint(
-            "queue_name", "deduplication_id", name="uq_workflow_queue_name_dedup_id"
-        ),
-    )
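With workflow_queue and scheduler_state dropped, all queue state lives on workflow_status. A sketch of inspecting it through the consolidated schema (the module path and connection string are assumptions):

import sqlalchemy as sa

from dbos._schemas.system_database import SystemSchema

engine = sa.create_engine("postgresql://alice:secret@localhost/dbos_sys")
ws = SystemSchema.workflow_status
with engine.connect() as conn:
    rows = conn.execute(
        sa.select(ws.c.workflow_uuid, ws.c.priority, ws.c.deduplication_id)
        .where(ws.c.queue_name == "reports")
        .order_by(ws.c.priority)
    )
    for row in rows:
        print(row.workflow_uuid, row.priority, row.deduplication_id)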