dbos 0.23.0a5__py3-none-any.whl → 0.23.0a9__py3-none-any.whl

dbos/__main__.py ADDED
@@ -0,0 +1,26 @@
+ import re
+ import sys
+ from typing import NoReturn, Optional, Union
+
+ from dbos.cli.cli import app
+
+
+ def main() -> NoReturn:
+     # Modify sys.argv[0] to remove script or executable extensions
+     sys.argv[0] = re.sub(r"(-script\.pyw|\.exe)?$", "", sys.argv[0])
+
+     retval: Optional[Union[str, int]] = 1
+     try:
+         app()
+         retval = None
+     except SystemExit as e:
+         retval = e.code
+     except Exception as e:
+         print(f"Error: {e}", file=sys.stderr)
+         retval = 1
+     finally:
+         sys.exit(retval)
+
+
+ if __name__ == "__main__":
+     main()
dbos/_app_db.py CHANGED
@@ -27,29 +27,30 @@ class RecordedResult(TypedDict):
  
  class ApplicationDatabase:
  
-     def __init__(self, config: ConfigFile):
+     def __init__(self, config: ConfigFile, *, debug_mode: bool = False):
          self.config = config
  
          app_db_name = config["database"]["app_db_name"]
  
          # If the application database does not already exist, create it
-         postgres_db_url = sa.URL.create(
-             "postgresql+psycopg",
-             username=config["database"]["username"],
-             password=config["database"]["password"],
-             host=config["database"]["hostname"],
-             port=config["database"]["port"],
-             database="postgres",
-         )
-         postgres_db_engine = sa.create_engine(postgres_db_url)
-         with postgres_db_engine.connect() as conn:
-             conn.execution_options(isolation_level="AUTOCOMMIT")
-             if not conn.execute(
-                 sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
-                 parameters={"db_name": app_db_name},
-             ).scalar():
-                 conn.execute(sa.text(f"CREATE DATABASE {app_db_name}"))
-         postgres_db_engine.dispose()
+         if not debug_mode:
+             postgres_db_url = sa.URL.create(
+                 "postgresql+psycopg",
+                 username=config["database"]["username"],
+                 password=config["database"]["password"],
+                 host=config["database"]["hostname"],
+                 port=config["database"]["port"],
+                 database="postgres",
+             )
+             postgres_db_engine = sa.create_engine(postgres_db_url)
+             with postgres_db_engine.connect() as conn:
+                 conn.execution_options(isolation_level="AUTOCOMMIT")
+                 if not conn.execute(
+                     sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                     parameters={"db_name": app_db_name},
+                 ).scalar():
+                     conn.execute(sa.text(f"CREATE DATABASE {app_db_name}"))
+             postgres_db_engine.dispose()
  
          # Create a connection pool for the application database
          app_db_url = sa.URL.create(
@@ -64,14 +65,16 @@ class ApplicationDatabase:
              app_db_url, pool_size=20, max_overflow=5, pool_timeout=30
          )
          self.sessionmaker = sessionmaker(bind=self.engine)
+         self.debug_mode = debug_mode
  
          # Create the dbos schema and transaction_outputs table in the application database
-         with self.engine.begin() as conn:
-             schema_creation_query = sa.text(
-                 f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
-             )
-             conn.execute(schema_creation_query)
-         ApplicationSchema.metadata_obj.create_all(self.engine)
+         if not debug_mode:
+             with self.engine.begin() as conn:
+                 schema_creation_query = sa.text(
+                     f"CREATE SCHEMA IF NOT EXISTS {ApplicationSchema.schema}"
+                 )
+                 conn.execute(schema_creation_query)
+             ApplicationSchema.metadata_obj.create_all(self.engine)
  
      def destroy(self) -> None:
          self.engine.dispose()
@@ -100,6 +103,8 @@ class ApplicationDatabase:
              raise
  
      def record_transaction_error(self, output: TransactionResultInternal) -> None:
+         if self.debug_mode:
+             raise Exception("called record_transaction_error in debug mode")
          try:
              with self.engine.begin() as conn:
                  conn.execute(
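A minimal usage sketch of the new flag, assuming `config` is a loaded ConfigFile: with debug_mode=True the constructor still builds the connection pool, but skips both the CREATE DATABASE check and the schema DDL, and write paths such as record_transaction_error raise instead of touching the database.

from dbos import load_config
from dbos._app_db import ApplicationDatabase

config = load_config()  # assumes a dbos-config.yaml in the working directory
app_db = ApplicationDatabase(config, debug_mode=True)  # no database created or migrated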
dbos/_core.py CHANGED
@@ -186,21 +186,31 @@ def _init_workflow(
          inputs = {"args": inputs["args"][1:], "kwargs": inputs["kwargs"]}
  
      wf_status = status["status"]
-     if temp_wf_type != "transaction" or queue is not None:
-         # Synchronously record the status and inputs for workflows and single-step workflows
-         # We also have to do this for single-step workflows because of the foreign key constraint on the operation outputs table
-         # TODO: Make this transactional (and with the queue step below)
-         wf_status = dbos._sys_db.insert_workflow_status(
-             status, max_recovery_attempts=max_recovery_attempts
-         )
-         # TODO: Modify the inputs if they were changed by `update_workflow_inputs`
-         dbos._sys_db.update_workflow_inputs(wfid, _serialization.serialize_args(inputs))
+     if dbos.debug_mode:
+         get_status_result = dbos._sys_db.get_workflow_status(wfid)
+         if get_status_result is None:
+             raise DBOSNonExistentWorkflowError(wfid)
+         wf_status = get_status_result["status"]
      else:
-         # Buffer the inputs for single-transaction workflows, but don't buffer the status
-         dbos._sys_db.buffer_workflow_inputs(wfid, _serialization.serialize_args(inputs))
+         if temp_wf_type != "transaction" or queue is not None:
+             # Synchronously record the status and inputs for workflows and single-step workflows
+             # We also have to do this for single-step workflows because of the foreign key constraint on the operation outputs table
+             # TODO: Make this transactional (and with the queue step below)
+             wf_status = dbos._sys_db.insert_workflow_status(
+                 status, max_recovery_attempts=max_recovery_attempts
+             )
+             # TODO: Modify the inputs if they were changed by `update_workflow_inputs`
+             dbos._sys_db.update_workflow_inputs(
+                 wfid, _serialization.serialize_args(inputs)
+             )
+         else:
+             # Buffer the inputs for single-transaction workflows, but don't buffer the status
+             dbos._sys_db.buffer_workflow_inputs(
+                 wfid, _serialization.serialize_args(inputs)
+             )
  
-     if queue is not None and wf_status == WorkflowStatusString.ENQUEUED.value:
-         dbos._sys_db.enqueue(wfid, queue)
+         if queue is not None and wf_status == WorkflowStatusString.ENQUEUED.value:
+             dbos._sys_db.enqueue(wfid, queue)
  
      status["status"] = wf_status
      return status
@@ -215,10 +225,11 @@
              output = func()
              status["status"] = "SUCCESS"
              status["output"] = _serialization.serialize(output)
-             if status["queue_name"] is not None:
-                 queue = dbos._registry.queue_info_map[status["queue_name"]]
-                 dbos._sys_db.remove_from_queue(status["workflow_uuid"], queue)
-             dbos._sys_db.buffer_workflow_status(status)
+             if not dbos.debug_mode:
+                 if status["queue_name"] is not None:
+                     queue = dbos._registry.queue_info_map[status["queue_name"]]
+                     dbos._sys_db.remove_from_queue(status["workflow_uuid"], queue)
+                 dbos._sys_db.buffer_workflow_status(status)
              return output
          except DBOSWorkflowConflictIDError:
              # Retrieve the workflow handle and wait for the result.
@@ -233,10 +244,11 @@
          except Exception as error:
              status["status"] = "ERROR"
              status["error"] = _serialization.serialize_exception(error)
-             if status["queue_name"] is not None:
-                 queue = dbos._registry.queue_info_map[status["queue_name"]]
-                 dbos._sys_db.remove_from_queue(status["workflow_uuid"], queue)
-             dbos._sys_db.update_workflow_status(status)
+             if not dbos.debug_mode:
+                 if status["queue_name"] is not None:
+                     queue = dbos._registry.queue_info_map[status["queue_name"]]
+                     dbos._sys_db.remove_from_queue(status["workflow_uuid"], queue)
+                 dbos._sys_db.update_workflow_status(status)
              raise
  
      return persist
@@ -422,10 +434,12 @@
  
      wf_status = status["status"]
  
-     if (
-         not execute_workflow
-         or wf_status == WorkflowStatusString.ERROR.value
-         or wf_status == WorkflowStatusString.SUCCESS.value
+     if not execute_workflow or (
+         not dbos.debug_mode
+         and (
+             wf_status == WorkflowStatusString.ERROR.value
+             or wf_status == WorkflowStatusString.SUCCESS.value
+         )
      ):
          dbos.logger.debug(
              f"Workflow {new_wf_id} already completed with status {wf_status}. Directly returning a workflow handle."
@@ -597,6 +611,10 @@
                              ctx.function_id,
                          )
                      )
+                     if dbos.debug_mode and recorded_output is None:
+                         raise DBOSException(
+                             "Transaction output not found in debug mode"
+                         )
                      if recorded_output:
                          dbos.logger.debug(
                              f"Replaying transaction, id: {ctx.function_id}, name: {attributes['name']}"
@@ -780,6 +798,8 @@
              recorded_output = dbos._sys_db.check_operation_execution(
                  ctx.workflow_id, ctx.function_id
              )
+             if dbos.debug_mode and recorded_output is None:
+                 raise DBOSException("Step output not found in debug mode")
              if recorded_output:
                  dbos.logger.debug(
                      f"Replaying step, id: {ctx.function_id}, name: {attributes['name']}"
dbos/_croniter.py CHANGED
@@ -5,14 +5,14 @@ Copyright (C) 2010-2012 Matsumoto Taichi.
  
  Permission is hereby granted, free of charge, to any person obtaining a copy of this
  software and associated documentation files (the "Software"), to deal in the Software
- without restriction, including without limitation the rights to use, copy, modify, 
+ without restriction, including without limitation the rights to use, copy, modify,
  merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
  persons to whom the Software is furnished to do so, subject to the following conditions:
  
  The above copyright notice and this permission notice shall be included in all
  copies or substantial portions of the Software.
  
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, 
  INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
  PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
  FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
(The two changed license lines differ only by removed trailing whitespace.)
dbos/_dbos.py CHANGED
@@ -313,6 +313,7 @@ class DBOS:
          dbos_logger.info("Initializing DBOS")
          self.config: ConfigFile = config
          self._launched: bool = False
+         self._debug_mode: bool = False
          self._sys_db_field: Optional[SystemDatabase] = None
          self._app_db_field: Optional[ApplicationDatabase] = None
          self._registry: DBOSRegistry = _get_or_create_dbos_registry()
@@ -380,23 +381,32 @@ class DBOS:
          rv: AdminServer = self._admin_server_field
          return rv
  
+     @property
+     def debug_mode(self) -> bool:
+         return self._debug_mode
+
      @classmethod
-     def launch(cls) -> None:
+     def launch(cls, *, debug_mode: bool = False) -> None:
          if _dbos_global_instance is not None:
-             _dbos_global_instance._launch()
+             _dbos_global_instance._launch(debug_mode=debug_mode)
  
-     def _launch(self) -> None:
+     def _launch(self, *, debug_mode: bool = False) -> None:
          try:
              if self._launched:
                  dbos_logger.warning(f"DBOS was already launched")
                  return
              self._launched = True
+             self._debug_mode = debug_mode
              if GlobalParams.app_version == "":
                  GlobalParams.app_version = self._registry.compute_app_version()
              dbos_logger.info(f"Application version: {GlobalParams.app_version}")
              self._executor_field = ThreadPoolExecutor(max_workers=64)
-             self._sys_db_field = SystemDatabase(self.config)
-             self._app_db_field = ApplicationDatabase(self.config)
+             self._sys_db_field = SystemDatabase(self.config, debug_mode=debug_mode)
+             self._app_db_field = ApplicationDatabase(self.config, debug_mode=debug_mode)
+
+             if debug_mode:
+                 return
+
              admin_port = self.config["runtimeConfig"].get("admin_port")
              if admin_port is None:
                  admin_port = 3001
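A usage sketch of the new launch flag, assuming an already-configured application: in debug mode DBOS connects to the existing databases and returns before starting the admin server.

from dbos import DBOS

DBOS()  # construct as usual for your app (constructor arguments omitted here)
DBOS.launch(debug_mode=True)  # skips admin server startup; database writes now raise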
dbos/_dbos_config.py CHANGED
@@ -192,7 +192,11 @@ def load_config(
      data = cast(ConfigFile, data)
      db_connection = load_db_connection()
      if not silent:
-         if data["database"].get("hostname"):
+         if os.getenv("DBOS_DBHOST"):
+             print(
+                 "[bold blue]Loading database connection parameters from debug environment variables[/bold blue]"
+             )
+         elif data["database"].get("hostname"):
              print(
                  "[bold blue]Loading database connection parameters from dbos-config.yaml[/bold blue]"
              )
@@ -205,32 +209,62 @@ def load_config(
                  "[bold blue]Using default database connection parameters (localhost)[/bold blue]"
              )
  
+     dbos_dbport: Optional[int] = None
+     dbport_env = os.getenv("DBOS_DBPORT")
+     if dbport_env:
+         try:
+             dbos_dbport = int(dbport_env)
+         except ValueError:
+             pass
+     dbos_dblocalsuffix: Optional[bool] = None
+     dblocalsuffix_env = os.getenv("DBOS_DBLOCALSUFFIX")
+     if dblocalsuffix_env:
+         try:
+             dbos_dblocalsuffix = dblocalsuffix_env.casefold() == "true".casefold()
+         except ValueError:
+             pass
+
      data["database"]["hostname"] = (
-         data["database"].get("hostname") or db_connection.get("hostname") or "localhost"
+         os.getenv("DBOS_DBHOST")
+         or data["database"].get("hostname")
+         or db_connection.get("hostname")
+         or "localhost"
      )
+
      data["database"]["port"] = (
-         data["database"].get("port") or db_connection.get("port") or 5432
+         dbos_dbport or data["database"].get("port") or db_connection.get("port") or 5432
      )
      data["database"]["username"] = (
-         data["database"].get("username") or db_connection.get("username") or "postgres"
+         os.getenv("DBOS_DBUSER")
+         or data["database"].get("username")
+         or db_connection.get("username")
+         or "postgres"
      )
      data["database"]["password"] = (
-         data["database"].get("password")
+         os.getenv("DBOS_DBPASSWORD")
+         or data["database"].get("password")
          or db_connection.get("password")
          or os.environ.get("PGPASSWORD")
          or "dbos"
      )
-     data["database"]["local_suffix"] = (
-         data["database"].get("local_suffix")
-         or db_connection.get("local_suffix")
-         or False
-     )
+
+     local_suffix = False
+     dbcon_local_suffix = db_connection.get("local_suffix")
+     if dbcon_local_suffix is not None:
+         local_suffix = dbcon_local_suffix
+     if data["database"].get("local_suffix") is not None:
+         local_suffix = data["database"].get("local_suffix")
+     if dbos_dblocalsuffix is not None:
+         local_suffix = dbos_dblocalsuffix
+     data["database"]["local_suffix"] = local_suffix
  
      # Configure the DBOS logger
      config_logger(data)
  
      # Check the connectivity to the database and make sure it's properly configured
-     if use_db_wizard:
+     # Note, never use db wizard if the DBOS is running in debug mode (i.e. DBOS_DEBUG_WORKFLOW_ID env var is set)
+     debugWorkflowId = os.getenv("DBOS_DEBUG_WORKFLOW_ID")
+     if use_db_wizard and debugWorkflowId is None:
          data = db_wizard(data, config_file_path)
  
      if "local_suffix" in data["database"] and data["database"]["local_suffix"]:
dbos/_debug.py ADDED
@@ -0,0 +1,45 @@
+ import re
+ import runpy
+ import sys
+ from typing import Union
+
+ from dbos import DBOS
+
+
+ class PythonModule:
+     def __init__(self, module_name: str):
+         self.module_name = module_name
+
+
+ def debug_workflow(workflow_id: str, entrypoint: Union[str, PythonModule]) -> None:
+     # include the current directory (represented by empty string) in the search path
+     # if it not already included
+     if "" not in sys.path:
+         sys.path.insert(0, "")
+     if isinstance(entrypoint, str):
+         runpy.run_path(entrypoint)
+     elif isinstance(entrypoint, PythonModule):
+         runpy.run_module(entrypoint.module_name)
+     else:
+         raise ValueError("Invalid entrypoint type. Must be a string or PythonModule.")
+
+     DBOS.logger.info(f"Debugging workflow {workflow_id}...")
+     DBOS.launch(debug_mode=True)
+     handle = DBOS.execute_workflow_id(workflow_id)
+     handle.get_result()
+     DBOS.logger.info("Workflow Debugging complete. Exiting process.")
+
+
+ def parse_start_command(command: str) -> Union[str, PythonModule]:
+     match = re.match(r"fastapi\s+run\s+(\.?[\w/]+\.py)", command)
+     if match:
+         return match.group(1)
+     match = re.match(r"python3?\s+(\.?[\w/]+\.py)", command)
+     if match:
+         return match.group(1)
+     match = re.match(r"python3?\s+-m\s+([\w\.]+)", command)
+     if match:
+         return PythonModule(match.group(1))
+     raise ValueError(
+         "Invalid command format. Must be 'fastapi run <script>' or 'python <script>' or 'python -m <module>'"
+     )
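The regexes above accept exactly three entrypoint shapes. A few illustrative cases that follow from the patterns as written:

from dbos._debug import PythonModule, parse_start_command

assert parse_start_command("fastapi run main.py") == "main.py"
assert parse_start_command("python3 ./app/main.py") == "./app/main.py"

module = parse_start_command("python -m my_app.main")
assert isinstance(module, PythonModule)
assert module.module_name == "my_app.main"
# Anything else, e.g. "uvicorn main:app", raises ValueError.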
dbos/_migrations/versions/5c361fc04708_added_system_tables.py CHANGED
@@ -2,7 +2,7 @@
  Add system tables.
  
  Revision ID: 5c361fc04708
- Revises: 
+ Revises:
  Create Date: 2024-07-21 13:06:13.724602
  # mypy: allow-untyped-defs, allow-untyped-calls
  """
(The changed line differs only by removed trailing whitespace after "Revises:".)
dbos/_sys_db.py CHANGED
@@ -14,9 +14,7 @@ from typing import (
      Optional,
      Sequence,
      Set,
-     Tuple,
      TypedDict,
-     cast,
  )
  
  import psycopg
@@ -126,6 +124,7 @@ class GetWorkflowsInput:
          self.offset: Optional[int] = (
              None  # Offset into the matching records for pagination
          )
+         self.sort_desc: bool = False  # If true, sort by created_at in DESC order. Default false (in ASC order).
  
  
  class GetQueuedWorkflowsInput(TypedDict):
@@ -136,6 +135,7 @@ class GetQueuedWorkflowsInput(TypedDict):
      limit: Optional[int]  # Return up to this many workflows IDs.
      offset: Optional[int]  # Offset into the matching records for pagination
      name: Optional[str]  # The name of the workflow function
+     sort_desc: Optional[bool]  # Sort by created_at in DESC or ASC order
  
  
  class GetWorkflowsOutput:
@@ -149,25 +149,6 @@ class GetPendingWorkflowsOutput:
          self.queue_name: Optional[str] = queue_name
  
  
- class WorkflowInformation(TypedDict, total=False):
-     workflow_uuid: str
-     status: WorkflowStatuses  # The status of the workflow.
-     name: str  # The name of the workflow function.
-     workflow_class_name: str  # The class name holding the workflow function.
-     workflow_config_name: (
-         str  # The name of the configuration, if the class needs configuration
-     )
-     authenticated_user: str  # The user who ran the workflow. Empty string if not set.
-     assumed_role: str
-     # The role used to run this workflow. Empty string if authorization is not required.
-     authenticated_roles: List[str]
-     # All roles the authenticated user has, if any.
-     input: Optional[_serialization.WorkflowInputs]
-     output: Optional[str]
-     error: Optional[str]
-     request: Optional[str]
-
-
  _dbos_null_topic = "__null__topic__"
  _buffer_flush_batch_size = 100
  _buffer_flush_interval_secs = 1.0
@@ -175,7 +156,7 @@ _buffer_flush_interval_secs = 1.0
  
  class SystemDatabase:
  
-     def __init__(self, config: ConfigFile):
+     def __init__(self, config: ConfigFile, *, debug_mode: bool = False):
          self.config = config
  
          sysdb_name = (
@@ -184,26 +165,27 @@ class SystemDatabase:
              else config["database"]["app_db_name"] + SystemSchema.sysdb_suffix
          )
  
-         # If the system database does not already exist, create it
-         postgres_db_url = sa.URL.create(
-             "postgresql+psycopg",
-             username=config["database"]["username"],
-             password=config["database"]["password"],
-             host=config["database"]["hostname"],
-             port=config["database"]["port"],
-             database="postgres",
-             # fills the "application_name" column in pg_stat_activity
-             query={"application_name": f"dbos_transact_{GlobalParams.executor_id}"},
-         )
-         engine = sa.create_engine(postgres_db_url)
-         with engine.connect() as conn:
-             conn.execution_options(isolation_level="AUTOCOMMIT")
-             if not conn.execute(
-                 sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
-                 parameters={"db_name": sysdb_name},
-             ).scalar():
-                 conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
-         engine.dispose()
+         if not debug_mode:
+             # If the system database does not already exist, create it
+             postgres_db_url = sa.URL.create(
+                 "postgresql+psycopg",
+                 username=config["database"]["username"],
+                 password=config["database"]["password"],
+                 host=config["database"]["hostname"],
+                 port=config["database"]["port"],
+                 database="postgres",
+                 # fills the "application_name" column in pg_stat_activity
+                 query={"application_name": f"dbos_transact_{GlobalParams.executor_id}"},
+             )
+             engine = sa.create_engine(postgres_db_url)
+             with engine.connect() as conn:
+                 conn.execution_options(isolation_level="AUTOCOMMIT")
+                 if not conn.execute(
+                     sa.text("SELECT 1 FROM pg_database WHERE datname=:db_name"),
+                     parameters={"db_name": sysdb_name},
+                 ).scalar():
+                     conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+             engine.dispose()
  
          system_db_url = sa.URL.create(
              "postgresql+psycopg",
@@ -222,25 +204,41 @@ class SystemDatabase:
          )
  
          # Run a schema migration for the system database
-         migration_dir = os.path.join(
-             os.path.dirname(os.path.realpath(__file__)), "_migrations"
-         )
-         alembic_cfg = Config()
-         alembic_cfg.set_main_option("script_location", migration_dir)
-         logging.getLogger("alembic").setLevel(logging.WARNING)
-         # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
-         escaped_conn_string = re.sub(
-             r"%(?=[0-9A-Fa-f]{2})",
-             "%%",
-             self.engine.url.render_as_string(hide_password=False),
-         )
-         alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
-         try:
-             command.upgrade(alembic_cfg, "head")
-         except Exception as e:
-             dbos_logger.warning(
-                 f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
+         if not debug_mode:
+             migration_dir = os.path.join(
+                 os.path.dirname(os.path.realpath(__file__)), "_migrations"
              )
+             alembic_cfg = Config()
+             alembic_cfg.set_main_option("script_location", migration_dir)
+             logging.getLogger("alembic").setLevel(logging.WARNING)
+             # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
+             escaped_conn_string = re.sub(
+                 r"%(?=[0-9A-Fa-f]{2})",
+                 "%%",
+                 self.engine.url.render_as_string(hide_password=False),
+             )
+             alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
+             try:
+                 command.upgrade(alembic_cfg, "head")
+             except Exception as e:
+                 dbos_logger.warning(
+                     f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
+                 )
+             alembic_cfg = Config()
+             alembic_cfg.set_main_option("script_location", migration_dir)
+             # Alembic requires the % in URL-escaped parameters to itself be escaped to %%.
+             escaped_conn_string = re.sub(
+                 r"%(?=[0-9A-Fa-f]{2})",
+                 "%%",
+                 self.engine.url.render_as_string(hide_password=False),
+             )
+             alembic_cfg.set_main_option("sqlalchemy.url", escaped_conn_string)
+             try:
+                 command.upgrade(alembic_cfg, "head")
+             except Exception as e:
+                 dbos_logger.warning(
+                     f"Exception during system database construction. This is most likely because the system database was configured using a later version of DBOS: {e}"
+                 )
  
          self.notification_conn: Optional[psycopg.connection.Connection] = None
          self.notifications_map: Dict[str, threading.Condition] = {}
@@ -256,6 +254,7 @@
  
          # Now we can run background processes
          self._run_background_processes = True
+         self._debug_mode = debug_mode
  
      # Destroy the pool when finished
      def destroy(self) -> None:
@@ -277,6 +276,8 @@
          *,
          max_recovery_attempts: int = DEFAULT_MAX_RECOVERY_ATTEMPTS,
      ) -> WorkflowStatuses:
+         if self._debug_mode:
+             raise Exception("called insert_workflow_status in debug mode")
          wf_status: WorkflowStatuses = status["status"]
  
          cmd = (
@@ -376,6 +377,8 @@
          *,
          conn: Optional[sa.Connection] = None,
      ) -> None:
+         if self._debug_mode:
+             raise Exception("called update_workflow_status in debug mode")
          wf_status: WorkflowStatuses = status["status"]
  
          cmd = (
@@ -425,6 +428,8 @@
          self,
          workflow_id: str,
      ) -> None:
+         if self._debug_mode:
+             raise Exception("called cancel_workflow in debug mode")
          with self.engine.begin() as c:
              # Remove the workflow from the queues table so it does not block the table
              c.execute(
@@ -445,6 +450,8 @@
          self,
          workflow_id: str,
      ) -> None:
+         if self._debug_mode:
+             raise Exception("called resume_workflow in debug mode")
          with self.engine.begin() as c:
              # Check the status of the workflow. If it is complete, do nothing.
              row = c.execute(
@@ -592,6 +599,9 @@
      def update_workflow_inputs(
          self, workflow_uuid: str, inputs: str, conn: Optional[sa.Connection] = None
      ) -> None:
+         if self._debug_mode:
+             raise Exception("called update_workflow_inputs in debug mode")
+
          cmd = (
              pg.insert(SystemSchema.workflow_inputs)
              .values(
@@ -637,9 +647,11 @@
          return inputs
  
      def get_workflows(self, input: GetWorkflowsInput) -> GetWorkflowsOutput:
-         query = sa.select(SystemSchema.workflow_status.c.workflow_uuid).order_by(
-             SystemSchema.workflow_status.c.created_at.asc()
-         )
+         query = sa.select(SystemSchema.workflow_status.c.workflow_uuid)
+         if input.sort_desc:
+             query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
+         else:
+             query = query.order_by(SystemSchema.workflow_status.c.created_at.asc())
          if input.name:
              query = query.where(SystemSchema.workflow_status.c.name == input.name)
          if input.authenticated_user:
@@ -683,15 +695,15 @@
          self, input: GetQueuedWorkflowsInput
      ) -> GetWorkflowsOutput:
  
-         query = (
-             sa.select(SystemSchema.workflow_queue.c.workflow_uuid)
-             .join(
-                 SystemSchema.workflow_status,
-                 SystemSchema.workflow_queue.c.workflow_uuid
-                 == SystemSchema.workflow_status.c.workflow_uuid,
-             )
-             .order_by(SystemSchema.workflow_status.c.created_at.asc())
+         query = sa.select(SystemSchema.workflow_queue.c.workflow_uuid).join(
+             SystemSchema.workflow_status,
+             SystemSchema.workflow_queue.c.workflow_uuid
+             == SystemSchema.workflow_status.c.workflow_uuid,
          )
+         if input["sort_desc"]:
+             query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
+         else:
+             query = query.order_by(SystemSchema.workflow_status.c.created_at.asc())
  
          if input.get("name"):
              query = query.where(SystemSchema.workflow_status.c.name == input["name"])
@@ -754,6 +766,8 @@
      def record_operation_result(
          self, result: OperationResultInternal, conn: Optional[sa.Connection] = None
      ) -> None:
+         if self._debug_mode:
+             raise Exception("called record_operation_result in debug mode")
          error = result["error"]
          output = result["output"]
          assert error is None or output is None, "Only one of error or output can be set"
@@ -813,6 +827,11 @@
              recorded_output = self.check_operation_execution(
                  workflow_uuid, function_id, conn=c
              )
+             if self._debug_mode and recorded_output is None:
+                 raise Exception(
+                     "called send in debug mode without a previous execution"
+                 )
+
              if recorded_output is not None:
                  dbos_logger.debug(
                      f"Replaying send, id: {function_id}, destination_uuid: {destination_uuid}, topic: {topic}"
@@ -856,6 +875,8 @@
  
          # First, check for previous executions.
          recorded_output = self.check_operation_execution(workflow_uuid, function_id)
+         if self._debug_mode and recorded_output is None:
+             raise Exception("called recv in debug mode without a previous execution")
          if recorded_output is not None:
              dbos_logger.debug(f"Replaying recv, id: {function_id}, topic: {topic}")
              if recorded_output["output"] is not None:
@@ -1005,6 +1026,9 @@
      ) -> float:
          recorded_output = self.check_operation_execution(workflow_uuid, function_id)
          end_time: float
+         if self._debug_mode and recorded_output is None:
+             raise Exception("called sleep in debug mode without a previous execution")
+
          if recorded_output is not None:
              dbos_logger.debug(f"Replaying sleep, id: {function_id}, seconds: {seconds}")
              assert recorded_output["output"] is not None, "no recorded end time"
@@ -1039,6 +1063,10 @@
              recorded_output = self.check_operation_execution(
                  workflow_uuid, function_id, conn=c
              )
+             if self._debug_mode and recorded_output is None:
+                 raise Exception(
+                     "called set_event in debug mode without a previous execution"
+                 )
              if recorded_output is not None:
                  dbos_logger.debug(f"Replaying set_event, id: {function_id}, key: {key}")
                  return  # Already sent before
@@ -1083,6 +1111,10 @@
          recorded_output = self.check_operation_execution(
              caller_ctx["workflow_uuid"], caller_ctx["function_id"]
          )
+         if self._debug_mode and recorded_output is None:
+             raise Exception(
+                 "called get_event in debug mode without a previous execution"
+             )
          if recorded_output is not None:
              dbos_logger.debug(
                  f"Replaying get_event, id: {caller_ctx['function_id']}, key: {key}"
@@ -1145,6 +1177,9 @@
          return value
  
      def _flush_workflow_status_buffer(self) -> None:
+         if self._debug_mode:
+             raise Exception("called _flush_workflow_status_buffer in debug mode")
+
          """Export the workflow status buffer to the database, up to the batch size."""
          if len(self._workflow_status_buffer) == 0:
              return
@@ -1175,6 +1210,9 @@
                  break
  
      def _flush_workflow_inputs_buffer(self) -> None:
+         if self._debug_mode:
+             raise Exception("called _flush_workflow_inputs_buffer in debug mode")
+
          """Export the workflow inputs buffer to the database, up to the batch size."""
          if len(self._workflow_inputs_buffer) == 0:
              return
@@ -1239,6 +1277,8 @@
          )
  
      def enqueue(self, workflow_id: str, queue_name: str) -> None:
+         if self._debug_mode:
+             raise Exception("called enqueue in debug mode")
          with self.engine.begin() as c:
              c.execute(
                  pg.insert(SystemSchema.workflow_queue)
@@ -1250,6 +1290,9 @@
          )
  
      def start_queued_workflows(self, queue: "Queue", executor_id: str) -> List[str]:
+         if self._debug_mode:
+             return []
+
          start_time_ms = int(time.time() * 1000)
          if queue.limiter is not None:
              limiter_period_ms = int(queue.limiter["period"] * 1000)
@@ -1400,6 +1443,9 @@
          return ret_ids
  
      def remove_from_queue(self, workflow_id: str, queue: "Queue") -> None:
+         if self._debug_mode:
+             raise Exception("called remove_from_queue in debug mode")
+
          with self.engine.begin() as c:
              if queue.limiter is None:
                  c.execute(
@@ -1415,6 +1461,8 @@
          )
  
      def clear_queue_assignment(self, workflow_id: str) -> None:
+         if self._debug_mode:
+             raise Exception("called clear_queue_assignment in debug mode")
          with self.engine.begin() as c:
              c.execute(
                  sa.update(SystemSchema.workflow_queue)
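A small sketch of the new sort option, assuming `sys_db` is an already-constructed SystemDatabase:

input = GetWorkflowsInput()
input.sort_desc = True  # newest first; the default False keeps the old ascending order
output = sys_db.get_workflows(input)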
dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py CHANGED
@@ -2,7 +2,7 @@
  Initialize application database.
  
  Revision ID: c6b516e182b2
- Revises: 
+ Revises:
  Create Date: 2024-07-31 18:06:42.500040
  """
  
(The changed line differs only by removed trailing whitespace after "Revises:".)
dbos/_workflow_commands.py CHANGED
@@ -49,6 +49,7 @@ def list_workflows(
      name: Optional[str] = None,
      limit: Optional[int] = None,
      offset: Optional[int] = None,
+     sort_desc: bool = False,
  ) -> List[WorkflowInformation]:
      input = GetWorkflowsInput()
      input.workflow_ids = workflow_ids
@@ -61,6 +62,7 @@
      input.limit = limit
      input.name = name
      input.offset = offset
+     input.sort_desc = sort_desc
  
      output: GetWorkflowsOutput = sys_db.get_workflows(input)
      infos: List[WorkflowInformation] = []
@@ -82,6 +84,7 @@
      name: Optional[str] = None,
      request: bool = False,
      offset: Optional[int] = None,
+     sort_desc: bool = False,
  ) -> List[WorkflowInformation]:
      input: GetQueuedWorkflowsInput = {
          "queue_name": queue_name,
@@ -91,6 +94,7 @@
          "limit": limit,
          "name": name,
          "offset": offset,
+         "sort_desc": sort_desc,
      }
      output: GetWorkflowsOutput = sys_db.get_queued_workflows(input)
      infos: List[WorkflowInformation] = []
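The same flag surfaces through these helpers. A sketch, assuming the leading parameters (the SystemDatabase handle and the filters) that sit above these hunks in the file:

recent = list_workflows(sys_db, limit=10, sort_desc=True)  # ten newest workflows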
dbos/cli/cli.py CHANGED
@@ -15,6 +15,8 @@ from rich import print
  from rich.prompt import IntPrompt
  from typing_extensions import Annotated
  
+ from dbos._debug import debug_workflow, parse_start_command
+
  from .. import load_config
  from .._app_db import ApplicationDatabase
  from .._dbos_config import _is_valid_app_name
@@ -232,6 +234,22 @@ def reset(
      return
  
  
+ @app.command(help="Replay Debug a DBOS workflow")
+ def debug(
+     workflow_id: Annotated[str, typer.Argument(help="Workflow ID to debug")],
+ ) -> None:
+     config = load_config(silent=True, use_db_wizard=False)
+     start = config["runtimeConfig"]["start"]
+     if not start:
+         typer.echo("No start commands found in 'dbos-config.yaml'")
+         raise typer.Exit(code=1)
+     if len(start) > 1:
+         typer.echo("Multiple start commands found in 'dbos-config.yaml'")
+         raise typer.Exit(code=1)
+     entrypoint = parse_start_command(start[0])
+     debug_workflow(workflow_id, entrypoint)
+
+
  @workflow.command(help="List workflows for your application")
  def list(
      limit: Annotated[
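The new command ties the pieces together: it reads the single start command from dbos-config.yaml, parses it into an entrypoint, and replays the workflow against the recorded state. Roughly equivalent Python, assuming a start command of "fastapi run main.py":

from dbos._debug import debug_workflow, parse_start_command

entrypoint = parse_start_command("fastapi run main.py")
debug_workflow("<workflow-id>", entrypoint)  # what `dbos debug <workflow-id>` runs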
dbos-0.23.0a5.dist-info/METADATA → dbos-0.23.0a9.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 0.23.0a5
+ Version: 0.23.0a9
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT
@@ -78,6 +78,9 @@ You can use DBOS to add reliable background jobs or cron scheduling or queues to
  Install and configure with:
  
  ```shell
+ python3 -m venv dbos-example/.venv
+ cd dbos-example
+ source .venv/bin/activate
  pip install dbos
  dbos init --config
  ```
@@ -103,7 +106,7 @@ def step_two():
  def dbos_workflow():
      step_one()
      for _ in range(5):
-         print("Press Control + \ to stop the app...")
+         print("Press Control + C twice to stop the app...")
          DBOS.sleep(1)
      step_two()
  
@@ -114,7 +117,7 @@ def fastapi_endpoint():
  
  Save the program into `main.py` and start it with `fastapi run`.
  Visit `localhost:8000` in your browser to start the workflow.
- When prompted, press `Control + \` to force quit your application.
+ When prompted, press `Control + C` (You may need to press `Control + C` twice quickly, or press `Control + \`, if `Control + C` is not effective in your environment) to force quit your application.
  It should crash midway through the workflow, having completed step one but not step two.
  Then, restart your app with `fastapi run`.
  It should resume the workflow from where it left off, completing step two without re-executing step one.
dbos-0.23.0a5.dist-info/RECORD → dbos-0.23.0a9.dist-info/RECORD CHANGED
@@ -1,20 +1,22 @@
- dbos-0.23.0a5.dist-info/METADATA,sha256=saWbNoKPc4hy7CWVchMo64BhlRX3rBrl8q49PC4GCAY,5309
- dbos-0.23.0a5.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
- dbos-0.23.0a5.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
- dbos-0.23.0a5.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+ dbos-0.23.0a9.dist-info/METADATA,sha256=KCH9_w-I-4MaqoTS0g8Zm4lAPxsWD71dOQwe4jyT3Ww,5523
+ dbos-0.23.0a9.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
+ dbos-0.23.0a9.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+ dbos-0.23.0a9.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
  dbos/__init__.py,sha256=CxRHBHEthPL4PZoLbZhp3rdm44-KkRTT2-7DkK9d4QQ,724
+ dbos/__main__.py,sha256=P7jAr-7L9XE5mrsQ7i4b-bLr2ap1tCQfhMByLCRWDj0,568
  dbos/_admin_server.py,sha256=YiVn5lywz2Vg8_juyNHOYl0HVEy48--7b4phwK7r92o,5732
- dbos/_app_db.py,sha256=_tv2vmPjjiaikwgxH3mqxgJ4nUUcG2-0uMXKWCqVu1c,5509
+ dbos/_app_db.py,sha256=QFL1ceCugJFj_LBvK_G_0tt5jjyTM-4KnqmhbuC1ggg,5826
  dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
  dbos/_cloudutils/authentication.py,sha256=V0fCWQN9stCkhbuuxgPTGpvuQcDqfU3KAxPAh01vKW4,5007
  dbos/_cloudutils/cloudutils.py,sha256=YC7jGsIopT0KveLsqbRpQk2KlRBk-nIRC_UCgep4f3o,7797
  dbos/_cloudutils/databases.py,sha256=_shqaqSvhY4n2ScgQ8IP5PDZvzvcx3YBKV8fj-cxhSY,8543
  dbos/_context.py,sha256=Ue5qu3rzLfRmPkz-UUZi9ZS8iXpapRN0NTM4mbA2QmQ,17738
- dbos/_core.py,sha256=MWIa8r-KwnadYQtGSod2KdAaeQ4gTJAUMPhMGaM0u2c,36613
- dbos/_croniter.py,sha256=hbhgfsHBqclUS8VeLnJ9PSE9Z54z6mi4nnrr1aUXn0k,47561
+ dbos/_core.py,sha256=UQb068FT59Op-F5RmtxreSeSQ1_wljOso0dQCUOPrC4,37528
+ dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
  dbos/_db_wizard.py,sha256=6tfJaCRa1NtkUdNW75a2yvi_mEgnPJ9C1HP2zPG1hCU,8067
- dbos/_dbos.py,sha256=JNAFYQ3kVjnZsUl0qJ-JWeaSHKI51VGE3JBXdaPD8Oo,39054
- dbos/_dbos_config.py,sha256=DfiqVVxNqnafkocSzLqBp1Ig5vCviDTDK_GO3zTtQqI,8298
+ dbos/_dbos.py,sha256=jNh1R4b13a4g1VXeSPaC2d3J2JXiF6YNpCBek9hkcbM,39393
+ dbos/_dbos_config.py,sha256=_VETbEsMZ66563A8sX05B_coKz2BrILbIm9H5BmnPmk,9572
+ dbos/_debug.py,sha256=wcvjM2k4BrK7mlYjImUZXNBUB00fPGjQrNimZXlj76c,1491
  dbos/_error.py,sha256=xqB7b7g5AF_OwOvqLKLXL1xldn2gAtORix2ZC2B8zK0,5089
  dbos/_fastapi.py,sha256=ke03vqsSYDnO6XeOtOVFXj0-f-v1MGsOxa9McaROvNc,3616
  dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
@@ -25,7 +27,7 @@ dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
  dbos/_migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
  dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py,sha256=ICLPl8CN9tQXMsLDsAj8z1TsL831-Z3F8jSBvrR-wyw,736
  dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py,sha256=ZBYrtTdxy64HxIAlOes89fVIk2P1gNaJack7wuC_epg,873
- dbos/_migrations/versions/5c361fc04708_added_system_tables.py,sha256=QMgFMb0aLgC25YicsvPSr6AHRCA6Zd66hyaRUhwKzrQ,6404
+ dbos/_migrations/versions/5c361fc04708_added_system_tables.py,sha256=Xr9hBDJjkAtymlauOmAy00yUHj0VVUaEz7kNwEM9IwE,6403
  dbos/_migrations/versions/a3b18ad34abe_added_triggers.py,sha256=Rv0ZsZYZ_WdgGEULYsPfnp4YzaO5L198gDTgYY39AVA,2022
  dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py,sha256=8PyFi8rd6CN-mUro43wGhsg5wcQWKZPRHD6jw8R5pVc,986
  dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4hGBC02Ptng1715roTjY3xiyzZU4,729
@@ -41,7 +43,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbos/_schemas/application_database.py,sha256=KeyoPrF7hy_ODXV7QNike_VFSD74QBRfQ76D7QyE9HI,966
  dbos/_schemas/system_database.py,sha256=rwp4EvCSaXcUoMaRczZCvETCxGp72k3-hvLyGUDkih0,5163
  dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
- dbos/_sys_db.py,sha256=tyBQicOkkf563CU83MwZubENpI5AmCYFyz2tSbQ9XQ4,61045
+ dbos/_sys_db.py,sha256=NVd4OCyEfAvokyewNX35qUN8nj8L3x_j2vzA1t4BNp0,63817
  dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
  dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbos/_templates/dbos-db-starter/__package/main.py,sha256=eI0SS9Nwj-fldtiuSzIlIG6dC91GXXwdRsoHxv6S_WI,2719
@@ -50,15 +52,15 @@ dbos/_templates/dbos-db-starter/alembic.ini,sha256=VKBn4Gy8mMuCdY7Hip1jmo3wEUJ1V
  dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos,sha256=OMlcpdYUJKjyAme7phOz3pbn9upcIRjm42iwEThWUEQ,495
  dbos/_templates/dbos-db-starter/migrations/env.py.dbos,sha256=GUV6sjkDzf9Vl6wkGEd0RSkK-ftRfV6EUwSQdd0qFXg,2392
  dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635
- dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=U5thFWGqNN4QLrNXT7wUUqftIFDNE5eSdqD8JNW1mec,942
+ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=MpS7LGaJS0CpvsjhfDkp9EJqvMvVCjRPfUp4c0aE2ys,941
  dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
  dbos/_tracer.py,sha256=_Id9j9kCrptSNpEpLiRk_g5VPp-DrTWP1WNZInd5BA4,2439
  dbos/_utils.py,sha256=wjOJzxN66IzL9p4dwcEmQACRQah_V09G6mJI2exQfOM,155
- dbos/_workflow_commands.py,sha256=ZAdVccVuSFlcZ_kEdo6MwQ9xaslVXn_MMRTybJlkyQw,4652
+ dbos/_workflow_commands.py,sha256=Z1PwprvR_A8PXV2FNhcMrvV8B4NlDI9dc5naMeeNKGw,4774
  dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
  dbos/cli/_template_init.py,sha256=AfuMaO8bmr9WsPNHr6j2cp7kjVVZDUpH7KpbTg0hhFs,2722
- dbos/cli/cli.py,sha256=BJWFT94I14uKTmMYSI4ITscPMBgidgjV0RBx5_LyNKI,14849
+ dbos/cli/cli.py,sha256=ohrXoRshwxOwN-gFag_RW2yoNjwcCVv6nTYQdaArwek,15506
  dbos/dbos-config.schema.json,sha256=X5TpXNcARGceX0zQs0fVgtZW_Xj9uBbY5afPt9Rz9yk,5741
  dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
- dbos-0.23.0a5.dist-info/RECORD,,
+ dbos-0.23.0a9.dist-info/RECORD,,