dbos 0.25.0a16-py3-none-any.whl → 0.26.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
dbos/_utils.py CHANGED
@@ -1,5 +1,7 @@
  import os
 
+ INTERNAL_QUEUE_NAME = "_dbos_internal_queue"
+
 
  class GlobalParams:
      app_version: str = os.environ.get("DBOS__APPVERSION", "")
dbos/_workflow_commands.py CHANGED
@@ -1,57 +1,18 @@
- import json
- from typing import Any, List, Optional
+ import uuid
+ from typing import List, Optional
 
- from . import _serialization
+ from dbos._error import DBOSException
+
+ from ._app_db import ApplicationDatabase
  from ._sys_db import (
      GetQueuedWorkflowsInput,
      GetWorkflowsInput,
-     GetWorkflowsOutput,
      StepInfo,
      SystemDatabase,
+     WorkflowStatus,
  )
 
 
- class WorkflowStatus:
-     # The workflow ID
-     workflow_id: str
-     # The workflow status. Must be one of ENQUEUED, PENDING, SUCCESS, ERROR, CANCELLED, or RETRIES_EXCEEDED
-     status: str
-     # The name of the workflow function
-     name: str
-     # The name of the workflow's class, if any
-     class_name: Optional[str]
-     # The name with which the workflow's class instance was configured, if any
-     config_name: Optional[str]
-     # The user who ran the workflow, if specified
-     authenticated_user: Optional[str]
-     # The role with which the workflow ran, if specified
-     assumed_role: Optional[str]
-     # All roles which the authenticated user could assume
-     authenticated_roles: Optional[list[str]]
-     # The deserialized workflow input object
-     input: Optional[_serialization.WorkflowInputs]
-     # The workflow's output, if any
-     output: Optional[Any] = None
-     # The error the workflow threw, if any
-     error: Optional[Exception] = None
-     # Workflow start time, as a Unix epoch timestamp in ms
-     created_at: Optional[int]
-     # Last time the workflow status was updated, as a Unix epoch timestamp in ms
-     updated_at: Optional[int]
-     # If this workflow was enqueued, on which queue
-     queue_name: Optional[str]
-     # The executor to most recently executed this workflow
-     executor_id: Optional[str]
-     # The application version on which this workflow was started
-     app_version: Optional[str]
-     # The ID of the application executing this workflow
-     app_id: Optional[str]
-     # The number of times this workflow's execution has been attempted
-     recovery_attempts: Optional[int]
-     # The HTTP request that triggered the workflow, if known
-     request: Optional[str]
-
-
  def list_workflows(
      sys_db: SystemDatabase,
      *,
@@ -66,6 +27,7 @@ def list_workflows(
      offset: Optional[int] = None,
      sort_desc: bool = False,
      request: bool = False,
+     workflow_id_prefix: Optional[str] = None,
  ) -> List[WorkflowStatus]:
      input = GetWorkflowsInput()
      input.workflow_ids = workflow_ids
@@ -78,13 +40,10 @@ def list_workflows(
      input.name = name
      input.offset = offset
      input.sort_desc = sort_desc
+     input.workflow_id_prefix = workflow_id_prefix
+
+     infos: List[WorkflowStatus] = sys_db.get_workflows(input, request)
 
-     output: GetWorkflowsOutput = sys_db.get_workflows(input)
-     infos: List[WorkflowStatus] = []
-     for workflow_id in output.workflow_uuids:
-         info = get_workflow(sys_db, workflow_id, request)  # Call the method for each ID
-         if info is not None:
-             infos.append(info)
      return infos
 
 
@@ -111,65 +70,51 @@ def list_queued_workflows(
          "offset": offset,
          "sort_desc": sort_desc,
      }
-     output: GetWorkflowsOutput = sys_db.get_queued_workflows(input)
-     infos: List[WorkflowStatus] = []
-     for workflow_id in output.workflow_uuids:
-         info = get_workflow(sys_db, workflow_id, request)  # Call the method for each ID
-         if info is not None:
-             infos.append(info)
+
+     infos: List[WorkflowStatus] = sys_db.get_queued_workflows(input, request)
      return infos
 
 
  def get_workflow(
      sys_db: SystemDatabase, workflow_id: str, get_request: bool
  ) -> Optional[WorkflowStatus]:
+     input = GetWorkflowsInput()
+     input.workflow_ids = [workflow_id]
 
-     internal_status = sys_db.get_workflow_status(workflow_id)
-     if internal_status is None:
+     infos: List[WorkflowStatus] = sys_db.get_workflows(input, get_request)
+     if not infos:
          return None
 
-     info = WorkflowStatus()
-
-     info.workflow_id = workflow_id
-     info.status = internal_status["status"]
-     info.name = internal_status["name"]
-     info.class_name = internal_status["class_name"]
-     info.config_name = internal_status["config_name"]
-     info.authenticated_user = internal_status["authenticated_user"]
-     info.assumed_role = internal_status["assumed_role"]
-     info.authenticated_roles = (
-         json.loads(internal_status["authenticated_roles"])
-         if internal_status["authenticated_roles"] is not None
-         else None
-     )
-     info.request = internal_status["request"]
-     info.created_at = internal_status["created_at"]
-     info.updated_at = internal_status["updated_at"]
-     info.queue_name = internal_status["queue_name"]
-     info.executor_id = internal_status["executor_id"]
-     info.app_version = internal_status["app_version"]
-     info.app_id = internal_status["app_id"]
-     info.recovery_attempts = internal_status["recovery_attempts"]
-
-     input_data = sys_db.get_workflow_inputs(workflow_id)
-     if input_data is not None:
-         info.input = input_data
-
-     if internal_status.get("status") == "SUCCESS":
-         result = sys_db.await_workflow_result(workflow_id)
-         info.output = result
-     elif internal_status.get("status") == "ERROR":
-         try:
-             sys_db.await_workflow_result(workflow_id)
-         except Exception as e:
-             info.error = e
-
-     if not get_request:
-         info.request = None
-
-     return info
-
-
- def list_workflow_steps(sys_db: SystemDatabase, workflow_id: str) -> List[StepInfo]:
-     output = sys_db.get_workflow_steps(workflow_id)
-     return output
+     return infos[0]
+
+
+ def list_workflow_steps(
+     sys_db: SystemDatabase, app_db: ApplicationDatabase, workflow_id: str
+ ) -> List[StepInfo]:
+     steps = sys_db.get_workflow_steps(workflow_id)
+     transactions = app_db.get_transactions(workflow_id)
+     merged_steps = steps + transactions
+     merged_steps.sort(key=lambda step: step["function_id"])
+     return merged_steps
+
+
+ def fork_workflow(
+     sys_db: SystemDatabase,
+     app_db: ApplicationDatabase,
+     workflow_id: str,
+     start_step: int,
+ ) -> str:
+     def get_max_function_id(workflow_uuid: str) -> int:
+         max_transactions = app_db.get_max_function_id(workflow_uuid) or 0
+         max_operations = sys_db.get_max_function_id(workflow_uuid) or 0
+         return max(max_transactions, max_operations)
+
+     max_function_id = get_max_function_id(workflow_id)
+     if max_function_id > 0 and start_step > max_function_id:
+         raise DBOSException(
+             f"Cannot fork workflow {workflow_id} from step {start_step}. The workflow has {max_function_id} steps."
+         )
+     forked_workflow_id = str(uuid.uuid4())
+     app_db.clone_workflow_transactions(workflow_id, forked_workflow_id, start_step)
+     sys_db.fork_workflow(workflow_id, forked_workflow_id, start_step)
+     return forked_workflow_id
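
The new helpers above take both a SystemDatabase and an ApplicationDatabase handle. A minimal sketch of how a caller might wire them up, mirroring the construction used in dbos/cli/cli.py below; the workflow ID is a placeholder and the `load_config` import path is an assumption:

```python
# Sketch only: drive the new helpers the same way the CLI does.
from dbos import load_config
from dbos._app_db import ApplicationDatabase
from dbos._sys_db import SystemDatabase
from dbos._workflow_commands import fork_workflow, list_workflow_steps

config = load_config(silent=True)
sys_db = SystemDatabase(config["database"])
app_db = ApplicationDatabase(config["database"])

# Steps and transactions are merged and ordered by function_id.
for step in list_workflow_steps(sys_db, app_db, "my-workflow-id"):  # placeholder ID
    print(step["function_id"], step)

# Fork an existing workflow under a fresh UUID, restarting from step 2.
new_id = fork_workflow(sys_db, app_db, "my-workflow-id", start_step=2)
print(f"Forked workflow: {new_id}")
```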
dbos/cli/cli.py CHANGED
@@ -20,6 +20,7 @@ from dbos._debug import debug_workflow, parse_start_command
  from .. import load_config
  from .._app_db import ApplicationDatabase
  from .._dbos_config import _is_valid_app_name
+ from .._docker_pg_helper import start_docker_pg, stop_docker_pg
  from .._sys_db import SystemDatabase, reset_system_database
  from .._workflow_commands import (
      get_workflow,
@@ -37,6 +38,21 @@ queue = typer.Typer()
  app.add_typer(workflow, name="workflow", help="Manage DBOS workflows")
  workflow.add_typer(queue, name="queue", help="Manage enqueued workflows")
 
+ postgres = typer.Typer()
+ app.add_typer(
+     postgres, name="postgres", help="Manage local Postgres database with Docker"
+ )
+
+
+ @postgres.command(name="start", help="Start a local Postgres database")
+ def pg_start() -> None:
+     start_docker_pg()
+
+
+ @postgres.command(name="stop", help="Stop the local Postgres database")
+ def pg_stop() -> None:
+     stop_docker_pg()
+
 
  def _on_windows() -> bool:
      return platform.system() == "Windows"
@@ -246,7 +262,7 @@ def reset(
  def debug(
      workflow_id: Annotated[str, typer.Argument(help="Workflow ID to debug")],
  ) -> None:
-     config = load_config(silent=True, use_db_wizard=False)
+     config = load_config(silent=True)
      start = config["runtimeConfig"]["start"]
      if not start:
          typer.echo("No start commands found in 'dbos-config.yaml'")
@@ -350,8 +366,11 @@ def steps(
  ) -> None:
      config = load_config(silent=True)
      sys_db = SystemDatabase(config["database"])
+     app_db = ApplicationDatabase(config["database"])
      print(
-         jsonpickle.encode(list_workflow_steps(sys_db, workflow_id), unpicklable=False)
+         jsonpickle.encode(
+             list_workflow_steps(sys_db, app_db, workflow_id), unpicklable=False
+         )
      )
 
 
@@ -414,13 +433,60 @@ def restart(
      ] = 3001,
  ) -> None:
      response = requests.post(
-         f"http://{host}:{port}/workflows/{uuid}/restart", json=[], timeout=5
+         f"http://{host}:{port}/workflows/{uuid}/restart",
+         json=[],
+         timeout=5,
      )
 
      if response.status_code == 204:
          print(f"Workflow {uuid} has been restarted")
      else:
-         print(f"Failed to resume workflow {uuid}. Status code: {response.status_code}")
+         error_message = response.json().get("error", "Unknown error")
+         print(
+             f"Failed to restart workflow {uuid}. "
+             f"Status code: {response.status_code}. "
+             f"Error: {error_message}"
+         )
+
+
+ @workflow.command(
+     help="fork a workflow from the beginning with a new id and from a step"
+ )
+ def fork(
+     uuid: Annotated[str, typer.Argument()],
+     host: Annotated[
+         typing.Optional[str],
+         typer.Option("--host", "-H", help="Specify the admin host"),
+     ] = "localhost",
+     port: Annotated[
+         typing.Optional[int],
+         typer.Option("--port", "-p", help="Specify the admin port"),
+     ] = 3001,
+     step: Annotated[
+         typing.Optional[int],
+         typer.Option(
+             "--step",
+             "-s",
+             help="Restart from this step (default: first step)",
+         ),
+     ] = 1,
+ ) -> None:
+     print(f"Forking workflow {uuid} from step {step}")
+     response = requests.post(
+         f"http://{host}:{port}/workflows/{uuid}/fork",
+         json={"start_step": step},
+         timeout=5,
+     )
+
+     if response.status_code == 204:
+         print(f"Workflow {uuid} has been forked")
+     else:
+         error_message = response.json().get("error", "Unknown error")
+         print(
+             f"Failed to fork workflow {uuid}. "
+             f"Status code: {response.status_code}. "
+             f"Error: {error_message}"
+         )
 
 
  @queue.command(name="list", help="List enqueued functions for your application")
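
The new `fork` subcommand added above is a thin wrapper over the admin server's fork endpoint. A hedged sketch of the equivalent call for scripting against a running app; host, port, workflow ID, and step are placeholders matching the CLI defaults:

```python
# Sketch only: replicate what `dbos workflow fork` posts to the admin server.
import requests

host, port = "localhost", 3001      # CLI defaults for the admin server
workflow_id = "my-workflow-id"      # placeholder

response = requests.post(
    f"http://{host}:{port}/workflows/{workflow_id}/fork",
    json={"start_step": 1},         # fork from the first step
    timeout=5,
)
if response.status_code == 204:
    print(f"Workflow {workflow_id} has been forked")
else:
    print(f"Fork failed ({response.status_code}): "
          f"{response.json().get('error', 'Unknown error')}")
```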
dbos/dbos-config.schema.json CHANGED
@@ -24,47 +24,51 @@
      "additionalProperties": false,
      "properties": {
        "hostname": {
-         "type": "string",
-         "description": "The hostname or IP address of the application database"
+         "type": ["string", "null"],
+         "description": "The hostname or IP address of the application database. DEPRECATED: Use database_url instead",
+         "deprecated": true
        },
        "port": {
-         "type": "number",
-         "description": "The port number of the application database"
+         "type": ["number", "null"],
+         "description": "The port number of the application database. DEPRECATED: Use database_url instead",
+         "deprecated": true
        },
        "username": {
-         "type": "string",
-         "description": "The username to use when connecting to the application database",
+         "type": ["string", "null"],
+         "description": "The username to use when connecting to the application database. DEPRECATED: Use database_url instead",
          "not": {
            "enum": ["dbos"]
-         }
+         },
+         "deprecated": true
        },
        "password": {
          "type": ["string", "null"],
-         "description": "The password to use when connecting to the application database. Developers are strongly encouraged to use environment variable substitution to avoid storing secrets in source."
+         "description": "The password to use when connecting to the application database. Developers are strongly encouraged to use environment variable substitution (${VAR_NAME}) or Docker secrets (${DOCKER_SECRET:SECRET_NAME}) to avoid storing secrets in source. DEPRECATED: Use database_url instead",
+         "deprecated": true
        },
        "connectionTimeoutMillis": {
-         "type": "number",
-         "description": "The number of milliseconds the system waits before timing out when connecting to the application database"
+         "type": ["number", "null"],
+         "description": "The number of milliseconds the system waits before timing out when connecting to the application database. DEPRECATED: Use database_url instead",
+         "deprecated": true
        },
        "app_db_name": {
-         "type": "string",
-         "description": "The name of the application database"
+         "type": ["string", "null"],
+         "description": "The name of the application database. DEPRECATED: Use database_url instead",
+         "deprecated": true
        },
        "sys_db_name": {
          "type": "string",
          "description": "The name of the system database"
        },
        "ssl": {
-         "type": "boolean",
-         "description": "Use SSL/TLS to securely connect to the database (default: true)"
+         "type": ["boolean", "null"],
+         "description": "Use SSL/TLS to securely connect to the database (default: true). DEPRECATED: Use database_url instead",
+         "deprecated": true
        },
        "ssl_ca": {
-         "type": "string",
-         "description": "If using SSL/TLS to securely connect to a database, path to an SSL root certificate file"
-       },
-       "local_suffix": {
-         "type": "boolean",
-         "description": "Whether to suffix app_db_name with '_local'. Set to true when doing local development using a DBOS Cloud database."
+         "type": ["string", "null"],
+         "description": "If using SSL/TLS to securely connect to a database, path to an SSL root certificate file. DEPRECATED: Use database_url instead",
+         "deprecated": true
        },
        "app_db_client": {
          "type": "string",
@@ -82,7 +86,8 @@
        },
        "rollback": {
          "type": "array",
-         "description": "Specify a list of user DB rollback commands to run"
+         "description": "Specify a list of user DB rollback commands to run. DEPRECATED",
+         "deprecated": true
        }
      }
    },
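
Each per-field connection setting above is now marked deprecated in favor of a single `database_url`. As a rough illustration (not code from this package), the deprecated `hostname`/`port`/`username`/`password`/`app_db_name` fields collapse into the components of one standard Postgres connection URL; all values below are placeholders:

```python
# Illustration only: how the deprecated per-field settings map onto one URL.
import os
from urllib.parse import quote_plus

hostname = "localhost"
port = 5432
username = "postgres"
password = os.environ.get("PGPASSWORD", "dbos")  # prefer env vars over literals
app_db_name = "my_app_db"

database_url = (
    f"postgresql://{quote_plus(username)}:{quote_plus(password)}"
    f"@{hostname}:{port}/{app_db_name}"
)
print(database_url)
```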
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 0.25.0a16
+ Version: 0.26.0
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT
@@ -1,29 +1,27 @@
- dbos-0.25.0a16.dist-info/METADATA,sha256=D0y5Vi7WtgVlpaBzrj3JhirxVna3o-zhQQiOu_Kb1Mc,5554
- dbos-0.25.0a16.dist-info/WHEEL,sha256=thaaA2w1JzcGC48WYufAs8nrYZjJm8LqNfnXFOFyCC4,90
- dbos-0.25.0a16.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
- dbos-0.25.0a16.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
- dbos/__init__.py,sha256=3NQfGlBiiUSM_v88STdVP3rNZvGkUL_9WbSotKb8Voo,873
+ dbos-0.26.0.dist-info/METADATA,sha256=-XcZMGdYZLM8yqGuwxhB-tbFwowZJw0ciFXhmvi2lmA,5551
+ dbos-0.26.0.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+ dbos-0.26.0.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+ dbos-0.26.0.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+ dbos/__init__.py,sha256=VoGS7H9GVtNAnD2S4zseIEioS1dNIJXRovQ4oHlg8og,842
  dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
- dbos/_admin_server.py,sha256=FLUacm9WGIPjB5s3QhdpMCilc8JHJOF0KMNStF82qs0,6625
- dbos/_app_db.py,sha256=R3sbh--84A4i-dTz8IXYmxO4b4s5VSwiPQvi_le52mg,6109
+ dbos/_admin_server.py,sha256=RrbABfR1D3p9c_QLrCSrgFuYce6FKi0fjMRIYLjO_Y8,9038
+ dbos/_app_db.py,sha256=obNlgC9IZ20y8tqQeA1q4TjceG3jBFalxz70ieDOWCA,11332
  dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
- dbos/_client.py,sha256=XKjR0a3JA9Xfo9VJx48EcdyiVDaA10FvI7Y_I1v27Ng,7384
- dbos/_cloudutils/authentication.py,sha256=V0fCWQN9stCkhbuuxgPTGpvuQcDqfU3KAxPAh01vKW4,5007
- dbos/_cloudutils/cloudutils.py,sha256=YC7jGsIopT0KveLsqbRpQk2KlRBk-nIRC_UCgep4f3o,7797
- dbos/_cloudutils/databases.py,sha256=_shqaqSvhY4n2ScgQ8IP5PDZvzvcx3YBKV8fj-cxhSY,8543
- dbos/_conductor/conductor.py,sha256=7elKINsgl4s1Tg5DwrU-K7xQ5vQvmDAIfAvUgfwpGN0,16784
- dbos/_conductor/protocol.py,sha256=xN7pmooyF1pqbH1b6WhllU5718P7zSb_b0KCwA6bzcs,6716
- dbos/_context.py,sha256=3He4w46OTFbR7h8U1MLcdaU10wNyIPBSRqzLkdggv7U,19368
- dbos/_core.py,sha256=kIj_4wlIff8ptlACJKXAPSNoyJIt2h44swjMKxfwv0k,45789
+ dbos/_client.py,sha256=f1n5bbtVO-Mf5dDvI3sNlozxHSUfstWtgPirSqv1kpE,12518
+ dbos/_conductor/conductor.py,sha256=HYzVL29IMMrs2Mnms_7cHJynCnmmEN5SDQOMjzn3UoU,16840
+ dbos/_conductor/protocol.py,sha256=zEKIuOQdIaSduNqfZKpo8PSD9_1oNpKIPnBNCu3RUyE,6681
+ dbos/_context.py,sha256=aHzJxO7LLAz9w3G2dkZnOcFW_GG-Yaxd02AaoLu4Et8,21861
+ dbos/_core.py,sha256=ylTVSv02h2M5SmDgYEJAZmNiKX35zPq0z-9WA-f4byY,47900
  dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
- dbos/_db_wizard.py,sha256=VnMa6OL87Lc-XPDD1RnXp8NjsJE8YgiQLj3wtWAXp-8,8252
- dbos/_dbos.py,sha256=AKfB61vvz5shPoEpKxqrqoiDemintKMqyBxhESRRJE8,45231
- dbos/_dbos_config.py,sha256=7Qm3FARP3lTKZS0gSxDHLbpaDCT30GzfyERxfCde4bc,21566
- dbos/_debug.py,sha256=mmgvLkqlrljMBBow9wk01PPur9kUf2rI_11dTJXY4gw,1822
- dbos/_error.py,sha256=B6Y9XLS1f6yrawxB2uAEYFMxFwk9BHhdxPNddKco-Fw,5399
+ dbos/_dbos.py,sha256=zE-4CcTQo8ICz-6Ng776IUKdur4qOt3bb810fUAhQc0,47343
+ dbos/_dbos_config.py,sha256=Q_YH1HSWzCzzCX7m6z3EjTp_YIjtFbohil6qmcmNARI,21311
+ dbos/_debug.py,sha256=MNlQVZ6TscGCRQeEEL0VE8Uignvr6dPeDDDefS3xgIE,1823
+ dbos/_docker_pg_helper.py,sha256=NmcgqmR5rQA_4igfeqh8ugNT2z3YmoOvuep_MEtxTiY,5854
+ dbos/_error.py,sha256=9ITvFsN_Udpx0xXtYQHXXXb6PjPr3TmMondGmprV-L0,7003
+ dbos/_event_loop.py,sha256=NmaLbEQFfEK36S_0KhVD39YdYrGce3qSKCTJ-5RqKQ0,2136
  dbos/_fastapi.py,sha256=PhaKftbApHnjtYEOw0EYna_3K0cmz__J9of7mRJWzu4,3704
  dbos/_flask.py,sha256=DZKUZR5-xOzPI7tYZ53r2PvvHVoAb8SYwLzMVFsVfjI,2608
- dbos/_kafka.py,sha256=o6DbwnsYRDtvVTZVsN7BAK8cdP79AfoWX3Q7CGY2Yuo,4199
+ dbos/_kafka.py,sha256=pz0xZ9F3X9Ky1k-VSbeF3tfPhP3UPr3lUUhUfE41__U,4198
  dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
  dbos/_logger.py,sha256=qv2srteCF2rSRjCK1VGOck3ieIkwUe9Lvbv60mJc16E,4069
  dbos/_migrations/env.py,sha256=38SIGVbmn_VV2x2u1aHLcPOoWgZ84eCymf3g_NljmbU,1626
@@ -31,23 +29,24 @@ dbos/_migrations/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dM
  dbos/_migrations/versions/04ca4f231047_workflow_queues_executor_id.py,sha256=ICLPl8CN9tQXMsLDsAj8z1TsL831-Z3F8jSBvrR-wyw,736
  dbos/_migrations/versions/50f3227f0b4b_fix_job_queue.py,sha256=ZBYrtTdxy64HxIAlOes89fVIk2P1gNaJack7wuC_epg,873
  dbos/_migrations/versions/5c361fc04708_added_system_tables.py,sha256=Xr9hBDJjkAtymlauOmAy00yUHj0VVUaEz7kNwEM9IwE,6403
+ dbos/_migrations/versions/83f3732ae8e7_workflow_timeout.py,sha256=Q_R35pb8AfVI3sg5mzKwyoPfYB88Ychcc8gwxpM9R7A,1035
  dbos/_migrations/versions/a3b18ad34abe_added_triggers.py,sha256=Rv0ZsZYZ_WdgGEULYsPfnp4YzaO5L198gDTgYY39AVA,2022
  dbos/_migrations/versions/d76646551a6b_job_queue_limiter.py,sha256=8PyFi8rd6CN-mUro43wGhsg5wcQWKZPRHD6jw8R5pVc,986
  dbos/_migrations/versions/d76646551a6c_workflow_queue.py,sha256=G942nophZ2uC2vc4hGBC02Ptng1715roTjY3xiyzZU4,729
  dbos/_migrations/versions/eab0cc1d9a14_job_queue.py,sha256=uvhFOtqbBreCePhAxZfIT0qCAI7BiZTou9wt6QnbY7c,1412
  dbos/_migrations/versions/f4b9b32ba814_functionname_childid_op_outputs.py,sha256=m90Lc5YH0ZISSq1MyxND6oq3RZrZKrIqEsZtwJ1jWxA,1049
  dbos/_outcome.py,sha256=EXxBg4jXCVJsByDQ1VOCIedmbeq_03S6d-p1vqQrLFU,6810
- dbos/_queue.py,sha256=HqeybpS7kujs9laM-n0j7LWcJdwMl9okqm2hNSrUab8,3367
- dbos/_recovery.py,sha256=4KyZb0XJEUGH7ekYT1kpx38i6y5vygPeH75Ta7RZjYo,2596
- dbos/_registrations.py,sha256=_zy6k944Ll8QwqU12Kr3OP23ukVtm8axPNN1TS_kJRc,6717
+ dbos/_queue.py,sha256=l0g_CXJbxEmftCA9yhy-cyaR_sddfQSCfm-5XgIWzqU,3397
+ dbos/_recovery.py,sha256=98Py7icfytyIELJ54gIsdvmURBvTb0HmWaxEAuYL0dc,2546
+ dbos/_registrations.py,sha256=EZzG3ZfYmWA2bHX2hpnSIQ3PTi3-cXsvbcmXjyOusMk,7302
  dbos/_request.py,sha256=cX1B3Atlh160phgS35gF1VEEV4pD126c9F3BDgBmxZU,929
  dbos/_roles.py,sha256=iOsgmIAf1XVzxs3gYWdGRe1B880YfOw5fpU7Jwx8_A8,2271
- dbos/_scheduler.py,sha256=boG4BdcncFa3WxR97T5Oou4ppR0TgrEa2QQkjzpFEHU,2028
+ dbos/_scheduler.py,sha256=SR1oRZRcVzYsj-JauV2LA8JtwTkt8mru7qf6H1AzQ1U,2027
  dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- dbos/_schemas/application_database.py,sha256=KeyoPrF7hy_ODXV7QNike_VFSD74QBRfQ76D7QyE9HI,966
- dbos/_schemas/system_database.py,sha256=W9eSpL7SZzQkxcEZ4W07BOcwkkDr35b9oCjUOgfHWek,5336
+ dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
+ dbos/_schemas/system_database.py,sha256=aChSK7uLECD-v-7BZeOfuZFbtWayllaS3PaowaKDHwY,5490
  dbos/_serialization.py,sha256=YCYv0qKAwAZ1djZisBC7khvKqG-5OcIv9t9EC5PFIog,1743
- dbos/_sys_db.py,sha256=fEkJbIzavLX-7K_TFnJe8ST58cpWShpBIarhEVXu6Po,62816
+ dbos/_sys_db.py,sha256=SjYTleSEPtZVrPRimgXKeIvTjY8VN9G9jlgbcPT8ghg,80631
  dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
  dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbos/_templates/dbos-db-starter/__package/main.py,sha256=nJMN3ZD2lmwg4Dcgmiwqc-tQGuCJuJal2Xl85iA277U,2453
@@ -59,12 +58,12 @@ dbos/_templates/dbos-db-starter/migrations/script.py.mako,sha256=MEqL-2qATlST9TA
  dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sha256=MpS7LGaJS0CpvsjhfDkp9EJqvMvVCjRPfUp4c0aE2ys,941
  dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
  dbos/_tracer.py,sha256=dFDSFlta-rfA3-ahIRLYwnnoAOmlavdxAGllqwFgnCA,2440
- dbos/_utils.py,sha256=wjOJzxN66IzL9p4dwcEmQACRQah_V09G6mJI2exQfOM,155
- dbos/_workflow_commands.py,sha256=SYp2khc9RSf6tjllG9CqT1zjBQnFTFq33ePXpvmRwME,5892
+ dbos/_utils.py,sha256=nFRUHzVjXG5AusF85AlYHikj63Tzi-kQm992ihsrAxA,201
+ dbos/_workflow_commands.py,sha256=7wyxTfIyh2IVIqlkaTr8CMBq8yxWP3Hhddyv1YJY8zE,3576
  dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
  dbos/cli/_template_init.py,sha256=-WW3kbq0W_Tq4WbMqb1UGJG3xvJb3woEY5VspG95Srk,2857
- dbos/cli/cli.py,sha256=G55sZJxfmvUGvWr0hoIWwVZBy-fJdpCsTsZmuHT1CjA,16049
- dbos/dbos-config.schema.json,sha256=HtF_njVTGHLdzBGZ4OrGQz3qbPPT0Go-iwd1PgFVTNg,5847
+ dbos/cli/cli.py,sha256=1qCTs__A9LOEfU44XZ6TufwmRwe68ZEwbWEPli3vnVM,17873
+ dbos/dbos-config.schema.json,sha256=8KcwJb_sQc4-6tQG2TLmjE_nratfrQa0qVLl9XPsvWE,6367
  dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
- dbos-0.25.0a16.dist-info/RECORD,,
+ dbos-0.26.0.dist-info/RECORD,,
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: pdm-backend (2.4.3)
+ Generator: pdm-backend (2.4.4)
  Root-Is-Purelib: true
  Tag: py3-none-any