dbos 1.7.0a5__py3-none-any.whl → 1.8.0a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic.

dbos/_admin_server.py CHANGED
@@ -343,6 +343,8 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 offset=filters.get("offset"),
                 sort_desc=filters.get("sort_desc", False),
                 workflow_id_prefix=filters.get("workflow_id_prefix"),
+                load_input=filters.get("load_input", False),
+                load_output=filters.get("load_output", False),
             )
             workflows_output = [
                 conductor_protocol.WorkflowsOutput.from_workflow_information(i)
@@ -367,6 +369,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
                 offset=filters.get("offset"),
                 queue_name=filters.get("queue_name"),
                 sort_desc=filters.get("sort_desc", False),
+                load_input=filters.get("load_input", False),
             )
             workflows_output = [
                 conductor_protocol.WorkflowsOutput.from_workflow_information(i)
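
For orientation, a hedged sketch of a client-side request exercising the new filter flags. Only the filter keys (offset, sort_desc, workflow_id_prefix, load_input, load_output) come from this diff; the admin-server address and endpoint path below are assumptions for illustration, not taken from the package.

import json
import urllib.request

# Filter keys mirror what AdminRequestHandler pulls out of `filters`;
# False skips loading serialized workflow inputs/outputs in the listing.
filters = {
    "offset": 0,
    "sort_desc": True,
    "workflow_id_prefix": "order-",
    "load_input": False,
    "load_output": False,
}
req = urllib.request.Request(
    "http://localhost:3001/workflows",  # assumed host/port/path, not from this diff
    data=json.dumps(filters).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read()))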
dbos/_client.py CHANGED
@@ -294,6 +294,8 @@ class DBOSClient:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         return list_workflows(
             self._sys_db,
@@ -308,6 +310,8 @@ class DBOSClient:
             offset=offset,
             sort_desc=sort_desc,
             workflow_id_prefix=workflow_id_prefix,
+            load_input=load_input,
+            load_output=load_output,
         )
 
     async def list_workflows_async(
@@ -324,6 +328,8 @@ class DBOSClient:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         return await asyncio.to_thread(
             self.list_workflows,
@@ -338,6 +344,8 @@ class DBOSClient:
             offset=offset,
             sort_desc=sort_desc,
             workflow_id_prefix=workflow_id_prefix,
+            load_input=load_input,
+            load_output=load_output,
         )
 
     def list_queued_workflows(
@@ -351,6 +359,7 @@ class DBOSClient:
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         return list_queued_workflows(
             self._sys_db,
@@ -362,6 +371,7 @@ class DBOSClient:
             limit=limit,
             offset=offset,
             sort_desc=sort_desc,
+            load_input=load_input,
         )
 
     async def list_queued_workflows_async(
@@ -375,6 +385,7 @@ class DBOSClient:
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         return await asyncio.to_thread(
             self.list_queued_workflows,
@@ -386,6 +397,7 @@ class DBOSClient:
             limit=limit,
             offset=offset,
             sort_desc=sort_desc,
+            load_input=load_input,
         )
 
     def list_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
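
A hedged usage sketch of the new client-side flags: listing workflows without pulling their serialized inputs and outputs keeps the system-database query and deserialization lighter when only metadata is needed. The connection string is a placeholder and the constructor shape is assumed; the load_input/load_output keyword arguments are the ones added in this diff.

from dbos import DBOSClient

# Placeholder connection string for the DBOS system database.
client = DBOSClient("postgresql://postgres:dbos@localhost:5432/dbos")

# Metadata-only listing: skip loading serialized inputs and outputs/errors.
statuses = client.list_workflows(
    limit=50,
    sort_desc=True,
    load_input=False,
    load_output=False,
)
for status in statuses:
    print(status.workflow_id, status.name, status.status)

# Queued workflows gain only load_input; outputs are always None while enqueued.
queued = client.list_queued_workflows(load_input=False)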
dbos/_conductor/conductor.py CHANGED
@@ -223,6 +223,8 @@ class ConductorWebsocket(threading.Thread):
                 body = list_workflows_message.body
                 infos = []
                 try:
+                    load_input = body.get("load_input", False)
+                    load_output = body.get("load_output", False)
                     infos = list_workflows(
                         self.dbos._sys_db,
                         workflow_ids=body["workflow_uuids"],
@@ -235,6 +237,8 @@ class ConductorWebsocket(threading.Thread):
                         limit=body["limit"],
                         offset=body["offset"],
                         sort_desc=body["sort_desc"],
+                        load_input=load_input,
+                        load_output=load_output,
                     )
                 except Exception as e:
                     error_message = f"Exception encountered when listing workflows: {traceback.format_exc()}"
@@ -257,6 +261,7 @@ class ConductorWebsocket(threading.Thread):
                 q_body = list_queued_workflows_message.body
                 infos = []
                 try:
+                    q_load_input = q_body.get("load_input", False)
                     infos = list_queued_workflows(
                         self.dbos._sys_db,
                         start_time=q_body["start_time"],
@@ -267,6 +272,7 @@ class ConductorWebsocket(threading.Thread):
                         offset=q_body["offset"],
                         queue_name=q_body["queue_name"],
                         sort_desc=q_body["sort_desc"],
+                        load_input=q_load_input,
                     )
                 except Exception as e:
                     error_message = f"Exception encountered when listing queued workflows: {traceback.format_exc()}"
dbos/_conductor/protocol.py CHANGED
@@ -110,7 +110,7 @@ class RestartResponse(BaseMessage):
     error_message: Optional[str] = None
 
 
-class ListWorkflowsBody(TypedDict):
+class ListWorkflowsBody(TypedDict, total=False):
     workflow_uuids: List[str]
     workflow_name: Optional[str]
     authenticated_user: Optional[str]
@@ -121,6 +121,8 @@ class ListWorkflowsBody(TypedDict):
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
+    load_input: bool
+    load_output: bool
 
 
 @dataclass
@@ -209,7 +211,7 @@ class ListWorkflowsResponse(BaseMessage):
     error_message: Optional[str] = None
 
 
-class ListQueuedWorkflowsBody(TypedDict):
+class ListQueuedWorkflowsBody(TypedDict, total=False):
     workflow_name: Optional[str]
     start_time: Optional[str]
     end_time: Optional[str]
@@ -218,6 +220,7 @@ class ListQueuedWorkflowsBody(TypedDict):
     limit: Optional[int]
     offset: Optional[int]
     sort_desc: bool
+    load_input: bool
 
 
 @dataclass
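
The switch to total=False makes every key of these TypedDicts optional, which is why the conductor handlers above can read the new fields with .get(..., False) even from older callers that never send them. A small self-contained illustration of that pattern; the class and payload names here are illustrative stand-ins, not taken from the protocol module.

from typing import List, TypedDict

class ExampleListBody(TypedDict, total=False):  # illustrative stand-in
    workflow_uuids: List[str]
    sort_desc: bool
    load_input: bool
    load_output: bool

# An older caller that omits the new keys still type-checks and still works:
body: ExampleListBody = {"workflow_uuids": ["wf-1"], "sort_desc": True}
load_input = body.get("load_input", False)    # -> False when the key is absent
load_output = body.get("load_output", False)  # -> False when the key is absent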
dbos/_context.py CHANGED
@@ -10,7 +10,7 @@ from enum import Enum
 from types import TracebackType
 from typing import List, Literal, Optional, Type, TypedDict
 
-from opentelemetry.trace import Span, Status, StatusCode
+from opentelemetry.trace import Span, Status, StatusCode, use_span
 from sqlalchemy.orm import Session
 
 from dbos._utils import GlobalParams
@@ -68,6 +68,20 @@ class StepStatus:
     max_attempts: Optional[int]
 
 
+@dataclass
+class ContextSpan:
+    """
+    A span that is used to track the context of a workflow or step execution.
+
+    Attributes:
+        span: The OpenTelemetry span object.
+        context_manager: The context manager that is used to manage the span's lifecycle.
+    """
+
+    span: Span
+    context_manager: AbstractContextManager[Span]
+
+
 class DBOSContext:
     def __init__(self) -> None:
         self.executor_id = GlobalParams.executor_id
@@ -86,7 +100,7 @@ class DBOSContext:
         self.curr_step_function_id: int = -1
         self.curr_tx_function_id: int = -1
         self.sql_session: Optional[Session] = None
-        self.spans: list[Span] = []
+        self.context_spans: list[ContextSpan] = []
 
         self.authenticated_user: Optional[str] = None
         self.authenticated_roles: Optional[List[str]] = None
@@ -202,8 +216,8 @@ class DBOSContext:
             self._end_span(exc_value)
 
     def get_current_span(self) -> Optional[Span]:
-        if len(self.spans):
-            return self.spans[-1]
+        if len(self.context_spans) > 0:
+            return self.context_spans[-1].span
         return None
 
     def _start_span(self, attributes: TracedAttributes) -> None:
@@ -218,27 +232,38 @@ class DBOSContext:
         )
         attributes["authenticatedUserAssumedRole"] = self.assumed_role
         span = dbos_tracer.start_span(
-            attributes, parent=self.spans[-1] if len(self.spans) > 0 else None
+            attributes,
+            parent=self.context_spans[-1].span if len(self.context_spans) > 0 else None,
+        )
+        # Activate the current span
+        cm = use_span(
+            span,
+            end_on_exit=False,
+            record_exception=False,
+            set_status_on_exception=False,
         )
-        self.spans.append(span)
+        self.context_spans.append(ContextSpan(span, cm))
+        cm.__enter__()
 
     def _end_span(self, exc_value: Optional[BaseException]) -> None:
+        context_span = self.context_spans.pop()
         if exc_value is None:
-            self.spans[-1].set_status(Status(StatusCode.OK))
+            context_span.span.set_status(Status(StatusCode.OK))
         else:
-            self.spans[-1].set_status(
+            context_span.span.set_status(
                 Status(StatusCode.ERROR, description=str(exc_value))
             )
-        dbos_tracer.end_span(self.spans.pop())
+        dbos_tracer.end_span(context_span.span)
+        context_span.context_manager.__exit__(None, None, None)
 
     def set_authentication(
         self, user: Optional[str], roles: Optional[List[str]]
     ) -> None:
        self.authenticated_user = user
        self.authenticated_roles = roles
-        if user is not None and len(self.spans) > 0:
-            self.spans[-1].set_attribute("authenticatedUser", user)
-            self.spans[-1].set_attribute(
+        if user is not None and len(self.context_spans) > 0:
+            self.context_spans[-1].span.set_attribute("authenticatedUser", user)
+            self.context_spans[-1].span.set_attribute(
                 "authenticatedUserRoles", json.dumps(roles) if roles is not None else ""
             )
 
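
The ContextSpan change pairs each DBOS span with an opentelemetry.trace.use_span context manager, so the span is also activated as the current OpenTelemetry context (letting nested instrumentation and log correlation see it) while ending, exception recording, and status setting remain under DBOS control. A minimal standalone sketch of that activation pattern, using a plain OpenTelemetry tracer rather than DBOS internals; the tracer and span names are illustrative.

from opentelemetry import trace
from opentelemetry.trace import Status, StatusCode, use_span

tracer = trace.get_tracer("example")  # illustrative tracer name

# Start a span without making it current...
span = tracer.start_span("workflow.example")
# ...then activate it explicitly, keeping lifecycle control ourselves,
# mirroring the end_on_exit/record_exception/set_status_on_exception flags above.
cm = use_span(
    span,
    end_on_exit=False,
    record_exception=False,
    set_status_on_exception=False,
)
cm.__enter__()
try:
    assert trace.get_current_span() is span  # the span is now "current"
    span.set_status(Status(StatusCode.OK))
finally:
    cm.__exit__(None, None, None)
    span.end()  # ended manually, as dbos_tracer.end_span does in the diff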
dbos/_dbos.py CHANGED
@@ -1032,6 +1032,8 @@ class DBOS:
         offset: Optional[int] = None,
         sort_desc: bool = False,
         workflow_id_prefix: Optional[str] = None,
+        load_input: bool = True,
+        load_output: bool = True,
     ) -> List[WorkflowStatus]:
         def fn() -> List[WorkflowStatus]:
             return list_workflows(
@@ -1047,6 +1049,8 @@ class DBOS:
                 offset=offset,
                 sort_desc=sort_desc,
                 workflow_id_prefix=workflow_id_prefix,
+                load_input=load_input,
+                load_output=load_output,
             )
 
         return _get_dbos_instance()._sys_db.call_function_as_step(
@@ -1065,6 +1069,7 @@ class DBOS:
         limit: Optional[int] = None,
         offset: Optional[int] = None,
         sort_desc: bool = False,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         def fn() -> List[WorkflowStatus]:
             return list_queued_workflows(
@@ -1077,6 +1082,7 @@ class DBOS:
                 limit=limit,
                 offset=offset,
                 sort_desc=sort_desc,
+                load_input=load_input,
             )
 
         return _get_dbos_instance()._sys_db.call_function_as_step(
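
The same flags are mirrored on the DBOS class methods, so code running inside a configured DBOS application can request metadata-only listings as well. A brief hedged sketch, assuming DBOS has already been configured and launched elsewhere in the application:

from dbos import DBOS

# Assumes a DBOS app has been configured and launched before this point.
recent = DBOS.list_workflows(limit=10, load_input=False, load_output=False)
queued = DBOS.list_queued_workflows(load_input=False)
print([w.status for w in recent], len(queued))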
dbos/_error.py CHANGED
@@ -126,7 +126,7 @@ class DBOSDeadLetterQueueError(DBOSException):
 
     def __init__(self, wf_id: str, max_retries: int):
         super().__init__(
-            f"Workflow {wf_id} has been moved to the dead-letter queue after exceeding the maximum of ${max_retries} retries",
+            f"Workflow {wf_id} has been moved to the dead-letter queue after exceeding the maximum of {max_retries} retries",
            dbos_error_code=DBOSErrorCode.DeadLetterQueueError.value,
        )
 
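
The one-character fix above removes a stray $ left over from a JavaScript-style template literal; in a Python f-string the $ is not part of the substitution syntax, so it was rendered literally in the error message. A quick demonstration:

max_retries = 5
print(f"exceeding the maximum of ${max_retries} retries")  # old: '... of $5 retries'
print(f"exceeding the maximum of {max_retries} retries")   # fixed: '... of 5 retries'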
dbos/_sys_db.py CHANGED
@@ -437,7 +437,14 @@ class SystemDatabase:
 
         # Values to update when a row already exists for this workflow
         update_values: dict[str, Any] = {
-            "recovery_attempts": SystemSchema.workflow_status.c.recovery_attempts + 1,
+            "recovery_attempts": sa.case(
+                (
+                    SystemSchema.workflow_status.c.status
+                    != WorkflowStatusString.ENQUEUED.value,
+                    SystemSchema.workflow_status.c.recovery_attempts + 1,
+                ),
+                else_=SystemSchema.workflow_status.c.recovery_attempts,
+            ),
             "updated_at": func.extract("epoch", func.now()) * 1000,
         }
         # Don't update an existing executor ID when enqueueing a workflow.
@@ -788,11 +795,17 @@ class SystemDatabase:
                 pass  # CB: I guess we're assuming the WF will show up eventually.
             time.sleep(1)
 
-    def get_workflows(self, input: GetWorkflowsInput) -> List[WorkflowStatus]:
+    def get_workflows(
+        self,
+        input: GetWorkflowsInput,
+        *,
+        load_input: bool = True,
+        load_output: bool = True,
+    ) -> List[WorkflowStatus]:
         """
         Retrieve a list of workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
         """
-        query = sa.select(
+        load_columns = [
             SystemSchema.workflow_status.c.workflow_uuid,
             SystemSchema.workflow_status.c.status,
             SystemSchema.workflow_status.c.name,
@@ -808,12 +821,16 @@ class SystemDatabase:
             SystemSchema.workflow_status.c.updated_at,
             SystemSchema.workflow_status.c.application_version,
             SystemSchema.workflow_status.c.application_id,
-            SystemSchema.workflow_status.c.inputs,
-            SystemSchema.workflow_status.c.output,
-            SystemSchema.workflow_status.c.error,
             SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
             SystemSchema.workflow_status.c.workflow_timeout_ms,
-        )
+        ]
+        if load_input:
+            load_columns.append(SystemSchema.workflow_status.c.inputs)
+        if load_output:
+            load_columns.append(SystemSchema.workflow_status.c.output)
+            load_columns.append(SystemSchema.workflow_status.c.error)
+
+        query = sa.select(*load_columns)
         if input.sort_desc:
             query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
         else:
@@ -880,29 +897,35 @@ class SystemDatabase:
             info.updated_at = row[12]
             info.app_version = row[13]
             info.app_id = row[14]
+            info.workflow_deadline_epoch_ms = row[15]
+            info.workflow_timeout_ms = row[16]
 
+            raw_input = row[17] if load_input else None
+            raw_output = row[18] if load_output else None
+            raw_error = row[19] if load_output else None
             inputs, output, exception = _serialization.safe_deserialize(
                 info.workflow_id,
-                serialized_input=row[15],
-                serialized_output=row[16],
-                serialized_exception=row[17],
+                serialized_input=raw_input,
+                serialized_output=raw_output,
+                serialized_exception=raw_error,
             )
             info.input = inputs
             info.output = output
             info.error = exception
-            info.workflow_deadline_epoch_ms = row[18]
-            info.workflow_timeout_ms = row[19]
 
             infos.append(info)
         return infos
 
     def get_queued_workflows(
-        self, input: GetQueuedWorkflowsInput
+        self,
+        input: GetQueuedWorkflowsInput,
+        *,
+        load_input: bool = True,
     ) -> List[WorkflowStatus]:
         """
         Retrieve a list of queued workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
         """
-        query = sa.select(
+        load_columns = [
             SystemSchema.workflow_status.c.workflow_uuid,
             SystemSchema.workflow_status.c.status,
             SystemSchema.workflow_status.c.name,
@@ -918,12 +941,13 @@ class SystemDatabase:
             SystemSchema.workflow_status.c.updated_at,
             SystemSchema.workflow_status.c.application_version,
             SystemSchema.workflow_status.c.application_id,
-            SystemSchema.workflow_status.c.inputs,
-            SystemSchema.workflow_status.c.output,
-            SystemSchema.workflow_status.c.error,
             SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
             SystemSchema.workflow_status.c.workflow_timeout_ms,
-        ).where(
+        ]
+        if load_input:
+            load_columns.append(SystemSchema.workflow_status.c.inputs)
+
+        query = sa.select(*load_columns).where(
             sa.and_(
                 SystemSchema.workflow_status.c.queue_name.isnot(None),
                 SystemSchema.workflow_status.c.status.in_(["ENQUEUED", "PENDING"]),
@@ -984,18 +1008,21 @@ class SystemDatabase:
             info.updated_at = row[12]
             info.app_version = row[13]
             info.app_id = row[14]
+            info.workflow_deadline_epoch_ms = row[15]
+            info.workflow_timeout_ms = row[16]
+
+            raw_input = row[17] if load_input else None
 
+            # Error and Output are not loaded because they should always be None for queued workflows.
            inputs, output, exception = _serialization.safe_deserialize(
                 info.workflow_id,
-                serialized_input=row[15],
-                serialized_output=row[16],
-                serialized_exception=row[17],
+                serialized_input=raw_input,
+                serialized_output=None,
+                serialized_exception=None,
             )
             info.input = inputs
             info.output = output
             info.error = exception
-            info.workflow_deadline_epoch_ms = row[18]
-            info.workflow_timeout_ms = row[19]
 
             infos.append(info)
 
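
The recovery_attempts change swaps an unconditional increment for a conditional one: SQLAlchemy's sa.case emits a SQL CASE expression, so the counter advances only when the existing row's status is not ENQUEUED. A hedged standalone sketch of the same construct against a toy table; the table and column definitions here are illustrative, not the real DBOS system schema.

import sqlalchemy as sa

metadata = sa.MetaData()
# Illustrative table, not the real workflow_status schema.
status_table = sa.Table(
    "workflow_status_demo",
    metadata,
    sa.Column("workflow_uuid", sa.Text, primary_key=True),
    sa.Column("status", sa.Text),
    sa.Column("recovery_attempts", sa.Integer, server_default="0"),
)

# Increment recovery_attempts only for rows that are not ENQUEUED,
# mirroring the CASE expression used in the upsert's update_values above.
stmt = sa.update(status_table).values(
    recovery_attempts=sa.case(
        (status_table.c.status != "ENQUEUED", status_table.c.recovery_attempts + 1),
        else_=status_table.c.recovery_attempts,
    )
)
print(stmt)  # renders UPDATE ... SET recovery_attempts=CASE WHEN ... END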
dbos/_workflow_commands.py CHANGED
@@ -33,6 +33,8 @@ def list_workflows(
     offset: Optional[int] = None,
     sort_desc: bool = False,
     workflow_id_prefix: Optional[str] = None,
+    load_input: bool = True,
+    load_output: bool = True,
 ) -> List[WorkflowStatus]:
     input = GetWorkflowsInput()
     input.workflow_ids = workflow_ids
@@ -47,7 +49,9 @@ def list_workflows(
     input.sort_desc = sort_desc
     input.workflow_id_prefix = workflow_id_prefix
 
-    infos: List[WorkflowStatus] = sys_db.get_workflows(input)
+    infos: List[WorkflowStatus] = sys_db.get_workflows(
+        input, load_input=load_input, load_output=load_output
+    )
 
     return infos
 
@@ -63,6 +67,7 @@ def list_queued_workflows(
     limit: Optional[int] = None,
     offset: Optional[int] = None,
     sort_desc: bool = False,
+    load_input: bool = True,
 ) -> List[WorkflowStatus]:
     input: GetQueuedWorkflowsInput = {
         "queue_name": queue_name,
@@ -75,7 +80,9 @@ def list_queued_workflows(
         "sort_desc": sort_desc,
     }
 
-    infos: List[WorkflowStatus] = sys_db.get_queued_workflows(input)
+    infos: List[WorkflowStatus] = sys_db.get_queued_workflows(
+        input, load_input=load_input
+    )
     return infos
 
 
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 1.7.0a5
+Version: 1.8.0a3
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
@@ -1,23 +1,23 @@
-dbos-1.7.0a5.dist-info/METADATA,sha256=MpwGnaijkOfUai8Dqdrk2NS_wemMnKYf9N1YItrPMpo,13267
-dbos-1.7.0a5.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
-dbos-1.7.0a5.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
-dbos-1.7.0a5.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos-1.8.0a3.dist-info/METADATA,sha256=Eo7YKx6NXFQxjbLzrfUNs7AiHsUQqogTMQZNi1SNSmg,13267
+dbos-1.8.0a3.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+dbos-1.8.0a3.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-1.8.0a3.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
-dbos/_admin_server.py,sha256=S2hFr3m5R3WkbOp3Yz9lWt5iLBfWPnvhYwYLokVax0A,16094
+dbos/_admin_server.py,sha256=e8ELhcDWqR3_PNobnNgUvLGh5lzZq0yFSF6dvtzoQRI,16267
 dbos/_app_db.py,sha256=htblDPfqrpb_uZoFcvaud7cgQ-PDyn6Bn-cBidxdCTA,10603
 dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
-dbos/_client.py,sha256=DeiJHo5fTedWsipr7qlQQIcDmVAPjzzX94X01121oQM,14780
-dbos/_conductor/conductor.py,sha256=y_T-8kEHwKWt6W8LtcFMctB_6EvYFWsuGLxiFuuKKBU,23702
-dbos/_conductor/protocol.py,sha256=DOTprPSd7oHDcvwWSyZpnlPds_JfILtcKzHZa-qBsF4,7330
-dbos/_context.py,sha256=zhje6jObpBcRALYfHyyIEumHtk_enl_PxLl01j4oDME,24897
+dbos/_client.py,sha256=KD38fNX-u8hCyjcED3-Q7B6RrCNDeO9YZraAoi2nzrI,15194
+dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
+dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
+dbos/_context.py,sha256=0vFtLAk3WF5BQYIYNFImDRBppKO2CTKOSy51zQC-Cu8,25723
 dbos/_core.py,sha256=kRY2PXVryfpwjbOCmgzPA_-qNsFmRMLi-CxYCnyp1V8,49495
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
-dbos/_dbos.py,sha256=BprKIGPT-QDeoxtKM6kjRUK9dyF8sPCFfHIyIt0u7CE,48142
+dbos/_dbos.py,sha256=2fvnTLnHAXcI8FTXBiMrsnjxqg8sbTSKxYBDKG7LZ1g,48361
 dbos/_dbos_config.py,sha256=JUG4V1rrP0p1AYESgih4ea80qOH_13UsgoIIm8X84pw,20562
 dbos/_debug.py,sha256=99j2SChWmCPAlZoDmjsJGe77tpU2LEa8E2TtLAnnh7o,1831
 dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
-dbos/_error.py,sha256=nS7KuXJHhuNXZRErxdEUGT38Hb0VPyxNwSyADiVpHcE,8581
+dbos/_error.py,sha256=MAHBjo2MLoaHBtRL1pOPNKqAM3IdNPPV11n7-meSF54,8580
 dbos/_event_loop.py,sha256=cvaFN9-II3MsHEOq8QoICc_8qSKrjikMlLfuhC3Y8Dk,2923
 dbos/_fastapi.py,sha256=T7YlVY77ASqyTqq0aAPclZ9YzlXdGTT0lEYSwSgt1EE,3151
 dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
@@ -49,7 +49,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=rbFKggONdvvbb45InvGz0TM6a7c-Ux9dcaL-h_7Z7pU,4438
 dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
-dbos/_sys_db.py,sha256=PaWa5Y8ublSMqPQXCHvYqln01cGf2LtPdXaLEHJq500,80653
+dbos/_sys_db.py,sha256=862gEhgQ1l_yLDMoYrux4Ri8Hv6aQH-c_BHFAeDVTvE,81541
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -62,11 +62,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
 dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
 dbos/_tracer.py,sha256=RnlcaOJEx_58hr2J9L9g6E7gjAHAeEtEGugJZmCwNfQ,2963
 dbos/_utils.py,sha256=uywq1QrjMwy17btjxW4bES49povlQwYwYbvKwMT6C2U,1575
-dbos/_workflow_commands.py,sha256=4QCs7ziQ9T457tqfaNFwiXd6mDisr-ZK__skz1Uteyg,4648
+dbos/_workflow_commands.py,sha256=EmmAaQfRWeOZm_WPTznuU-O3he3jiSzzT9VpYrhxugE,4835
 dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
 dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
 dbos/cli/cli.py,sha256=IcfaX4rrSrk6f24S2jrlR33snYMyNyEIx_lNQtuVr2E,22081
 dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-1.7.0a5.dist-info/RECORD,,
+dbos-1.8.0a3.dist-info/RECORD,,
@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: pdm-backend (2.4.4)
+Generator: pdm-backend (2.4.5)
 Root-Is-Purelib: true
 Tag: py3-none-any