dbos 1.7.0a3__py3-none-any.whl → 1.8.0a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of dbos might be problematic; see the registry listing for details.

dbos/_admin_server.py CHANGED
@@ -3,7 +3,6 @@ from __future__ import annotations
  import json
  import re
  import threading
- from dataclasses import asdict
  from functools import partial
  from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer
  from typing import TYPE_CHECKING, Any, Dict, List, Optional, TypedDict
@@ -120,7 +119,12 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
  self.send_response(404)
  self._end_headers()
  return
- response_body = json.dumps(workflows[0].__dict__).encode("utf-8")
+ workflow_output = (
+     conductor_protocol.WorkflowsOutput.from_workflow_information(
+         workflows[0]
+     )
+ )
+ response_body = json.dumps(workflow_output.__dict__).encode("utf-8")
  self.send_response(200)
  self.send_header("Content-Type", "application/json")
  self.send_header("Content-Length", str(len(response_body)))
@@ -339,6 +343,8 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
  offset=filters.get("offset"),
  sort_desc=filters.get("sort_desc", False),
  workflow_id_prefix=filters.get("workflow_id_prefix"),
+ load_input=filters.get("load_input", False),
+ load_output=filters.get("load_output", False),
  )
  workflows_output = [
  conductor_protocol.WorkflowsOutput.from_workflow_information(i)
@@ -363,6 +369,7 @@ class AdminRequestHandler(BaseHTTPRequestHandler):
  offset=filters.get("offset"),
  queue_name=filters.get("queue_name"),
  sort_desc=filters.get("sort_desc", False),
+ load_input=filters.get("load_input", False),
  )
  workflows_output = [
  conductor_protocol.WorkflowsOutput.from_workflow_information(i)
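
The admin server reads the two new filter keys from the JSON body of its workflow-listing requests and defaults them to False, so callers must opt in to have inputs and outputs deserialized. A minimal sketch of exercising this over HTTP, assuming the admin server is reachable locally and that the workflow-listing endpoint accepts a POST with a JSON filter body (the URL, port, and route below are placeholders, not taken from this diff):

```python
import requests  # any HTTP client works; requests is assumed for brevity

# Placeholder host/port/route for illustration; consult your admin server configuration.
ADMIN_WORKFLOWS_URL = "http://localhost:3001/workflows"

filters = {
    "sort_desc": True,
    "load_input": True,   # opt in to deserialized workflow inputs
    "load_output": True,  # opt in to deserialized outputs and errors
}

resp = requests.post(ADMIN_WORKFLOWS_URL, json=filters, timeout=10)
resp.raise_for_status()
for workflow in resp.json():
    print(workflow)
```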
dbos/_client.py CHANGED
@@ -294,6 +294,8 @@ class DBOSClient:
  offset: Optional[int] = None,
  sort_desc: bool = False,
  workflow_id_prefix: Optional[str] = None,
+ load_input: bool = True,
+ load_output: bool = True,
  ) -> List[WorkflowStatus]:
  return list_workflows(
  self._sys_db,
@@ -308,6 +310,8 @@ class DBOSClient:
  offset=offset,
  sort_desc=sort_desc,
  workflow_id_prefix=workflow_id_prefix,
+ load_input=load_input,
+ load_output=load_output,
  )

  async def list_workflows_async(
@@ -324,6 +328,8 @@ class DBOSClient:
  offset: Optional[int] = None,
  sort_desc: bool = False,
  workflow_id_prefix: Optional[str] = None,
+ load_input: bool = True,
+ load_output: bool = True,
  ) -> List[WorkflowStatus]:
  return await asyncio.to_thread(
  self.list_workflows,
@@ -338,6 +344,8 @@ class DBOSClient:
  offset=offset,
  sort_desc=sort_desc,
  workflow_id_prefix=workflow_id_prefix,
+ load_input=load_input,
+ load_output=load_output,
  )

  def list_queued_workflows(
@@ -351,6 +359,7 @@ class DBOSClient:
  limit: Optional[int] = None,
  offset: Optional[int] = None,
  sort_desc: bool = False,
+ load_input: bool = True,
  ) -> List[WorkflowStatus]:
  return list_queued_workflows(
  self._sys_db,
@@ -362,6 +371,7 @@ class DBOSClient:
  limit=limit,
  offset=offset,
  sort_desc=sort_desc,
+ load_input=load_input,
  )

  async def list_queued_workflows_async(
@@ -375,6 +385,7 @@ class DBOSClient:
  limit: Optional[int] = None,
  offset: Optional[int] = None,
  sort_desc: bool = False,
+ load_input: bool = True,
  ) -> List[WorkflowStatus]:
  return await asyncio.to_thread(
  self.list_queued_workflows,
@@ -386,6 +397,7 @@ class DBOSClient:
  limit=limit,
  offset=offset,
  sort_desc=sort_desc,
+ load_input=load_input,
  )

  def list_workflow_steps(self, workflow_id: str) -> List[StepInfo]:
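
On `DBOSClient`, the new flags default to True, so existing callers keep receiving deserialized inputs and outputs; passing False opts out of loading those payloads. A short usage sketch, assuming a reachable system database (the connection string below is a placeholder):

```python
from dbos import DBOSClient

# Placeholder system database URL; replace with your own.
client = DBOSClient("postgresql://postgres:dbos@localhost:5432/dbos")

# Fetch workflow metadata only, skipping the serialized inputs and outputs.
statuses = client.list_workflows(limit=20, load_input=False, load_output=False)
for status in statuses:
    print(status.workflow_id, status.status)

# Queued workflows expose only load_input; their outputs are never loaded.
queued = client.list_queued_workflows(load_input=False)
```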
dbos/_conductor/conductor.py CHANGED
@@ -223,6 +223,8 @@ class ConductorWebsocket(threading.Thread):
  body = list_workflows_message.body
  infos = []
  try:
+ load_input = body.get("load_input", False)
+ load_output = body.get("load_output", False)
  infos = list_workflows(
  self.dbos._sys_db,
  workflow_ids=body["workflow_uuids"],
@@ -235,6 +237,8 @@ class ConductorWebsocket(threading.Thread):
  limit=body["limit"],
  offset=body["offset"],
  sort_desc=body["sort_desc"],
+ load_input=load_input,
+ load_output=load_output,
  )
  except Exception as e:
  error_message = f"Exception encountered when listing workflows: {traceback.format_exc()}"
@@ -257,6 +261,7 @@ class ConductorWebsocket(threading.Thread):
  q_body = list_queued_workflows_message.body
  infos = []
  try:
+ q_load_input = q_body.get("load_input", False)
  infos = list_queued_workflows(
  self.dbos._sys_db,
  start_time=q_body["start_time"],
@@ -267,6 +272,7 @@ class ConductorWebsocket(threading.Thread):
  offset=q_body["offset"],
  queue_name=q_body["queue_name"],
  sort_desc=q_body["sort_desc"],
+ load_input=q_load_input,
  )
  except Exception as e:
  error_message = f"Exception encountered when listing queued workflows: {traceback.format_exc()}"
dbos/_conductor/protocol.py CHANGED
@@ -110,7 +110,7 @@ class RestartResponse(BaseMessage):
  error_message: Optional[str] = None


- class ListWorkflowsBody(TypedDict):
+ class ListWorkflowsBody(TypedDict, total=False):
  workflow_uuids: List[str]
  workflow_name: Optional[str]
  authenticated_user: Optional[str]
@@ -121,6 +121,8 @@ class ListWorkflowsBody(TypedDict):
  limit: Optional[int]
  offset: Optional[int]
  sort_desc: bool
+ load_input: bool
+ load_output: bool


  @dataclass
@@ -209,7 +211,7 @@ class ListWorkflowsResponse(BaseMessage):
  error_message: Optional[str] = None


- class ListQueuedWorkflowsBody(TypedDict):
+ class ListQueuedWorkflowsBody(TypedDict, total=False):
  workflow_name: Optional[str]
  start_time: Optional[str]
  end_time: Optional[str]
@@ -218,6 +220,7 @@ class ListQueuedWorkflowsBody(TypedDict):
  limit: Optional[int]
  offset: Optional[int]
  sort_desc: bool
+ load_input: bool


  @dataclass
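
Switching the conductor request bodies to `total=False` makes every key optional, so messages from older conductor versions that omit `load_input`/`load_output` still validate; the handler then falls back to False via `.get`, as shown in the conductor changes above. A self-contained sketch of that pattern (the class name here is illustrative, not the library's):

```python
from typing import List, TypedDict


class ExampleListWorkflowsBody(TypedDict, total=False):
    # total=False: every key may be absent from an incoming message body.
    workflow_uuids: List[str]
    sort_desc: bool
    load_input: bool
    load_output: bool


def handle(body: ExampleListWorkflowsBody) -> None:
    # Older senders that omit the new keys default to not loading payloads.
    load_input = body.get("load_input", False)
    load_output = body.get("load_output", False)
    print(load_input, load_output)


handle({"sort_desc": True})  # prints: False False
```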
dbos/_core.py CHANGED
@@ -1157,13 +1157,16 @@ def decorate_step(
  def wrapper(*args: Any, **kwargs: Any) -> Any:
  rr: Optional[str] = check_required_roles(func, fi)
  # Entering step is allowed:
+ # No DBOS, just call the original function directly
  # In a step already, just call the original function directly.
  # In a workflow (that is not in a step already)
  # Not in a workflow (we will start the single op workflow)
+ if not dbosreg.dbos or not dbosreg.dbos._launched:
+     # Call the original function directly
+     return func(*args, **kwargs)
  ctx = get_local_dbos_context()
  if ctx and ctx.is_step():
  # Call the original function directly
-
  return func(*args, **kwargs)
  if ctx and ctx.is_within_workflow():
  assert ctx.is_workflow(), "Steps must be called from within workflows"
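
The new guard in `decorate_step` makes a step-decorated function fall back to a plain call when there is no DBOS instance or it has not been launched. A hedged illustration of that behavior (assuming nothing in the process has configured or launched DBOS):

```python
from dbos import DBOS


@DBOS.step()
def add(a: int, b: int) -> int:
    return a + b


# With no configured/launched DBOS instance, the decorator now calls the
# original function directly, so nothing durable is recorded.
print(add(2, 3))  # 5
```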
dbos/_dbos.py CHANGED
@@ -1032,6 +1032,8 @@ class DBOS:
  offset: Optional[int] = None,
  sort_desc: bool = False,
  workflow_id_prefix: Optional[str] = None,
+ load_input: bool = True,
+ load_output: bool = True,
  ) -> List[WorkflowStatus]:
  def fn() -> List[WorkflowStatus]:
  return list_workflows(
@@ -1047,6 +1049,8 @@ class DBOS:
  offset=offset,
  sort_desc=sort_desc,
  workflow_id_prefix=workflow_id_prefix,
+ load_input=load_input,
+ load_output=load_output,
  )

  return _get_dbos_instance()._sys_db.call_function_as_step(
@@ -1065,6 +1069,7 @@ class DBOS:
  limit: Optional[int] = None,
  offset: Optional[int] = None,
  sort_desc: bool = False,
+ load_input: bool = True,
  ) -> List[WorkflowStatus]:
  def fn() -> List[WorkflowStatus]:
  return list_queued_workflows(
@@ -1077,6 +1082,7 @@ class DBOS:
  limit=limit,
  offset=offset,
  sort_desc=sort_desc,
+ load_input=load_input,
  )

  return _get_dbos_instance()._sys_db.call_function_as_step(
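
The same flags surface on the `DBOS` class methods used inside a running application, again defaulting to True. A sketch under the assumption that the application has already been configured and launched:

```python
from dbos import DBOS

# Assumes DBOS has been configured and DBOS.launch() has already run elsewhere.
recent = DBOS.list_workflows(
    limit=10, sort_desc=True, load_input=False, load_output=False
)
for status in recent:
    print(status.workflow_id, status.status)

# For queued workflows only the input payload can be skipped.
queued = DBOS.list_queued_workflows(load_input=False)
```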
dbos/_sys_db.py CHANGED
@@ -788,11 +788,17 @@ class SystemDatabase:
  pass # CB: I guess we're assuming the WF will show up eventually.
  time.sleep(1)

- def get_workflows(self, input: GetWorkflowsInput) -> List[WorkflowStatus]:
+ def get_workflows(
+     self,
+     input: GetWorkflowsInput,
+     *,
+     load_input: bool = True,
+     load_output: bool = True,
+ ) -> List[WorkflowStatus]:
  """
  Retrieve a list of workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
  """
- query = sa.select(
+ load_columns = [
  SystemSchema.workflow_status.c.workflow_uuid,
  SystemSchema.workflow_status.c.status,
  SystemSchema.workflow_status.c.name,
@@ -808,12 +814,16 @@ class SystemDatabase:
  SystemSchema.workflow_status.c.updated_at,
  SystemSchema.workflow_status.c.application_version,
  SystemSchema.workflow_status.c.application_id,
- SystemSchema.workflow_status.c.inputs,
- SystemSchema.workflow_status.c.output,
- SystemSchema.workflow_status.c.error,
  SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
  SystemSchema.workflow_status.c.workflow_timeout_ms,
- )
+ ]
+ if load_input:
+     load_columns.append(SystemSchema.workflow_status.c.inputs)
+ if load_output:
+     load_columns.append(SystemSchema.workflow_status.c.output)
+     load_columns.append(SystemSchema.workflow_status.c.error)
+
+ query = sa.select(*load_columns)
  if input.sort_desc:
  query = query.order_by(SystemSchema.workflow_status.c.created_at.desc())
  else:
@@ -880,29 +890,35 @@ class SystemDatabase:
  info.updated_at = row[12]
  info.app_version = row[13]
  info.app_id = row[14]
+ info.workflow_deadline_epoch_ms = row[15]
+ info.workflow_timeout_ms = row[16]

+ raw_input = row[17] if load_input else None
+ raw_output = row[18] if load_output else None
+ raw_error = row[19] if load_output else None
  inputs, output, exception = _serialization.safe_deserialize(
  info.workflow_id,
- serialized_input=row[15],
- serialized_output=row[16],
- serialized_exception=row[17],
+ serialized_input=raw_input,
+ serialized_output=raw_output,
+ serialized_exception=raw_error,
  )
  info.input = inputs
  info.output = output
  info.error = exception
- info.workflow_deadline_epoch_ms = row[18]
- info.workflow_timeout_ms = row[19]

  infos.append(info)
  return infos

  def get_queued_workflows(
- self, input: GetQueuedWorkflowsInput
+ self,
+ input: GetQueuedWorkflowsInput,
+ *,
+ load_input: bool = True,
  ) -> List[WorkflowStatus]:
  """
  Retrieve a list of queued workflows result and inputs based on the input criteria. The result is a list of external-facing workflow status objects.
  """
- query = sa.select(
+ load_columns = [
  SystemSchema.workflow_status.c.workflow_uuid,
  SystemSchema.workflow_status.c.status,
  SystemSchema.workflow_status.c.name,
@@ -918,12 +934,13 @@ class SystemDatabase:
  SystemSchema.workflow_status.c.updated_at,
  SystemSchema.workflow_status.c.application_version,
  SystemSchema.workflow_status.c.application_id,
- SystemSchema.workflow_status.c.inputs,
- SystemSchema.workflow_status.c.output,
- SystemSchema.workflow_status.c.error,
  SystemSchema.workflow_status.c.workflow_deadline_epoch_ms,
  SystemSchema.workflow_status.c.workflow_timeout_ms,
- ).where(
+ ]
+ if load_input:
+     load_columns.append(SystemSchema.workflow_status.c.inputs)
+
+ query = sa.select(*load_columns).where(
  sa.and_(
  SystemSchema.workflow_status.c.queue_name.isnot(None),
  SystemSchema.workflow_status.c.status.in_(["ENQUEUED", "PENDING"]),
@@ -984,18 +1001,21 @@ class SystemDatabase:
  info.updated_at = row[12]
  info.app_version = row[13]
  info.app_id = row[14]
+ info.workflow_deadline_epoch_ms = row[15]
+ info.workflow_timeout_ms = row[16]
+
+ raw_input = row[17] if load_input else None

+ # Error and Output are not loaded because they should always be None for queued workflows.
  inputs, output, exception = _serialization.safe_deserialize(
  info.workflow_id,
- serialized_input=row[15],
- serialized_output=row[16],
- serialized_exception=row[17],
+ serialized_input=raw_input,
+ serialized_output=None,
+ serialized_exception=None,
  )
  info.input = inputs
  info.output = output
  info.error = exception
- info.workflow_deadline_epoch_ms = row[18]
- info.workflow_timeout_ms = row[19]

  infos.append(info)

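
The query construction change in both methods follows the usual SQLAlchemy Core pattern: assemble the column list conditionally, then expand it with `sa.select(*columns)` so payload columns are simply absent from the generated SQL when not requested. A self-contained sketch of that pattern against a throwaway table (the table and column names are illustrative, not the DBOS schema):

```python
import sqlalchemy as sa

metadata = sa.MetaData()
status_demo = sa.Table(
    "workflow_status_demo",
    metadata,
    sa.Column("workflow_uuid", sa.Text, primary_key=True),
    sa.Column("status", sa.Text),
    sa.Column("inputs", sa.Text),
    sa.Column("output", sa.Text),
    sa.Column("error", sa.Text),
)


def build_query(load_input: bool, load_output: bool):
    # Always select the metadata columns; append payload columns only on request.
    columns = [status_demo.c.workflow_uuid, status_demo.c.status]
    if load_input:
        columns.append(status_demo.c.inputs)
    if load_output:
        columns.append(status_demo.c.output)
        columns.append(status_demo.c.error)
    return sa.select(*columns)


# Renders SQL without the inputs column, mirroring load_input=False.
print(build_query(load_input=False, load_output=True))
```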
dbos/_workflow_commands.py CHANGED
@@ -33,6 +33,8 @@ def list_workflows(
  offset: Optional[int] = None,
  sort_desc: bool = False,
  workflow_id_prefix: Optional[str] = None,
+ load_input: bool = True,
+ load_output: bool = True,
  ) -> List[WorkflowStatus]:
  input = GetWorkflowsInput()
  input.workflow_ids = workflow_ids
@@ -47,7 +49,9 @@ def list_workflows(
  input.sort_desc = sort_desc
  input.workflow_id_prefix = workflow_id_prefix

- infos: List[WorkflowStatus] = sys_db.get_workflows(input)
+ infos: List[WorkflowStatus] = sys_db.get_workflows(
+     input, load_input=load_input, load_output=load_output
+ )

  return infos

@@ -63,6 +67,7 @@ def list_queued_workflows(
  limit: Optional[int] = None,
  offset: Optional[int] = None,
  sort_desc: bool = False,
+ load_input: bool = True,
  ) -> List[WorkflowStatus]:
  input: GetQueuedWorkflowsInput = {
  "queue_name": queue_name,
@@ -75,7 +80,9 @@ def list_queued_workflows(
  "sort_desc": sort_desc,
  }

- infos: List[WorkflowStatus] = sys_db.get_queued_workflows(input)
+ infos: List[WorkflowStatus] = sys_db.get_queued_workflows(
+     input, load_input=load_input
+ )
  return infos

dbos-1.7.0a3.dist-info/METADATA → dbos-1.8.0a1.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: dbos
- Version: 1.7.0a3
+ Version: 1.8.0a1
  Summary: Ultra-lightweight durable execution in Python
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
  License: MIT
dbos-1.7.0a3.dist-info/RECORD → dbos-1.8.0a1.dist-info/RECORD
@@ -1,19 +1,19 @@
- dbos-1.7.0a3.dist-info/METADATA,sha256=9JR52XmoJqAM8nBVORsnsYk2VnKr0TRjZs6H4pdJHGc,13267
- dbos-1.7.0a3.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
- dbos-1.7.0a3.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
- dbos-1.7.0a3.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+ dbos-1.8.0a1.dist-info/METADATA,sha256=YxpqtPtDVv28hgyqDg9sbsNWZ5pRr7mbOOCPVZ1IsRA,13267
+ dbos-1.8.0a1.dist-info/WHEEL,sha256=tSfRZzRHthuv7vxpI4aehrdN9scLjk-dCJkPLzkHxGg,90
+ dbos-1.8.0a1.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+ dbos-1.8.0a1.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
  dbos/__init__.py,sha256=NssPCubaBxdiKarOWa-wViz1hdJSkmBGcpLX_gQ4NeA,891
  dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
- dbos/_admin_server.py,sha256=86rL_aQmmi_zZ4a7PGVNbF6ey9tP27WC1wazQedFLWo,15927
+ dbos/_admin_server.py,sha256=e8ELhcDWqR3_PNobnNgUvLGh5lzZq0yFSF6dvtzoQRI,16267
  dbos/_app_db.py,sha256=htblDPfqrpb_uZoFcvaud7cgQ-PDyn6Bn-cBidxdCTA,10603
  dbos/_classproperty.py,sha256=f0X-_BySzn3yFDRKB2JpCbLYQ9tLwt1XftfshvY7CBs,626
- dbos/_client.py,sha256=DeiJHo5fTedWsipr7qlQQIcDmVAPjzzX94X01121oQM,14780
- dbos/_conductor/conductor.py,sha256=y_T-8kEHwKWt6W8LtcFMctB_6EvYFWsuGLxiFuuKKBU,23702
- dbos/_conductor/protocol.py,sha256=DOTprPSd7oHDcvwWSyZpnlPds_JfILtcKzHZa-qBsF4,7330
+ dbos/_client.py,sha256=KD38fNX-u8hCyjcED3-Q7B6RrCNDeO9YZraAoi2nzrI,15194
+ dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
+ dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
  dbos/_context.py,sha256=zhje6jObpBcRALYfHyyIEumHtk_enl_PxLl01j4oDME,24897
- dbos/_core.py,sha256=m3e1WZ_210p2DT8c1sTh4S_CVM748UjkBdiGO846mVg,49269
+ dbos/_core.py,sha256=kRY2PXVryfpwjbOCmgzPA_-qNsFmRMLi-CxYCnyp1V8,49495
  dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
- dbos/_dbos.py,sha256=BprKIGPT-QDeoxtKM6kjRUK9dyF8sPCFfHIyIt0u7CE,48142
+ dbos/_dbos.py,sha256=2fvnTLnHAXcI8FTXBiMrsnjxqg8sbTSKxYBDKG7LZ1g,48361
  dbos/_dbos_config.py,sha256=JUG4V1rrP0p1AYESgih4ea80qOH_13UsgoIIm8X84pw,20562
  dbos/_debug.py,sha256=99j2SChWmCPAlZoDmjsJGe77tpU2LEa8E2TtLAnnh7o,1831
  dbos/_docker_pg_helper.py,sha256=tLJXWqZ4S-ExcaPnxg_i6cVxL6ZxrYlZjaGsklY-s2I,6115
@@ -49,7 +49,7 @@ dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
  dbos/_schemas/system_database.py,sha256=rbFKggONdvvbb45InvGz0TM6a7c-Ux9dcaL-h_7Z7pU,4438
  dbos/_serialization.py,sha256=bWuwhXSQcGmiazvhJHA5gwhrRWxtmFmcCFQSDJnqqkU,3666
- dbos/_sys_db.py,sha256=PaWa5Y8ublSMqPQXCHvYqln01cGf2LtPdXaLEHJq500,80653
+ dbos/_sys_db.py,sha256=Eh9ghQzwmcDsYDu9mFO8HHIbqOWGXy1IxaX8dIODWK0,81270
  dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
  dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  dbos/_templates/dbos-db-starter/__package/main.py.dbos,sha256=aQnBPSSQpkB8ERfhf7gB7P9tsU6OPKhZscfeh0yiaD8,2702
@@ -62,11 +62,11 @@ dbos/_templates/dbos-db-starter/migrations/versions/2024_07_31_180642_init.py,sh
  dbos/_templates/dbos-db-starter/start_postgres_docker.py,sha256=lQVLlYO5YkhGPEgPqwGc7Y8uDKse9HsWv5fynJEFJHM,1681
  dbos/_tracer.py,sha256=RnlcaOJEx_58hr2J9L9g6E7gjAHAeEtEGugJZmCwNfQ,2963
  dbos/_utils.py,sha256=uywq1QrjMwy17btjxW4bES49povlQwYwYbvKwMT6C2U,1575
- dbos/_workflow_commands.py,sha256=4QCs7ziQ9T457tqfaNFwiXd6mDisr-ZK__skz1Uteyg,4648
+ dbos/_workflow_commands.py,sha256=EmmAaQfRWeOZm_WPTznuU-O3he3jiSzzT9VpYrhxugE,4835
  dbos/cli/_github_init.py,sha256=Y_bDF9gfO2jB1id4FV5h1oIxEJRWyqVjhb7bNEa5nQ0,3224
  dbos/cli/_template_init.py,sha256=7JBcpMqP1r2mfCnvWatu33z8ctEGHJarlZYKgB83cXE,2972
  dbos/cli/cli.py,sha256=IcfaX4rrSrk6f24S2jrlR33snYMyNyEIx_lNQtuVr2E,22081
  dbos/dbos-config.schema.json,sha256=CjaspeYmOkx6Ip_pcxtmfXJTn_YGdSx_0pcPBF7KZmo,6060
  dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
  version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
- dbos-1.7.0a3.dist-info/RECORD,,
+ dbos-1.8.0a1.dist-info/RECORD,,