dbos 2.3.0a1__py3-none-any.whl → 2.3.0a3__py3-none-any.whl

This diff shows the content of publicly released package versions as they appear in their respective public registries and is provided for informational purposes only.

Potentially problematic release: this version of dbos might be problematic.

dbos/_core.py CHANGED
@@ -93,14 +93,6 @@ TEMP_SEND_WF_NAME = "<temp>.temp_send_workflow"
 DEBOUNCER_WORKFLOW_NAME = "_dbos_debouncer_workflow"
 
 
-def check_is_in_coroutine() -> bool:
-    try:
-        asyncio.get_running_loop()
-        return True
-    except RuntimeError:
-        return False
-
-
 class WorkflowHandleFuture(Generic[R]):
 
     def __init__(self, workflow_id: str, future: Future[R], dbos: "DBOS"):
@@ -856,11 +848,6 @@ def workflow_wrapper(
             dbos._sys_db.record_get_result(workflow_id, serialized_r, None)
             return r
 
-        if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
-            dbos_logger.warning(
-                f"Sync workflow ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
-            )
-
         outcome = (
             wfOutcome.wrap(init_wf, dbos=dbos)
             .also(DBOSAssumeRole(rr))
@@ -1046,10 +1033,6 @@ def decorate_transaction(
             assert (
                 ctx.is_workflow()
             ), "Transactions must be called from within workflows"
-            if check_is_in_coroutine():
-                dbos_logger.warning(
-                    f"Transaction function ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Use asyncio.to_thread instead."
-                )
             with DBOSAssumeRole(rr):
                 return invoke_tx(*args, **kwargs)
         else:
@@ -1194,10 +1177,6 @@ def decorate_step(
 
     @wraps(func)
     def wrapper(*args: Any, **kwargs: Any) -> Any:
-        if check_is_in_coroutine() and not inspect.iscoroutinefunction(func):
-            dbos_logger.warning(
-                f"Sync step ({get_dbos_func_name(func)}) shouldn't be invoked from within another async function. Define it as async or use asyncio.to_thread instead."
-            )
         # If the step is called from a workflow, run it as a step.
         # Otherwise, run it as a normal function.
         ctx = get_local_dbos_context()
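The removed warnings told users to wrap sync workflows and steps in asyncio.to_thread when calling them from async code. For context, a minimal sketch of that pattern, assuming the usual @DBOS.workflow() and @DBOS.step() decorators from this package; the step itself is hypothetical:

import asyncio

from dbos import DBOS


@DBOS.step()
def fetch_report(report_id: str) -> str:
    # Hypothetical blocking, synchronous step.
    return f"report-{report_id}"


@DBOS.workflow()
async def report_workflow(report_id: str) -> str:
    # Offload the sync step to a worker thread instead of calling it
    # directly on the event loop, as the removed warning suggested.
    return await asyncio.to_thread(fetch_report, report_id)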
dbos/_dbos_config.py CHANGED
@@ -444,6 +444,7 @@ def configure_db_engine_parameters(
 
     # Configure user database engine parameters
     app_engine_kwargs: dict[str, Any] = {
+        "connect_args": {"application_name": "dbos_transact"},
         "pool_timeout": 30,
         "max_overflow": 0,
         "pool_size": 20,
@@ -477,8 +478,6 @@ def is_valid_database_url(database_url: str) -> bool:
         return True
     url = make_url(database_url)
     required_fields = [
-        ("username", "Username must be specified in the connection URL"),
-        ("host", "Host must be specified in the connection URL"),
        ("database", "Database name must be specified in the connection URL"),
     ]
     for field_name, error_message in required_fields:
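The added connect_args default tags every user-database connection with an application_name, so DBOS connections can be identified in pg_stat_activity; the second hunk also stops requiring a username and host in the connection URL, so URLs that rely on driver defaults now validate. A rough sketch of the resulting SQLAlchemy engine configuration (the URL is hypothetical; the keyword arguments mirror the defaults shown above):

from sqlalchemy import create_engine, text

engine = create_engine(
    "postgresql://user:pass@localhost:5432/app_db",  # hypothetical URL
    connect_args={"application_name": "dbos_transact"},
    pool_timeout=30,
    max_overflow=0,
    pool_size=20,
)

# Connections from this pool are now labeled in pg_stat_activity.
with engine.connect() as conn:
    print(conn.execute(text("SHOW application_name")).scalar())  # dbos_transact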
dbos/_kafka.py CHANGED
@@ -1,6 +1,6 @@
 import re
 import threading
-from typing import TYPE_CHECKING, Any, Callable, NoReturn
+from typing import TYPE_CHECKING, Any, Callable, Coroutine, NoReturn
 
 from confluent_kafka import Consumer, KafkaError, KafkaException
 
@@ -15,7 +15,9 @@ from ._kafka_message import KafkaMessage
 from ._logger import dbos_logger
 from ._registrations import get_dbos_func_name
 
-_KafkaConsumerWorkflow = Callable[[KafkaMessage], None]
+_KafkaConsumerWorkflow = (
+    Callable[[KafkaMessage], None] | Callable[[KafkaMessage], Coroutine[Any, Any, None]]
+)
 
 _kafka_queue: Queue
 _in_order_kafka_queues: dict[str, Queue] = {}
@@ -37,8 +39,8 @@ def _kafka_consumer_loop(
     in_order: bool,
 ) -> None:
 
-    def on_error(err: KafkaError) -> NoReturn:
-        raise KafkaException(err)
+    def on_error(err: KafkaError) -> None:
+        dbos_logger.error(f"Exception in Kafka consumer: {err}")
 
     config["error_cb"] = on_error
     if "auto.offset.reset" not in config:
dbos/_scheduler.py CHANGED
@@ -2,7 +2,7 @@ import random
 import threading
 import traceback
 from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Callable
+from typing import TYPE_CHECKING, Any, Callable, Coroutine
 
 from ._logger import dbos_logger
 from ._queue import Queue
@@ -14,7 +14,10 @@ from ._context import SetWorkflowID
 from ._croniter import croniter  # type: ignore
 from ._registrations import get_dbos_func_name
 
-ScheduledWorkflow = Callable[[datetime, datetime], None]
+ScheduledWorkflow = (
+    Callable[[datetime, datetime], None]
+    | Callable[[datetime, datetime], Coroutine[Any, Any, None]]
+)
 
 
 def scheduler_loop(
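ScheduledWorkflow now also admits coroutine functions taking the same (scheduled time, actual time) pair. A sketch of an async scheduled workflow, assuming the @DBOS.scheduled decorator from this package; the cron expression is just an example:

from datetime import datetime

from dbos import DBOS


@DBOS.scheduled("* * * * *")  # run every minute
@DBOS.workflow()
async def every_minute(scheduled_time: datetime, actual_time: datetime) -> None:
    # Coroutine scheduled workflows are now covered by the type alias.
    print(f"Scheduled for {scheduled_time}, started at {actual_time}")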
dbos/_serialization.py CHANGED
@@ -25,9 +25,13 @@ class Serializer(ABC):
 class DefaultSerializer(Serializer):
 
     def serialize(self, data: Any) -> str:
-        pickled_data: bytes = pickle.dumps(data)
-        encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
-        return encoded_data
+        try:
+            pickled_data: bytes = pickle.dumps(data)
+            encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
+            return encoded_data
+        except Exception as e:
+            dbos_logger.error(f"Error serializing object: {data}", exc_info=e)
+            raise
 
     def deserialize(cls, serialized_data: str) -> Any:
         pickled_data: bytes = base64.b64decode(serialized_data)
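The serializer change wraps pickling in a try/except so that unserializable values are logged before the exception propagates. A standalone sketch of the same behavior, using the standard logging module instead of dbos_logger:

import base64
import logging
import pickle
from typing import Any

logger = logging.getLogger("serialization-demo")


def serialize(data: Any) -> str:
    # Mirror of the new behavior: log the offending object, then re-raise.
    try:
        pickled: bytes = pickle.dumps(data)
        return base64.b64encode(pickled).decode("utf-8")
    except Exception as e:
        logger.error(f"Error serializing object: {data}", exc_info=e)
        raise


serialize({"ok": 1})       # succeeds
# serialize(lambda x: x)   # would log the error, then raise a pickling error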
dbos/_sys_db_postgres.py CHANGED
@@ -41,7 +41,7 @@ class PostgresSystemDatabase(SystemDatabase):
                 parameters={"db_name": sysdb_name},
             ).scalar():
                 dbos_logger.info(f"Creating system database {sysdb_name}")
-                conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
+                conn.execute(sa.text(f'CREATE DATABASE "{sysdb_name}"'))
             engine.dispose()
         else:
             # If we were provided an engine, validate it can connect
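Quoting the database name turns it into a Postgres delimited identifier, which preserves case and allows characters such as hyphens that an unquoted CREATE DATABASE would reject. A minimal illustration with a hypothetical name:

import sqlalchemy as sa

sysdb_name = "my-app_dbos_sys"  # hypothetical; the hyphen needs quoting

# Unquoted, the hyphen is a syntax error and the name is folded to lower case:
broken = sa.text(f"CREATE DATABASE {sysdb_name}")
# Quoted, the identifier is taken literally:
fixed = sa.text(f'CREATE DATABASE "{sysdb_name}"')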
dbos-2.3.0a1.dist-info/METADATA → dbos-2.3.0a3.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dbos
-Version: 2.3.0a1
+Version: 2.3.0a3
 Summary: Ultra-lightweight durable execution in Python
 Author-Email: "DBOS, Inc." <contact@dbos.dev>
 License: MIT
dbos-2.3.0a1.dist-info/RECORD → dbos-2.3.0a3.dist-info/RECORD CHANGED
@@ -1,7 +1,7 @@
-dbos-2.3.0a1.dist-info/METADATA,sha256=IwUb-JebRHeYxRbGuyr-IlVz_tkQsA7OI5M_j2UvcOE,14532
-dbos-2.3.0a1.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
-dbos-2.3.0a1.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
-dbos-2.3.0a1.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
+dbos-2.3.0a3.dist-info/METADATA,sha256=EMs8I_ASfYtwj6H3WqRRTfGQ96q-bjdnJ0kQdZXatvs,14532
+dbos-2.3.0a3.dist-info/WHEEL,sha256=9P2ygRxDrTJz3gsagc0Z96ukrxjr-LFBGOgv3AuKlCA,90
+dbos-2.3.0a3.dist-info/entry_points.txt,sha256=_QOQ3tVfEjtjBlr1jS4sHqHya9lI2aIEIWkz8dqYp14,58
+dbos-2.3.0a3.dist-info/licenses/LICENSE,sha256=VGZit_a5-kdw9WT6fY5jxAWVwGQzgLFyPWrcVVUhVNU,1067
 dbos/__init__.py,sha256=M7FdFSBGhcvaLIXrNw_0eR68ijwMWV7_UEyimHMP_F4,1039
 dbos/__main__.py,sha256=G7Exn-MhGrVJVDbgNlpzhfh8WMX_72t3_oJaFT9Lmt8,653
 dbos/_admin_server.py,sha256=hubQJw5T8zGKCPNS6FQTXy8jQ8GTJxoYQaDTMlICl9k,16267
@@ -11,10 +11,10 @@ dbos/_client.py,sha256=0VR9oWBn0i-34jNWHqkgeImKdg5aBefMWu2jaqRLH8Q,19658
 dbos/_conductor/conductor.py,sha256=3E_hL3c9g9yWqKZkvI6KA0-ZzPMPRo06TOzT1esMiek,24114
 dbos/_conductor/protocol.py,sha256=q3rgLxINFtWFigdOONc-4gX4vn66UmMlJQD6Kj8LnL4,7420
 dbos/_context.py,sha256=XKllmsDR_oMcWOuZnoe1X4yv2JeOi_vsAuyWC-mWs_o,28164
-dbos/_core.py,sha256=6OU3SMW5x8CvO7c0LBlHhF1eLiHPLs6nfkkasP73IEo,51124
+dbos/_core.py,sha256=e-pKDbrvpN6BzcfyIZx4Nsb8wnMiGxLNzdpgtlRI-0I,50096
 dbos/_croniter.py,sha256=XHAyUyibs_59sJQfSNWkP7rqQY6_XrlfuuCxk4jYqek,47559
 dbos/_dbos.py,sha256=dr32Z_NT36JkUxWGyYVX7xkl3bYJmgsxVMOX8H9_mpM,59394
-dbos/_dbos_config.py,sha256=NIMQfxkznoyscyeMFLrfrPAS1W_PHXXWrxqpvvrbp3E,24923
+dbos/_dbos_config.py,sha256=mfajyeyeV1ZHaAg2GU3dxwvp_19wZtY2prNdVrXgPb8,24846
 dbos/_debouncer.py,sha256=qNjIVmWqTPp64M2cEbLnpgGmlKVdCaAKysD1BPJgWh4,15297
 dbos/_debug.py,sha256=0MfgNqutCUhI4PEmmra9x7f3DiFE_0nscfUCHdLimEY,1415
 dbos/_docker_pg_helper.py,sha256=xySum4hTA8TVMBODoG19u4cXQAB1vCock-jwM2pnmSI,7791
@@ -22,7 +22,7 @@ dbos/_error.py,sha256=GwO0Ng4d4iB52brY09-Ss6Cz_V28Xc0D0cRCzZ6XmNM,8688
 dbos/_event_loop.py,sha256=cvaFN9-II3MsHEOq8QoICc_8qSKrjikMlLfuhC3Y8Dk,2923
 dbos/_fastapi.py,sha256=toYYfbe2aui2aHw0021PoXi2dKlI6NzO3M3pHB0dHOk,3421
 dbos/_flask.py,sha256=Npnakt-a3W5OykONFRkDRnumaDhTQmA0NPdUCGRYKXE,1652
-dbos/_kafka.py,sha256=Gm4fHWl7gYb-i5BMvwNwm5Km3z8zQpseqdMgqgFjlGI,4252
+dbos/_kafka.py,sha256=cA3hXyT-FR4LQZnaBMVLTZn7oko76rcTUC_kOo6aSis,4352
 dbos/_kafka_message.py,sha256=NYvOXNG3Qn7bghn1pv3fg4Pbs86ILZGcK4IB-MLUNu0,409
 dbos/_logger.py,sha256=djnCp147QoQ1iG9Bt3Uz8RyGaXGmi6gebccXsrA6Cps,4660
 dbos/_migration.py,sha256=Fvc3m4dC4oDpjPMHX-tUZVnXklVB9OMMojSLuVyV9ak,10312
@@ -31,13 +31,13 @@ dbos/_queue.py,sha256=GmqZHl9smES1KSmpauhSdsnZFJHDyfvRArmC-jBibhw,6228
 dbos/_recovery.py,sha256=K-wlFhdf4yGRm6cUzyhcTjQUS0xp2T5rdNMLiiBErYg,2882
 dbos/_registrations.py,sha256=bEOntObnWaBylnebr5ZpcX2hk7OVLDd1z4BvW4_y3zA,7380
 dbos/_roles.py,sha256=kCuhhg8XLtrHCgKgm44I0abIRTGHltf88OwjEKAUggk,2317
-dbos/_scheduler.py,sha256=n96dNzKMr6-2RQvMxRI6BaoExHbLjw0Kr46j1P-DjP4,2620
+dbos/_scheduler.py,sha256=PLiCSUujlfEfojTnHwzY-P_AEOVEx7bvWvU5BuMgLPY,2708
 dbos/_schemas/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dbos/_schemas/application_database.py,sha256=SypAS9l9EsaBHFn9FR8jmnqt01M74d9AF1AMa4m2hhI,1040
 dbos/_schemas/system_database.py,sha256=mNsBV0ttlqJArvOqGPY60WvtuiWrHCpYnVxtvMfe2LI,5544
-dbos/_serialization.py,sha256=8TVXB1c2k3keodNcXszqmcOGTQz2r5UBSYtxn2OrYjI,2804
+dbos/_serialization.py,sha256=ZGrkN5UclSLOqMVZgYpT72pw1l888ZXRoYuu3pIg3PA,2957
 dbos/_sys_db.py,sha256=FDboSk58CyQCAFjOF_KMLnRtIw05OL3IpJHT1qwKEKo,87596
-dbos/_sys_db_postgres.py,sha256=GuyGVyZZD_Wl7LjRSkHnOuZ-hOROlO4Xs2UeDhKq10E,6963
+dbos/_sys_db_postgres.py,sha256=_3m3hF6Pc23iZfUlIFYtDuC1Tw6KsjYqnDQE0HZpjt4,6965
 dbos/_sys_db_sqlite.py,sha256=ifjKdy-Z9vlVIBf5L6XnSaNjiBdvqPE73asVHim4A5Q,6998
 dbos/_templates/dbos-db-starter/README.md,sha256=GhxhBj42wjTt1fWEtwNriHbJuKb66Vzu89G4pxNHw2g,930
 dbos/_templates/dbos-db-starter/__package/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -56,4 +56,4 @@ dbos/cli/migration.py,sha256=I0_0ngWTuCPQf6Symbpd0lizaxWUKe3uTYEmuCmsrdU,3775
 dbos/dbos-config.schema.json,sha256=47wofTZ5jlFynec7bG0L369tAXbRQQ2euBxBXvg4m9c,1730
 dbos/py.typed,sha256=QfzXT1Ktfk3Rj84akygc7_42z0lRpCq0Ilh8OXI6Zas,44
 version/__init__.py,sha256=L4sNxecRuqdtSFdpUGX3TtBi9KL3k7YsZVIvv-fv9-A,1678
-dbos-2.3.0a1.dist-info/RECORD,,
+dbos-2.3.0a3.dist-info/RECORD,,