dbos 2.3.0a1__tar.gz → 2.3.0a2__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This is a potentially problematic release.


This version of dbos might be problematic; consult the package registry's advisory page for more details.

Files changed (99):
  1. {dbos-2.3.0a1 → dbos-2.3.0a2}/PKG-INFO +1 -1
  2. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_dbos_config.py +1 -0
  3. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_kafka.py +6 -4
  4. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_scheduler.py +5 -2
  5. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_serialization.py +7 -3
  6. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_sys_db_postgres.py +1 -1
  7. {dbos-2.3.0a1 → dbos-2.3.0a2}/pyproject.toml +1 -1
  8. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_config.py +29 -17
  9. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_dbos.py +60 -0
  10. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_failures.py +14 -1
  11. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_kafka.py +50 -17
  12. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_scheduler.py +13 -0
  13. {dbos-2.3.0a1 → dbos-2.3.0a2}/LICENSE +0 -0
  14. {dbos-2.3.0a1 → dbos-2.3.0a2}/README.md +0 -0
  15. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/__init__.py +0 -0
  16. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/__main__.py +0 -0
  17. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_admin_server.py +0 -0
  18. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_app_db.py +0 -0
  19. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_classproperty.py +0 -0
  20. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_client.py +0 -0
  21. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_conductor/conductor.py +0 -0
  22. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_conductor/protocol.py +0 -0
  23. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_context.py +0 -0
  24. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_core.py +0 -0
  25. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_croniter.py +0 -0
  26. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_dbos.py +0 -0
  27. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_debouncer.py +0 -0
  28. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_debug.py +0 -0
  29. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_docker_pg_helper.py +0 -0
  30. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_error.py +0 -0
  31. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_event_loop.py +0 -0
  32. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_fastapi.py +0 -0
  33. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_flask.py +0 -0
  34. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_kafka_message.py +0 -0
  35. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_logger.py +0 -0
  36. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_migration.py +0 -0
  37. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_outcome.py +0 -0
  38. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_queue.py +0 -0
  39. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_recovery.py +0 -0
  40. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_registrations.py +0 -0
  41. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_roles.py +0 -0
  42. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_schemas/__init__.py +0 -0
  43. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_schemas/application_database.py +0 -0
  44. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_schemas/system_database.py +0 -0
  45. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_sys_db.py +0 -0
  46. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_sys_db_sqlite.py +0 -0
  47. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_templates/dbos-db-starter/README.md +0 -0
  48. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_templates/dbos-db-starter/__package/__init__.py +0 -0
  49. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_templates/dbos-db-starter/__package/main.py.dbos +0 -0
  50. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_templates/dbos-db-starter/__package/schema.py +0 -0
  51. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_templates/dbos-db-starter/dbos-config.yaml.dbos +0 -0
  52. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_templates/dbos-db-starter/migrations/create_table.py.dbos +0 -0
  53. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_templates/dbos-db-starter/start_postgres_docker.py +0 -0
  54. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_tracer.py +0 -0
  55. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_utils.py +0 -0
  56. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/_workflow_commands.py +0 -0
  57. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/cli/_github_init.py +0 -0
  58. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/cli/_template_init.py +0 -0
  59. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/cli/cli.py +0 -0
  60. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/cli/migration.py +0 -0
  61. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/dbos-config.schema.json +0 -0
  62. {dbos-2.3.0a1 → dbos-2.3.0a2}/dbos/py.typed +0 -0
  63. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/__init__.py +0 -0
  64. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/atexit_no_ctor.py +0 -0
  65. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/atexit_no_launch.py +0 -0
  66. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/classdefs.py +0 -0
  67. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/client_collateral.py +0 -0
  68. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/client_worker.py +0 -0
  69. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/conftest.py +0 -0
  70. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/dupname_classdefs1.py +0 -0
  71. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/dupname_classdefsa.py +0 -0
  72. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/more_classdefs.py +0 -0
  73. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/queuedworkflow.py +0 -0
  74. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/script_without_fastapi.py +0 -0
  75. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_admin_server.py +0 -0
  76. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_async.py +0 -0
  77. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_async_workflow_management.py +0 -0
  78. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_classdecorators.py +0 -0
  79. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_cli.py +0 -0
  80. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_client.py +0 -0
  81. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_concurrency.py +0 -0
  82. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_croniter.py +0 -0
  83. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_debouncer.py +0 -0
  84. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_debug.py +0 -0
  85. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_docker_secrets.py +0 -0
  86. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_fastapi.py +0 -0
  87. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_fastapi_roles.py +0 -0
  88. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_flask.py +0 -0
  89. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_outcome.py +0 -0
  90. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_package.py +0 -0
  91. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_queue.py +0 -0
  92. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_schema_migration.py +0 -0
  93. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_singleton.py +0 -0
  94. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_spans.py +0 -0
  95. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_sqlalchemy.py +0 -0
  96. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_streaming.py +0 -0
  97. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_workflow_introspection.py +0 -0
  98. {dbos-2.3.0a1 → dbos-2.3.0a2}/tests/test_workflow_management.py +0 -0
  99. {dbos-2.3.0a1 → dbos-2.3.0a2}/version/__init__.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: dbos
3
- Version: 2.3.0a1
3
+ Version: 2.3.0a2
4
4
  Summary: Ultra-lightweight durable execution in Python
5
5
  Author-Email: "DBOS, Inc." <contact@dbos.dev>
6
6
  License: MIT
@@ -444,6 +444,7 @@ def configure_db_engine_parameters(
444
444
 
445
445
  # Configure user database engine parameters
446
446
  app_engine_kwargs: dict[str, Any] = {
447
+ "connect_args": {"application_name": "dbos_transact"},
447
448
  "pool_timeout": 30,
448
449
  "max_overflow": 0,
449
450
  "pool_size": 20,
@@ -1,6 +1,6 @@
1
1
  import re
2
2
  import threading
3
- from typing import TYPE_CHECKING, Any, Callable, NoReturn
3
+ from typing import TYPE_CHECKING, Any, Callable, Coroutine, NoReturn
4
4
 
5
5
  from confluent_kafka import Consumer, KafkaError, KafkaException
6
6
 
@@ -15,7 +15,9 @@ from ._kafka_message import KafkaMessage
15
15
  from ._logger import dbos_logger
16
16
  from ._registrations import get_dbos_func_name
17
17
 
18
- _KafkaConsumerWorkflow = Callable[[KafkaMessage], None]
18
+ _KafkaConsumerWorkflow = (
19
+ Callable[[KafkaMessage], None] | Callable[[KafkaMessage], Coroutine[Any, Any, None]]
20
+ )
19
21
 
20
22
  _kafka_queue: Queue
21
23
  _in_order_kafka_queues: dict[str, Queue] = {}
@@ -37,8 +39,8 @@ def _kafka_consumer_loop(
37
39
  in_order: bool,
38
40
  ) -> None:
39
41
 
40
- def on_error(err: KafkaError) -> NoReturn:
41
- raise KafkaException(err)
42
+ def on_error(err: KafkaError) -> None:
43
+ dbos_logger.error(f"Exception in Kafka consumer: {err}")
42
44
 
43
45
  config["error_cb"] = on_error
44
46
  if "auto.offset.reset" not in config:
@@ -2,7 +2,7 @@ import random
2
2
  import threading
3
3
  import traceback
4
4
  from datetime import datetime, timezone
5
- from typing import TYPE_CHECKING, Callable
5
+ from typing import TYPE_CHECKING, Any, Callable, Coroutine
6
6
 
7
7
  from ._logger import dbos_logger
8
8
  from ._queue import Queue
@@ -14,7 +14,10 @@ from ._context import SetWorkflowID
14
14
  from ._croniter import croniter # type: ignore
15
15
  from ._registrations import get_dbos_func_name
16
16
 
17
- ScheduledWorkflow = Callable[[datetime, datetime], None]
17
+ ScheduledWorkflow = (
18
+ Callable[[datetime, datetime], None]
19
+ | Callable[[datetime, datetime], Coroutine[Any, Any, None]]
20
+ )
18
21
 
19
22
 
20
23
  def scheduler_loop(
@@ -25,9 +25,13 @@ class Serializer(ABC):
25
25
  class DefaultSerializer(Serializer):
26
26
 
27
27
  def serialize(self, data: Any) -> str:
28
- pickled_data: bytes = pickle.dumps(data)
29
- encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
30
- return encoded_data
28
+ try:
29
+ pickled_data: bytes = pickle.dumps(data)
30
+ encoded_data: str = base64.b64encode(pickled_data).decode("utf-8")
31
+ return encoded_data
32
+ except Exception as e:
33
+ dbos_logger.error(f"Error serializing object: {data}", exc_info=e)
34
+ raise
31
35
 
32
36
  def deserialize(cls, serialized_data: str) -> Any:
33
37
  pickled_data: bytes = base64.b64decode(serialized_data)
@@ -41,7 +41,7 @@ class PostgresSystemDatabase(SystemDatabase):
41
41
  parameters={"db_name": sysdb_name},
42
42
  ).scalar():
43
43
  dbos_logger.info(f"Creating system database {sysdb_name}")
44
- conn.execute(sa.text(f"CREATE DATABASE {sysdb_name}"))
44
+ conn.execute(sa.text(f'CREATE DATABASE "{sysdb_name}"'))
45
45
  engine.dispose()
46
46
  else:
47
47
  # If we were provided an engine, validate it can connect
@@ -34,7 +34,7 @@ classifiers = [
34
34
  "Topic :: Software Development :: Libraries :: Python Modules",
35
35
  "Framework :: AsyncIO",
36
36
  ]
37
- version = "2.3.0a1"
37
+ version = "2.3.0a2"
38
38
 
39
39
  [project.license]
40
40
  text = "MIT"
@@ -209,7 +209,7 @@ def test_process_config_full():
209
209
  "max_overflow": 0,
210
210
  "pool_size": 20,
211
211
  "pool_pre_ping": True,
212
- "connect_args": {"connect_timeout": 1},
212
+ "connect_args": {"connect_timeout": 1, "application_name": "dbos_transact"},
213
213
  }
214
214
  assert configFile["database"]["sys_db_engine_kwargs"] == {
215
215
  "key": "value",
@@ -217,7 +217,7 @@ def test_process_config_full():
217
217
  "max_overflow": 0,
218
218
  "pool_size": 27,
219
219
  "pool_pre_ping": True,
220
- "connect_args": {"connect_timeout": 1},
220
+ "connect_args": {"connect_timeout": 1, "application_name": "dbos_transact"},
221
221
  }
222
222
  assert configFile["runtimeConfig"]["start"] == ["python3 main.py"]
223
223
  assert configFile["runtimeConfig"]["admin_port"] == 8001
@@ -255,7 +255,7 @@ def test_process_config_system_database():
255
255
  "max_overflow": 0,
256
256
  "pool_size": 20,
257
257
  "pool_pre_ping": True,
258
- "connect_args": {"connect_timeout": 1},
258
+ "connect_args": {"connect_timeout": 1, "application_name": "dbos_transact"},
259
259
  }
260
260
  assert configFile["database"]["sys_db_engine_kwargs"] == {
261
261
  "key": "value",
@@ -263,7 +263,7 @@ def test_process_config_system_database():
263
263
  "max_overflow": 0,
264
264
  "pool_size": 27,
265
265
  "pool_pre_ping": True,
266
- "connect_args": {"connect_timeout": 1},
266
+ "connect_args": {"connect_timeout": 1, "application_name": "dbos_transact"},
267
267
  }
268
268
 
269
269
 
@@ -397,14 +397,14 @@ def test_configure_db_engine_parameters_defaults():
397
397
  "max_overflow": 0,
398
398
  "pool_size": 20,
399
399
  "pool_pre_ping": True,
400
- "connect_args": {"connect_timeout": 10},
400
+ "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
401
401
  }
402
402
  assert data["sys_db_engine_kwargs"] == {
403
403
  "pool_timeout": 30,
404
404
  "max_overflow": 0,
405
405
  "pool_size": 20,
406
406
  "pool_pre_ping": True,
407
- "connect_args": {"connect_timeout": 10},
407
+ "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
408
408
  }
409
409
 
410
410
 
@@ -419,14 +419,14 @@ def test_configure_db_engine_parameters_custom_sys_db_pool_sizes():
419
419
  "max_overflow": 0,
420
420
  "pool_size": 20,
421
421
  "pool_pre_ping": True,
422
- "connect_args": {"connect_timeout": 10},
422
+ "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
423
423
  }
424
424
  assert data["sys_db_engine_kwargs"] == {
425
425
  "pool_timeout": 30,
426
426
  "max_overflow": 0,
427
427
  "pool_size": 35,
428
428
  "pool_pre_ping": True,
429
- "connect_args": {"connect_timeout": 10},
429
+ "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
430
430
  }
431
431
 
432
432
 
@@ -440,7 +440,11 @@ def test_configure_db_engine_parameters_user_kwargs_override():
440
440
  "pool_pre_ping": True,
441
441
  "custom_param": "value",
442
442
  "pool_size": 50,
443
- "connect_args": {"connect_timeout": 30, "key": "value"},
443
+ "connect_args": {
444
+ "connect_timeout": 30,
445
+ "key": "value",
446
+ "application_name": "dbos_transact",
447
+ },
444
448
  },
445
449
  }
446
450
 
@@ -453,7 +457,11 @@ def test_configure_db_engine_parameters_user_kwargs_override():
453
457
  "pool_pre_ping": True,
454
458
  "custom_param": "value",
455
459
  "pool_size": 50,
456
- "connect_args": {"connect_timeout": 30, "key": "value"},
460
+ "connect_args": {
461
+ "connect_timeout": 30,
462
+ "key": "value",
463
+ "application_name": "dbos_transact",
464
+ },
457
465
  }
458
466
 
459
467
  # System engine kwargs should use system pool size but same user overrides
@@ -463,7 +471,11 @@ def test_configure_db_engine_parameters_user_kwargs_override():
463
471
  "pool_pre_ping": True,
464
472
  "custom_param": "value",
465
473
  "pool_size": 35,
466
- "connect_args": {"connect_timeout": 30, "key": "value"},
474
+ "connect_args": {
475
+ "connect_timeout": 30,
476
+ "key": "value",
477
+ "application_name": "dbos_transact",
478
+ },
467
479
  }
468
480
 
469
481
 
@@ -487,7 +499,7 @@ def test_configure_db_engine_parameters_user_kwargs_and_db_url_connect_timeout()
487
499
  "pool_pre_ping": True,
488
500
  "custom_param": "value",
489
501
  "pool_size": 50,
490
- "connect_args": {"connect_timeout": 22},
502
+ "connect_args": {"connect_timeout": 22, "application_name": "dbos_transact"},
491
503
  }
492
504
 
493
505
  # System engine kwargs should use system pool size but same user overrides
@@ -497,7 +509,7 @@ def test_configure_db_engine_parameters_user_kwargs_and_db_url_connect_timeout()
497
509
  "pool_pre_ping": True,
498
510
  "custom_param": "value",
499
511
  "pool_size": 50,
500
- "connect_args": {"connect_timeout": 22},
512
+ "connect_args": {"connect_timeout": 22, "application_name": "dbos_transact"},
501
513
  }
502
514
 
503
515
 
@@ -556,7 +568,7 @@ def test_configure_db_engine_parameters_user_kwargs_mixed_params():
556
568
  "pool_pre_ping": True,
557
569
  "custom_param": "value",
558
570
  "pool_size": 50,
559
- "connect_args": {"connect_timeout": 10},
571
+ "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
560
572
  }
561
573
 
562
574
  # System engine kwargs should use system pool size but same user overrides
@@ -566,7 +578,7 @@ def test_configure_db_engine_parameters_user_kwargs_mixed_params():
566
578
  "pool_pre_ping": True,
567
579
  "custom_param": "value",
568
580
  "pool_size": 50,
569
- "connect_args": {"connect_timeout": 10},
581
+ "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
570
582
  }
571
583
 
572
584
 
@@ -581,14 +593,14 @@ def test_configure_db_engine_parameters_empty_user_kwargs():
581
593
  "max_overflow": 0,
582
594
  "pool_size": 20,
583
595
  "pool_pre_ping": True,
584
- "connect_args": {"connect_timeout": 10},
596
+ "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
585
597
  }
586
598
  assert data["sys_db_engine_kwargs"] == {
587
599
  "pool_timeout": 30,
588
600
  "max_overflow": 0,
589
601
  "pool_size": 20,
590
602
  "pool_pre_ping": True,
591
- "connect_args": {"connect_timeout": 10},
603
+ "connect_args": {"connect_timeout": 10, "application_name": "dbos_transact"},
592
604
  }
593
605
 
594
606
 
@@ -37,6 +37,7 @@ from dbos._error import (
37
37
  from dbos._schemas.system_database import SystemSchema
38
38
  from dbos._sys_db import GetWorkflowsInput
39
39
  from dbos._utils import GlobalParams
40
+ from tests.conftest import using_sqlite
40
41
 
41
42
 
42
43
  def test_simple_workflow(dbos: DBOS) -> None:
@@ -1796,6 +1797,65 @@ def test_without_appdb(config: DBOSConfig, cleanup_test_databases: None) -> None
1796
1797
  assert s["function_name"] == step.__qualname__
1797
1798
 
1798
1799
 
1800
+ def test_custom_database(
1801
+ config: DBOSConfig, db_engine: sa.Engine, cleanup_test_databases: None
1802
+ ) -> None:
1803
+ DBOS.destroy(destroy_registry=True)
1804
+ assert config["system_database_url"]
1805
+ custom_database = "F8nny_dAtaB@s3@-n@m3.sqlite"
1806
+ url = sa.make_url(config["system_database_url"])
1807
+ url = url.set(database=custom_database)
1808
+ config["system_database_url"] = url.render_as_string(hide_password=False)
1809
+ # Destroy the database if it exists
1810
+ if using_sqlite():
1811
+ parsed_url = sa.make_url(config["system_database_url"])
1812
+ db_path = parsed_url.database
1813
+ assert db_path is not None
1814
+ if os.path.exists(db_path):
1815
+ os.remove(db_path)
1816
+ else:
1817
+ with db_engine.connect() as connection:
1818
+ connection.execution_options(isolation_level="AUTOCOMMIT")
1819
+ connection.execute(
1820
+ sa.text(f'DROP DATABASE IF EXISTS "{custom_database}" WITH (FORCE)')
1821
+ )
1822
+ DBOS(config=config)
1823
+ DBOS.launch()
1824
+
1825
+ key = "key"
1826
+ val = "val"
1827
+
1828
+ @DBOS.transaction()
1829
+ def transaction() -> None:
1830
+ return
1831
+
1832
+ @DBOS.workflow()
1833
+ def recv_workflow() -> Any:
1834
+ transaction()
1835
+ DBOS.set_event(key, val)
1836
+ return DBOS.recv()
1837
+
1838
+ handle = DBOS.start_workflow(recv_workflow)
1839
+ assert DBOS.get_event(handle.workflow_id, key) == val
1840
+ DBOS.send(handle.workflow_id, val)
1841
+ assert handle.get_result() == val
1842
+ assert len(DBOS.list_workflows()) == 2
1843
+ steps = DBOS.list_workflow_steps(handle.workflow_id)
1844
+ assert len(steps) == 4
1845
+ assert "transaction" in steps[0]["function_name"]
1846
+ DBOS.destroy(destroy_registry=True)
1847
+
1848
+ # Test custom database with client
1849
+ client = DBOSClient(
1850
+ system_database_url=config["system_database_url"],
1851
+ application_database_url=config["application_database_url"],
1852
+ )
1853
+ assert len(client.list_workflows()) == 2
1854
+ steps = client.list_workflow_steps(handle.workflow_id)
1855
+ assert len(steps) == 4
1856
+ assert "transaction" in steps[0]["function_name"]
1857
+
1858
+
1799
1859
  def test_custom_schema(
1800
1860
  config: DBOSConfig, cleanup_test_databases: None, skip_with_sqlite: None
1801
1861
  ) -> None:
@@ -1,7 +1,7 @@
1
1
  import threading
2
2
  import time
3
3
  import uuid
4
- from typing import cast
4
+ from typing import Any, Generator, cast
5
5
 
6
6
  import pytest
7
7
  import sqlalchemy as sa
@@ -553,3 +553,16 @@ def test_unregistered_workflow(dbos: DBOS, config: DBOSConfig) -> None:
553
553
 
554
554
  with pytest.raises(DBOSWorkflowFunctionNotFoundError):
555
555
  DBOS._recover_pending_workflows()
556
+
557
+
558
+ def test_nonserializable_return(dbos: DBOS) -> None:
559
+ @DBOS.step()
560
+ def step() -> Generator[str, Any, None]:
561
+ yield "val"
562
+
563
+ @DBOS.workflow()
564
+ def workflow() -> None:
565
+ step()
566
+
567
+ with pytest.raises(TypeError):
568
+ workflow()
@@ -12,24 +12,25 @@ from dbos import DBOS, KafkaMessage
12
12
  # Without it, they're automatically skipped.
13
13
  # Here's a docker-compose script you can use to set up local Kafka:
14
14
 
15
- # version: "3.7"
16
15
  # services:
17
16
  # broker:
18
- # image: bitnami/kafka:latest
17
+ # image: apache/kafka:latest
19
18
  # hostname: broker
20
19
  # container_name: broker
21
20
  # ports:
22
21
  # - '9092:9092'
23
22
  # environment:
24
- # KAFKA_CFG_NODE_ID: 1
25
- # KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
26
- # KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT_HOST://localhost:9092,PLAINTEXT://broker:19092'
27
- # KAFKA_CFG_PROCESS_ROLES: 'broker,controller'
28
- # KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: '1@broker:29093'
29
- # KAFKA_CFG_LISTENERS: 'CONTROLLER://:29093,PLAINTEXT_HOST://:9092,PLAINTEXT://:19092'
30
- # KAFKA_CFG_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
31
- # KAFKA_CFG_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
32
-
23
+ # KAFKA_NODE_ID: 1
24
+ # KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092,CONTROLLER://0.0.0.0:9093
25
+ # KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092
26
+ # KAFKA_PROCESS_ROLES: broker,controller
27
+ # KAFKA_CONTROLLER_QUORUM_VOTERS: 1@localhost:9093
28
+ # KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
29
+ # KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT
30
+ # KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
31
+ # KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
32
+ # KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
33
+ # CLUSTER_ID: MkU3OEVBNTcwNTJENDM2Qk
33
34
 
34
35
  NUM_EVENTS = 3
35
36
 
@@ -81,12 +82,44 @@ def test_kafka(dbos: DBOS) -> None:
81
82
  assert b"test message key" in msg.key # type: ignore
82
83
  assert b"test message value" in msg.value # type: ignore
83
84
  print(msg)
84
- if kafka_count == 3:
85
+ if kafka_count == NUM_EVENTS:
86
+ event.set()
87
+
88
+ wait = event.wait(timeout=10)
89
+ assert wait
90
+ assert kafka_count == NUM_EVENTS
91
+
92
+
93
+ def test_kafka_async(dbos: DBOS) -> None:
94
+ event = threading.Event()
95
+ kafka_count = 0
96
+ server = "localhost:9092"
97
+ topic = f"dbos-kafka-{random.randrange(1_000_000_000)}"
98
+
99
+ if not send_test_messages(server, topic):
100
+ pytest.skip("Kafka not available")
101
+
102
+ @DBOS.kafka_consumer(
103
+ {
104
+ "bootstrap.servers": server,
105
+ "group.id": "dbos-test",
106
+ "auto.offset.reset": "earliest",
107
+ },
108
+ [topic],
109
+ )
110
+ @DBOS.workflow()
111
+ async def test_kafka_workflow(msg: KafkaMessage) -> None:
112
+ nonlocal kafka_count
113
+ kafka_count += 1
114
+ assert b"test message key" in msg.key # type: ignore
115
+ assert b"test message value" in msg.value # type: ignore
116
+ print(msg)
117
+ if kafka_count == NUM_EVENTS:
85
118
  event.set()
86
119
 
87
120
  wait = event.wait(timeout=10)
88
121
  assert wait
89
- assert kafka_count == 3
122
+ assert kafka_count == NUM_EVENTS
90
123
 
91
124
 
92
125
  def test_kafka_in_order(dbos: DBOS) -> None:
@@ -114,12 +147,12 @@ def test_kafka_in_order(dbos: DBOS) -> None:
114
147
  kafka_count += 1
115
148
  assert f"test message key {kafka_count - 1}".encode() == msg.key
116
149
  print(msg)
117
- if kafka_count == 3:
150
+ if kafka_count == NUM_EVENTS:
118
151
  event.set()
119
152
 
120
153
  wait = event.wait(timeout=15)
121
154
  assert wait
122
- assert kafka_count == 3
155
+ assert kafka_count == NUM_EVENTS
123
156
  time.sleep(2) # Wait for things to clean up
124
157
 
125
158
 
@@ -150,9 +183,9 @@ def test_kafka_no_groupid(dbos: DBOS) -> None:
150
183
  assert b"test message key" in msg.key # type: ignore
151
184
  assert b"test message value" in msg.value # type: ignore
152
185
  print(msg)
153
- if kafka_count == 6:
186
+ if kafka_count == NUM_EVENTS * 2:
154
187
  event.set()
155
188
 
156
189
  wait = event.wait(timeout=10)
157
190
  assert wait
158
- assert kafka_count == 6
191
+ assert kafka_count == NUM_EVENTS * 2
@@ -105,6 +105,19 @@ def test_scheduled_workflow(dbos: DBOS) -> None:
105
105
  assert wf_counter > 1 and wf_counter <= 5
106
106
 
107
107
 
108
+ def test_async_scheduled_workflow(dbos: DBOS) -> None:
109
+ wf_counter: int = 0
110
+
111
+ @DBOS.scheduled("* * * * * *")
112
+ @DBOS.workflow()
113
+ async def test_workflow(scheduled: datetime, actual: datetime) -> None:
114
+ nonlocal wf_counter
115
+ wf_counter += 1
116
+
117
+ time.sleep(5)
118
+ assert wf_counter > 1 and wf_counter <= 5
119
+
120
+
108
121
  def test_appdb_downtime(dbos: DBOS, skip_with_sqlite: None) -> None:
109
122
  wf_counter: int = 0
110
123
 
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes
File without changes