dbos 0.6.0a3__tar.gz → 0.7.0a0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-0.6.0a3 → dbos-0.7.0a0}/PKG-INFO +1 -1
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/__init__.py +2 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/dbos.py +25 -6
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/dbos_config.py +2 -1
- dbos-0.7.0a0/dbos/kafka.py +94 -0
- dbos-0.7.0a0/dbos/kafka_message.py +15 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/pyproject.toml +4 -3
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/classdefs.py +1 -1
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/conftest.py +1 -1
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_classdecorators.py +6 -6
- dbos-0.7.0a0/tests/test_kafka.py +80 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_singleton.py +12 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/LICENSE +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/README.md +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/admin_sever.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/application_database.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/cli.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/context.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/core.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/dbos-config.schema.json +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/decorators.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/error.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/fastapi.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/flask.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/logger.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/migrations/env.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/migrations/script.py.mako +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/py.typed +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/recovery.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/registrations.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/request.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/roles.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/scheduler/croniter.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/scheduler/scheduler.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/schemas/__init__.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/schemas/application_database.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/schemas/system_database.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/system_database.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/README.md +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/__package/__init__.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/__package/main.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/__package/schema.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/alembic.ini +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/dbos-config.yaml.dbos +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/migrations/env.py.dbos +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/migrations/script.py.mako +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/templates/hello/start_postgres_docker.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/tracer.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/dbos/utils.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/__init__.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/atexit_no_launch.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/more_classdefs.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/scheduler/test_croniter.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/scheduler/test_scheduler.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_admin_server.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_concurrency.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_config.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_dbos.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_failures.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_fastapi.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_fastapi_roles.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_flask.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_package.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/tests/test_schema_migration.py +0 -0
- {dbos-0.6.0a3 → dbos-0.7.0a0}/version/__init__.py +0 -0
dbos/__init__.py

@@ -2,6 +2,7 @@ from . import error as error
 from .context import DBOSContextEnsure, SetWorkflowID
 from .dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowStatus
 from .dbos_config import ConfigFile, get_dbos_database_url, load_config
+from .kafka_message import KafkaMessage
 from .system_database import GetWorkflowsInput, WorkflowStatusString
 
 __all__ = [
@@ -10,6 +11,7 @@ __all__ = [
     "DBOSConfiguredInstance",
     "DBOSContextEnsure",
     "GetWorkflowsInput",
+    "KafkaMessage",
     "SetWorkflowID",
     "WorkflowHandle",
     "WorkflowStatus",
dbos/dbos.py

@@ -54,11 +54,14 @@ from .tracer import dbos_tracer
 
 if TYPE_CHECKING:
     from fastapi import FastAPI
+    from dbos.kafka import KafkaConsumerWorkflow
     from .request import Request
     from flask import Flask
 
     from sqlalchemy.orm import Session
 
+    from dbos.request import Request
+
 if sys.version_info < (3, 10):
     from typing_extensions import ParamSpec, TypeAlias
 else:
@@ -506,6 +509,20 @@ class DBOS:
 
         return scheduled(_get_or_create_dbos_registry(), cron)
 
+    @classmethod
+    def kafka_consumer(
+        cls, config: dict[str, Any], topics: list[str]
+    ) -> Callable[[KafkaConsumerWorkflow], KafkaConsumerWorkflow]:
+        """Decorate a function to be used as a Kafka consumer."""
+        try:
+            from dbos.kafka import kafka_consumer
+
+            return kafka_consumer(_get_or_create_dbos_registry(), config, topics)
+        except ModuleNotFoundError as e:
+            raise DBOSException(
+                f"{e.name} dependency not found. Please install {e.name} via your package manager."
+            ) from e
+
     @classmethod
     def start_workflow(
         cls,
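The new DBOS.kafka_consumer classmethod pairs with DBOS.workflow so each received message is handled as a workflow. A minimal usage sketch, mirroring the pattern in the new tests/test_kafka.py later in this diff (the broker address, group id, topic, and function name are placeholders):

    from dbos import DBOS, KafkaMessage

    @DBOS.kafka_consumer(
        {"bootstrap.servers": "localhost:9092", "group.id": "example-group"},
        ["example-topic"],
    )
    @DBOS.workflow()
    def handle_message(msg: KafkaMessage) -> None:
        # Invoked once per message; msg carries topic, partition, offset, key, and value.
        print(msg.topic, msg.value)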
dbos/dbos.py

@@ -714,6 +731,12 @@ class DBOS:
         ctx = assert_current_dbos_context()
         return ctx.authenticated_roles
 
+    @classproperty
+    def assumed_role(cls) -> Optional[str]:
+        """Return the role currently assumed by the authenticated user, if any, associated with the current context."""
+        ctx = assert_current_dbos_context()
+        return ctx.assumed_role
+
     @classmethod
     def set_authentication(
         cls, authenticated_user: Optional[str], authenticated_roles: Optional[List[str]]
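The new DBOS.assumed_role classproperty reports which role satisfied a DBOS.required_roles check on the current context. A sketch of how a guarded function might read it, following the updated tests/classdefs.py later in this diff (the function name is illustrative):

    @DBOS.workflow()
    @DBOS.required_roles(["admin"])
    def admin_task(var: str) -> str:
        # Inside a required_roles-guarded call, assumed_role names the matching role.
        assert DBOS.assumed_role == "admin"
        return var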
dbos/dbos.py

@@ -800,13 +823,9 @@ class DBOSConfiguredInstance:
 
     """
 
-    def __init__(self, config_name: str
+    def __init__(self, config_name: str) -> None:
         self.config_name = config_name
-
-            assert isinstance(dbos, DBOS)
-            dbos._registry.register_instance(self)
-        else:
-            DBOS.register_instance(self)
+        DBOS.register_instance(self)
 
 
 # Apps that import DBOS probably don't exit. If they do, let's see if
dbos/dbos_config.py

@@ -2,7 +2,7 @@ import json
 import os
 import re
 from importlib import resources
-from typing import Dict, List, Optional, TypedDict
+from typing import Any, Dict, List, Optional, TypedDict
 
 import yaml
 from jsonschema import ValidationError, validate
@@ -69,6 +69,7 @@ class ConfigFile(TypedDict, total=False):
     database: DatabaseConfig
     telemetry: Optional[TelemetryConfig]
     env: Dict[str, str]
+    application: Dict[str, Any]
 
 
 def substitute_env_vars(content: str) -> str:
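The new application field lets dbos-config.yaml carry arbitrary application-level settings, which the tests/test_singleton.py hunk later in this diff exercises. A sketch of reading such values at runtime (the keys match that test's sample config):

    # Assumes dbos-config.yaml contains a section such as:
    #   application:
    #     service_url: 'https://service.org'
    #     service_config:
    #       port: 80
    service_url = DBOS.config["application"]["service_url"]
    port = DBOS.config["application"]["service_config"]["port"]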
dbos/kafka.py (new file)

@@ -0,0 +1,94 @@
+import threading
+import traceback
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Callable, Generator, NoReturn, Optional, Union
+
+from confluent_kafka import Consumer, KafkaError, KafkaException
+from confluent_kafka import Message as CTypeMessage
+
+if TYPE_CHECKING:
+    from dbos.dbos import _DBOSRegistry
+
+from .context import SetWorkflowID
+from .kafka_message import KafkaMessage
+from .logger import dbos_logger
+
+KafkaConsumerWorkflow = Callable[[KafkaMessage], None]
+
+
+def _kafka_consumer_loop(
+    func: KafkaConsumerWorkflow,
+    config: dict[str, Any],
+    topics: list[str],
+    stop_event: threading.Event,
+) -> None:
+
+    def on_error(err: KafkaError) -> NoReturn:
+        raise KafkaException(err)
+
+    config["error_cb"] = on_error
+    if "auto.offset.reset" not in config:
+        config["auto.offset.reset"] = "earliest"
+
+    consumer = Consumer(config)
+    try:
+        consumer.subscribe(topics)
+        while not stop_event.is_set():
+            cmsg = consumer.poll(1.0)
+
+            if stop_event.is_set():
+                return
+
+            if cmsg is None:
+                continue
+
+            err = cmsg.error()
+            if err is not None:
+                dbos_logger.error(
+                    f"Kafka error {err.code()} ({err.name()}): {err.str()}"
+                )
+                # fatal errors require an updated consumer instance
+                if err.code() == KafkaError._FATAL or err.fatal():
+                    original_consumer = consumer
+                    try:
+                        consumer = Consumer(config)
+                        consumer.subscribe(topics)
+                    finally:
+                        original_consumer.close()
+            else:
+                msg = KafkaMessage(
+                    headers=cmsg.headers(),
+                    key=cmsg.key(),
+                    latency=cmsg.latency(),
+                    leader_epoch=cmsg.leader_epoch(),
+                    offset=cmsg.offset(),
+                    partition=cmsg.partition(),
+                    timestamp=cmsg.timestamp(),
+                    topic=cmsg.topic(),
+                    value=cmsg.value(),
+                )
+                with SetWorkflowID(
+                    f"kafka-unique-id-{msg.topic}-{msg.partition}-{msg.offset}"
+                ):
+                    try:
+                        func(msg)
+                    except Exception as e:
+                        dbos_logger.error(
+                            f"Exception encountered in Kafka consumer: {traceback.format_exc()}"
+                        )
+
+    finally:
+        consumer.close()
+
+
+def kafka_consumer(
+    dbosreg: "_DBOSRegistry", config: dict[str, Any], topics: list[str]
+) -> Callable[[KafkaConsumerWorkflow], KafkaConsumerWorkflow]:
+    def decorator(func: KafkaConsumerWorkflow) -> KafkaConsumerWorkflow:
+        stop_event = threading.Event()
+        dbosreg.register_poller(
+            stop_event, _kafka_consumer_loop, func, config, topics, stop_event
+        )
+        return func
+
+    return decorator
dbos/kafka_message.py (new file)

@@ -0,0 +1,15 @@
+from dataclasses import dataclass
+from typing import Optional, Union
+
+
+@dataclass
+class KafkaMessage:
+    headers: Optional[list[tuple[str, Union[str, bytes]]]]
+    key: Optional[Union[str, bytes]]
+    latency: Optional[float]
+    leader_epoch: Optional[int]
+    offset: Optional[int]
+    partition: Optional[int]
+    timestamp: tuple[int, int]
+    topic: Optional[str]
+    value: Optional[Union[str, bytes]]
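Since KafkaMessage is a plain dataclass, it can also be constructed directly, for example when unit-testing a consumer workflow without a broker (all field values below are illustrative):

    msg = KafkaMessage(
        headers=None,
        key=b"key",
        latency=None,
        leader_epoch=None,
        offset=0,
        partition=0,
        timestamp=(0, 1718000000000),  # (timestamp type, value), as in confluent_kafka's Message.timestamp()
        topic="example-topic",
        value=b"example value",
    )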
pyproject.toml

@@ -23,7 +23,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "0.6.0a3"
+version = "0.7.0a0"
 
 [project.license]
 text = "MIT"
@@ -60,12 +60,13 @@ dev = [
     "httpx>=0.27.0",
     "types-psycopg2>=2.9.21.20240417",
     "pytz>=2024.1",
-    "pdm>=2.18.0",
-    "pdm-backend>=2.3.3",
     "GitPython>=3.1.43",
+    "confluent-kafka>=2.5.3",
+    "types-confluent-kafka>=1.2.2",
     "flask>=3.0.3",
     "pytest-order>=1.3.0",
     "pyjwt>=2.9.0",
+    "pdm-backend>=2.3.3",
 ]
 
 [tool.black]
tests/classdefs.py

@@ -61,7 +61,7 @@ class DBOSTestClass(DBOSConfiguredInstance):
     @DBOS.workflow()
     @DBOS.required_roles(["admin"])
     def test_func_admin(self, var: str) -> str:
-        assert
+        assert DBOS.assumed_role == "admin"
         return self.config_name + ":" + var
 
 
tests/conftest.py

@@ -124,7 +124,7 @@ def dbos_fastapi(
     warnings.filterwarnings(
         "ignore",
         category=DeprecationWarning,
-        message="\s*on_event is deprecated, use lifespan event handlers instead\.",
+        message=r"\s*on_event is deprecated, use lifespan event handlers instead\.",
     )
     dbos = DBOS(fastapi=app, config=config)
 
tests/test_classdecorators.py

@@ -49,7 +49,7 @@ def test_required_roles_class(dbos: DBOS) -> None:
     @DBOS.default_required_roles(["user"])
     class DBOSTestClassRR(DBOSConfiguredInstance):
         def __init__(self) -> None:
-            super().__init__("myconfig"
+            super().__init__("myconfig")
 
         @DBOS.workflow()
         def test_func_user(self, var: str) -> str:
@@ -279,7 +279,7 @@ def test_simple_workflow_inst(dbos: DBOS) -> None:
     @DBOS.dbos_class()
     class DBOSTestClassInst(DBOSConfiguredInstance):
         def __init__(self) -> None:
-            super().__init__("bob"
+            super().__init__("bob")
             self.txn_counter: int = 0
             self.wf_counter: int = 0
             self.step_counter: int = 0
@@ -327,7 +327,7 @@ def test_simple_workflow_inst(dbos: DBOS) -> None:
 def test_forgotten_decorator(dbos: DBOS) -> None:
     class DBOSTestRegErr(DBOSConfiguredInstance):
         def __init__(self) -> None:
-            super().__init__("bob"
+            super().__init__("bob")
             self.txn_counter: int = 0
             self.wf_counter: int = 0
             self.step_counter: int = 0
@@ -362,7 +362,7 @@ def test_duplicate_reg(dbos: DBOS) -> None:
     @DBOS.dbos_class()
     class DBOSTestRegDup(DBOSConfiguredInstance):
         def __init__(self) -> None:
-            super().__init__("bob"
+            super().__init__("bob")
 
     # Duplicate class registration
     with pytest.raises(Exception) as exc_info:
@@ -370,7 +370,7 @@ def test_duplicate_reg(dbos: DBOS) -> None:
         @DBOS.dbos_class()
         class DBOSTestRegDup(DBOSConfiguredInstance):  # type: ignore
             def __init__(self) -> None:
-                super().__init__("bob"
+                super().__init__("bob")
 
     assert "Duplicate type registration for class 'DBOSTestRegDup'" == str(
         exc_info.value
@@ -419,7 +419,7 @@ def test_inst_recovery(dbos: DBOS) -> None:
     @DBOS.dbos_class()
     class DBOSTestInstRec(DBOSConfiguredInstance):
         def __init__(self) -> None:
-            super().__init__("bob"
+            super().__init__("bob")
 
         @DBOS.workflow()
         def check_inst(self, arg1: str) -> str:
tests/test_kafka.py (new file)

@@ -0,0 +1,80 @@
+import random
+import threading
+import uuid
+from typing import Any, List, NoReturn
+
+import pytest
+from confluent_kafka import KafkaError, Producer
+
+from dbos import DBOS, KafkaMessage
+
+# These tests require local Kafka to run.
+# Without it, they're automatically skipped.
+# Here's a docker-compose script you can use to set up local Kafka:
+
+# version: "3.7"
+# services:
+#   broker:
+#     image: bitnami/kafka:latest
+#     hostname: broker
+#     container_name: broker
+#     ports:
+#       - '9092:9092'
+#     environment:
+#       KAFKA_CFG_NODE_ID: 1
+#       KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+#       KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT_HOST://localhost:9092,PLAINTEXT://broker:19092'
+#       KAFKA_CFG_PROCESS_ROLES: 'broker,controller'
+#       KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: '1@broker:29093'
+#       KAFKA_CFG_LISTENERS: 'CONTROLLER://:29093,PLAINTEXT_HOST://:9092,PLAINTEXT://:19092'
+#       KAFKA_CFG_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+#       KAFKA_CFG_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+
+
+def send_test_messages(server: str, topic: str) -> bool:
+
+    try:
+
+        def on_error(err: KafkaError) -> NoReturn:
+            raise Exception(err)
+
+        producer = Producer({"bootstrap.servers": server, "error_cb": on_error})
+
+        producer.produce(topic, key=f"test message key", value=f"test message value")
+
+        producer.poll(10)
+        producer.flush(10)
+        return True
+    except Exception as e:
+        return False
+    finally:
+        pass
+
+
+def test_kafka(dbos: DBOS) -> None:
+    event = threading.Event()
+    server = "localhost:9092"
+    topic = f"dbos-kafka-{random.randrange(1_000_000_000)}"
+
+    if not send_test_messages(server, topic):
+        pytest.skip("Kafka not available")
+
+    messages: List[KafkaMessage] = []
+
+    @DBOS.kafka_consumer(
+        {
+            "bootstrap.servers": server,
+            "group.id": "dbos-test",
+            "auto.offset.reset": "earliest",
+        },
+        [topic],
+    )
+    @DBOS.workflow()
+    def test_kafka_workflow(msg: KafkaMessage) -> None:
+        print(msg)
+        messages.append(msg)
+        event.set()
+
+    wait = event.wait(timeout=10)
+    assert wait
+    assert len(messages) > 0
tests/test_singleton.py

@@ -146,6 +146,12 @@ database:
 runtimeConfig:
   start:
     - python3 main.py
+application:
+  service_url: 'https://service.org'
+  service_config:
+    port: 80
+    user: "user"
+    password: "password"
 """
 
 
@@ -164,6 +170,12 @@ def test_config_before_singleton(cleanup_test_databases: None) -> None:
 
     x = DBOS.config.get("language")
     assert x == "python"
+    y = DBOS.config["language"]
+    assert y == "python"
+    url = DBOS.config["application"]["service_url"]
+    assert url == "https://service.org"
+    port = DBOS.config["application"]["service_config"]["port"]
+    assert port == 80
 
     # This is OK, it meant load_config anyway
     dbos: DBOS = DBOS()