dbos 0.6.0a4__tar.gz → 0.7.0a0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dbos-0.6.0a4 → dbos-0.7.0a0}/PKG-INFO +1 -1
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/__init__.py +2 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/dbos.py +17 -0
- dbos-0.7.0a0/dbos/kafka.py +94 -0
- dbos-0.7.0a0/dbos/kafka_message.py +15 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/pyproject.toml +4 -3
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/conftest.py +1 -1
- dbos-0.7.0a0/tests/test_kafka.py +80 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/LICENSE +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/README.md +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/admin_sever.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/application_database.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/cli.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/context.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/core.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/dbos-config.schema.json +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/dbos_config.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/decorators.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/error.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/fastapi.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/flask.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/logger.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/migrations/env.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/migrations/script.py.mako +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/migrations/versions/5c361fc04708_added_system_tables.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/migrations/versions/a3b18ad34abe_added_triggers.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/py.typed +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/recovery.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/registrations.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/request.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/roles.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/scheduler/croniter.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/scheduler/scheduler.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/schemas/__init__.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/schemas/application_database.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/schemas/system_database.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/system_database.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/README.md +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/__package/__init__.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/__package/main.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/__package/schema.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/alembic.ini +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/dbos-config.yaml.dbos +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/migrations/env.py.dbos +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/migrations/script.py.mako +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/migrations/versions/2024_07_31_180642_init.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/templates/hello/start_postgres_docker.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/tracer.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/dbos/utils.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/__init__.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/atexit_no_ctor.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/atexit_no_launch.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/classdefs.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/more_classdefs.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/scheduler/test_croniter.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/scheduler/test_scheduler.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_admin_server.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_classdecorators.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_concurrency.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_config.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_dbos.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_failures.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_fastapi.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_fastapi_roles.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_flask.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_package.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_schema_migration.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/tests/test_singleton.py +0 -0
- {dbos-0.6.0a4 → dbos-0.7.0a0}/version/__init__.py +0 -0
dbos/__init__.py

@@ -2,6 +2,7 @@ from . import error as error
 from .context import DBOSContextEnsure, SetWorkflowID
 from .dbos import DBOS, DBOSConfiguredInstance, WorkflowHandle, WorkflowStatus
 from .dbos_config import ConfigFile, get_dbos_database_url, load_config
+from .kafka_message import KafkaMessage
 from .system_database import GetWorkflowsInput, WorkflowStatusString

 __all__ = [
@@ -10,6 +11,7 @@ __all__ = [
     "DBOSConfiguredInstance",
     "DBOSContextEnsure",
     "GetWorkflowsInput",
+    "KafkaMessage",
     "SetWorkflowID",
     "WorkflowHandle",
     "WorkflowStatus",
dbos/dbos.py

@@ -54,11 +54,14 @@ from .tracer import dbos_tracer

 if TYPE_CHECKING:
     from fastapi import FastAPI
+    from dbos.kafka import KafkaConsumerWorkflow
     from .request import Request
     from flask import Flask

 from sqlalchemy.orm import Session

+from dbos.request import Request
+
 if sys.version_info < (3, 10):
     from typing_extensions import ParamSpec, TypeAlias
 else:
@@ -506,6 +509,20 @@ class DBOS:

         return scheduled(_get_or_create_dbos_registry(), cron)

+    @classmethod
+    def kafka_consumer(
+        cls, config: dict[str, Any], topics: list[str]
+    ) -> Callable[[KafkaConsumerWorkflow], KafkaConsumerWorkflow]:
+        """Decorate a function to be used as a Kafka consumer."""
+        try:
+            from dbos.kafka import kafka_consumer
+
+            return kafka_consumer(_get_or_create_dbos_registry(), config, topics)
+        except ModuleNotFoundError as e:
+            raise DBOSException(
+                f"{e.name} dependency not found. Please install {e.name} via your package manager."
+            ) from e
+
     @classmethod
     def start_workflow(
         cls,
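For orientation, the decorator added above is exercised by the new tests/test_kafka.py later in this diff. A minimal usage sketch; the broker address, group id, topic, and handler name are illustrative placeholders, not values from the package:

from dbos import DBOS, KafkaMessage

# Each message delivered on the subscribed topics starts the decorated workflow;
# the consumer loop assigns a workflow ID derived from topic, partition, and offset.
@DBOS.kafka_consumer(
    {"bootstrap.servers": "localhost:9092", "group.id": "my-app"},
    ["orders"],
)
@DBOS.workflow()
def handle_order(msg: KafkaMessage) -> None:
    print(msg.topic, msg.offset, msg.value)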
dbos/kafka.py (new file)

@@ -0,0 +1,94 @@
+import threading
+import traceback
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, Any, Callable, Generator, NoReturn, Optional, Union
+
+from confluent_kafka import Consumer, KafkaError, KafkaException
+from confluent_kafka import Message as CTypeMessage
+
+if TYPE_CHECKING:
+    from dbos.dbos import _DBOSRegistry
+
+from .context import SetWorkflowID
+from .kafka_message import KafkaMessage
+from .logger import dbos_logger
+
+KafkaConsumerWorkflow = Callable[[KafkaMessage], None]
+
+
+def _kafka_consumer_loop(
+    func: KafkaConsumerWorkflow,
+    config: dict[str, Any],
+    topics: list[str],
+    stop_event: threading.Event,
+) -> None:
+
+    def on_error(err: KafkaError) -> NoReturn:
+        raise KafkaException(err)
+
+    config["error_cb"] = on_error
+    if "auto.offset.reset" not in config:
+        config["auto.offset.reset"] = "earliest"
+
+    consumer = Consumer(config)
+    try:
+        consumer.subscribe(topics)
+        while not stop_event.is_set():
+            cmsg = consumer.poll(1.0)
+
+            if stop_event.is_set():
+                return
+
+            if cmsg is None:
+                continue
+
+            err = cmsg.error()
+            if err is not None:
+                dbos_logger.error(
+                    f"Kafka error {err.code()} ({err.name()}): {err.str()}"
+                )
+                # fatal errors require an updated consumer instance
+                if err.code() == KafkaError._FATAL or err.fatal():
+                    original_consumer = consumer
+                    try:
+                        consumer = Consumer(config)
+                        consumer.subscribe(topics)
+                    finally:
+                        original_consumer.close()
+            else:
+                msg = KafkaMessage(
+                    headers=cmsg.headers(),
+                    key=cmsg.key(),
+                    latency=cmsg.latency(),
+                    leader_epoch=cmsg.leader_epoch(),
+                    offset=cmsg.offset(),
+                    partition=cmsg.partition(),
+                    timestamp=cmsg.timestamp(),
+                    topic=cmsg.topic(),
+                    value=cmsg.value(),
+                )
+                with SetWorkflowID(
+                    f"kafka-unique-id-{msg.topic}-{msg.partition}-{msg.offset}"
+                ):
+                    try:
+                        func(msg)
+                    except Exception as e:
+                        dbos_logger.error(
+                            f"Exception encountered in Kafka consumer: {traceback.format_exc()}"
+                        )
+
+    finally:
+        consumer.close()
+
+
+def kafka_consumer(
+    dbosreg: "_DBOSRegistry", config: dict[str, Any], topics: list[str]
+) -> Callable[[KafkaConsumerWorkflow], KafkaConsumerWorkflow]:
+    def decorator(func: KafkaConsumerWorkflow) -> KafkaConsumerWorkflow:
+        stop_event = threading.Event()
+        dbosreg.register_poller(
+            stop_event, _kafka_consumer_loop, func, config, topics, stop_event
+        )
+        return func
+
+    return decorator
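Note that the loop above wraps each invocation of the consumer workflow in SetWorkflowID with an ID derived from the message's topic, partition, and offset, so a redelivered message maps onto the same workflow rather than a duplicate one. A rough sketch of that idea in isolation, assuming DBOS's exactly-once workflow-ID semantics; the workflow and ID below are purely illustrative:

from dbos import DBOS, SetWorkflowID

@DBOS.workflow()
def process(value: str) -> None:
    print("processing", value)

# Two invocations under the same workflow ID should amount to one logical
# execution; the second resolves to the already-recorded run.
with SetWorkflowID("kafka-unique-id-orders-0-42"):
    process("hello")
with SetWorkflowID("kafka-unique-id-orders-0-42"):
    process("hello")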
dbos/kafka_message.py (new file)

@@ -0,0 +1,15 @@
+from dataclasses import dataclass
+from typing import Optional, Union
+
+
+@dataclass
+class KafkaMessage:
+    headers: Optional[list[tuple[str, Union[str, bytes]]]]
+    key: Optional[Union[str, bytes]]
+    latency: Optional[float]
+    leader_epoch: Optional[int]
+    offset: Optional[int]
+    partition: Optional[int]
+    timestamp: tuple[int, int]
+    topic: Optional[str]
+    value: Optional[Union[str, bytes]]
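KafkaMessage simply snapshots the accessors of a confluent_kafka message into plain fields. A small, hypothetical handler showing how those fields might be consumed (it assumes the producer sent UTF-8 text; timestamp is the (type, milliseconds) pair confluent-kafka reports):

from dbos import KafkaMessage

def describe(msg: KafkaMessage) -> str:
    # key/value may be str or bytes depending on the producer and serializers
    body = msg.value.decode() if isinstance(msg.value, bytes) else msg.value
    ts_type, ts_ms = msg.timestamp
    return f"{msg.topic}[{msg.partition}]@{msg.offset} ts={ts_ms}: {body}"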
pyproject.toml

@@ -23,7 +23,7 @@ dependencies = [
 ]
 requires-python = ">=3.9"
 readme = "README.md"
-version = "0.6.0a4"
+version = "0.7.0a0"

 [project.license]
 text = "MIT"
@@ -60,12 +60,13 @@ dev = [
     "httpx>=0.27.0",
     "types-psycopg2>=2.9.21.20240417",
     "pytz>=2024.1",
-    "pdm>=2.18.0",
-    "pdm-backend>=2.3.3",
     "GitPython>=3.1.43",
+    "confluent-kafka>=2.5.3",
+    "types-confluent-kafka>=1.2.2",
     "flask>=3.0.3",
     "pytest-order>=1.3.0",
     "pyjwt>=2.9.0",
+    "pdm-backend>=2.3.3",
 ]

 [tool.black]
tests/conftest.py

@@ -124,7 +124,7 @@ def dbos_fastapi(
     warnings.filterwarnings(
         "ignore",
         category=DeprecationWarning,
-        message="\s*on_event is deprecated, use lifespan event handlers instead\.",
+        message=r"\s*on_event is deprecated, use lifespan event handlers instead\.",
     )
     dbos = DBOS(fastapi=app, config=config)

tests/test_kafka.py (new file)

@@ -0,0 +1,80 @@
+import random
+import threading
+import uuid
+from typing import Any, List, NoReturn
+
+import pytest
+from confluent_kafka import KafkaError, Producer
+
+from dbos import DBOS, KafkaMessage
+
+# These tests require local Kafka to run.
+# Without it, they're automatically skipped.
+# Here's a docker-compose script you can use to set up local Kafka:
+
+# version: "3.7"
+# services:
+#   broker:
+#     image: bitnami/kafka:latest
+#     hostname: broker
+#     container_name: broker
+#     ports:
+#       - '9092:9092'
+#     environment:
+#       KAFKA_CFG_NODE_ID: 1
+#       KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT'
+#       KAFKA_CFG_ADVERTISED_LISTENERS: 'PLAINTEXT_HOST://localhost:9092,PLAINTEXT://broker:19092'
+#       KAFKA_CFG_PROCESS_ROLES: 'broker,controller'
+#       KAFKA_CFG_CONTROLLER_QUORUM_VOTERS: '1@broker:29093'
+#       KAFKA_CFG_LISTENERS: 'CONTROLLER://:29093,PLAINTEXT_HOST://:9092,PLAINTEXT://:19092'
+#       KAFKA_CFG_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
+#       KAFKA_CFG_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+
+
+def send_test_messages(server: str, topic: str) -> bool:
+
+    try:
+
+        def on_error(err: KafkaError) -> NoReturn:
+            raise Exception(err)
+
+        producer = Producer({"bootstrap.servers": server, "error_cb": on_error})
+
+        producer.produce(topic, key=f"test message key", value=f"test message value")
+
+        producer.poll(10)
+        producer.flush(10)
+        return True
+    except Exception as e:
+        return False
+    finally:
+        pass
+
+
+def test_kafka(dbos: DBOS) -> None:
+    event = threading.Event()
+    server = "localhost:9092"
+    topic = f"dbos-kafka-{random.randrange(1_000_000_000)}"
+
+    if not send_test_messages(server, topic):
+        pytest.skip("Kafka not available")
+
+    messages: List[KafkaMessage] = []
+
+    @DBOS.kafka_consumer(
+        {
+            "bootstrap.servers": server,
+            "group.id": "dbos-test",
+            "auto.offset.reset": "earliest",
+        },
+        [topic],
+    )
+    @DBOS.workflow()
+    def test_kafka_workflow(msg: KafkaMessage) -> None:
+        print(msg)
+        messages.append(msg)
+        event.set()
+
+    wait = event.wait(timeout=10)
+    assert wait
+    assert len(messages) > 0