edda-framework 0.6.0__py3-none-any.whl → 0.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- edda/__init__.py +39 -5
- edda/app.py +383 -223
- edda/channels.py +992 -0
- edda/compensation.py +22 -22
- edda/context.py +77 -51
- edda/integrations/opentelemetry/hooks.py +7 -2
- edda/locking.py +130 -67
- edda/replay.py +312 -82
- edda/storage/models.py +165 -24
- edda/storage/protocol.py +575 -122
- edda/storage/sqlalchemy_storage.py +2073 -319
- edda/viewer_ui/app.py +558 -127
- edda/viewer_ui/components.py +81 -68
- edda/viewer_ui/data_service.py +61 -25
- edda/viewer_ui/theme.py +200 -0
- edda/workflow.py +43 -0
- {edda_framework-0.6.0.dist-info → edda_framework-0.8.0.dist-info}/METADATA +167 -9
- {edda_framework-0.6.0.dist-info → edda_framework-0.8.0.dist-info}/RECORD +21 -20
- {edda_framework-0.6.0.dist-info → edda_framework-0.8.0.dist-info}/WHEEL +1 -1
- edda/events.py +0 -505
- {edda_framework-0.6.0.dist-info → edda_framework-0.8.0.dist-info}/entry_points.txt +0 -0
- {edda_framework-0.6.0.dist-info → edda_framework-0.8.0.dist-info}/licenses/LICENSE +0 -0
edda/events.py
DELETED
|
@@ -1,505 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Event handling module for Edda framework.
|
|
3
|
-
|
|
4
|
-
This module provides CloudEvents integration including wait_event and send_event.
|
|
5
|
-
"""
|
|
6
|
-
|
|
7
|
-
import uuid
|
|
8
|
-
from dataclasses import dataclass, field, replace
|
|
9
|
-
from datetime import UTC, datetime, timedelta
|
|
10
|
-
from typing import TYPE_CHECKING, Any, TypeVar, cast
|
|
11
|
-
|
|
12
|
-
import httpx
|
|
13
|
-
from cloudevents.conversion import to_structured
|
|
14
|
-
from cloudevents.http import CloudEvent
|
|
15
|
-
from pydantic import BaseModel
|
|
16
|
-
|
|
17
|
-
from edda.pydantic_utils import from_json_dict, is_pydantic_instance, to_json_dict
|
|
18
|
-
|
|
19
|
-
if TYPE_CHECKING:
|
|
20
|
-
from edda.context import WorkflowContext
|
|
21
|
-
|
|
22
|
-
T = TypeVar("T", bound=BaseModel)
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
@dataclass(frozen=True)
class ReceivedEvent:
    """
    Immutable view of a CloudEvent delivered to a workflow.

    Exposes the event payload via ``data`` and the CloudEvents metadata
    (type, source, id, time, ...) as plain attributes.

    Attributes:
        data: Event payload — a JSON dict, or a Pydantic model instance
            when ``wait_event`` was called with ``model=...``.
        type: CloudEvent type, e.g. "payment.completed".
        source: CloudEvent source, e.g. "payment-service".
        id: Unique event identifier.
        time: Event timestamp in ISO 8601 format, if provided.
        datacontenttype: Content type of ``data`` (typically "application/json").
        subject: Optional CloudEvents subject attribute.
        extensions: Additional CloudEvents extension attributes.

    Example:
        >>> event = await wait_event(ctx, "payment.completed")
        >>> amount = event.data["amount"]          # dict access
        >>> event_source = event.source            # CloudEvents metadata
        >>>
        >>> typed = await wait_event(ctx, "payment.completed", model=PaymentCompleted)
        >>> amount = typed.data.amount             # type-safe access
    """

    # Payload: raw JSON dict, or a Pydantic model when one was requested.
    data: dict[str, Any] | BaseModel

    # Required CloudEvents attributes.
    type: str
    source: str
    id: str

    # Optional CloudEvents attributes.
    time: str | None = None
    datacontenttype: str | None = None
    subject: str | None = None

    # Non-standard extension attributes, keyed by attribute name.
    extensions: dict[str, Any] = field(default_factory=dict)
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
class WaitForEventException(Exception):
|
|
76
|
-
"""
|
|
77
|
-
Exception raised to pause workflow execution while waiting for an event.
|
|
78
|
-
|
|
79
|
-
This is an internal exception used to signal that the workflow should be
|
|
80
|
-
paused and resumed when the specified event arrives.
|
|
81
|
-
"""
|
|
82
|
-
|
|
83
|
-
def __init__(
|
|
84
|
-
self,
|
|
85
|
-
event_type: str,
|
|
86
|
-
timeout_seconds: int | None = None,
|
|
87
|
-
activity_id: str | None = None,
|
|
88
|
-
):
|
|
89
|
-
self.event_type = event_type
|
|
90
|
-
self.timeout_seconds = timeout_seconds
|
|
91
|
-
self.activity_id = activity_id
|
|
92
|
-
super().__init__(f"Waiting for event: {event_type}")
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
class WaitForTimerException(Exception):
|
|
96
|
-
"""
|
|
97
|
-
Exception raised to pause workflow execution while waiting for a timer.
|
|
98
|
-
|
|
99
|
-
This is an internal exception used to signal that the workflow should be
|
|
100
|
-
paused and resumed when the timer expires.
|
|
101
|
-
"""
|
|
102
|
-
|
|
103
|
-
def __init__(
|
|
104
|
-
self,
|
|
105
|
-
duration_seconds: int,
|
|
106
|
-
expires_at: "datetime",
|
|
107
|
-
timer_id: str,
|
|
108
|
-
activity_id: str | None = None,
|
|
109
|
-
):
|
|
110
|
-
self.duration_seconds = duration_seconds
|
|
111
|
-
self.expires_at = expires_at
|
|
112
|
-
self.timer_id = timer_id
|
|
113
|
-
self.activity_id = activity_id
|
|
114
|
-
super().__init__(f"Waiting for timer: {timer_id} ({duration_seconds}s)")
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
class EventTimeoutError(Exception):
    """
    Raised when ``wait_event`` times out before the event arrives.

    Workflows can catch this to handle timeouts gracefully:

        try:
            event = await wait_event(ctx, "payment.completed", timeout_seconds=60)
        except EventTimeoutError:
            # Handle timeout - maybe send reminder or cancel order
            await send_notification("Payment timeout")
    """

    def __init__(self, event_type: str, timeout_seconds: int):
        # Keep the wait parameters available to handlers.
        self.event_type = event_type
        self.timeout_seconds = timeout_seconds
        super().__init__(f"Event '{event_type}' did not arrive within {timeout_seconds} seconds")
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
async def wait_event(
    ctx: "WorkflowContext",
    event_type: str,
    timeout_seconds: int | None = None,
    model: type[T] | None = None,
    event_id: str | None = None,
) -> ReceivedEvent:
    """
    Wait for a CloudEvent to arrive, pausing the workflow until it does.

    During the first execution this raises WaitForEventException so the
    ReplayEngine can suspend the workflow and register an event
    subscription. During replay the cached event (or cached failure) is
    returned / re-raised instead, keeping the workflow deterministic.

    Args:
        ctx: Workflow context.
        event_type: CloudEvent type to wait for (e.g., "payment.completed").
        timeout_seconds: Optional timeout in seconds.
        model: Optional Pydantic model class to convert event data to.
        event_id: Optional explicit activity identifier. Required to
            disambiguate concurrent waits (e.g. under asyncio.gather);
            auto-generated otherwise.

    Returns:
        ReceivedEvent containing event data and CloudEvents metadata.
        If model is provided, ReceivedEvent.data will be a Pydantic model
        instance.

    Note:
        Events are delivered to specific workflow instances based on
        instance_id, so no filter function is needed.

    Raises:
        WaitForEventException: During normal execution, to pause the workflow.
        EventTimeoutError: On replay, when the original wait timed out.

    Example:
        >>> # Without Pydantic (dict access)
        >>> payment_event = await wait_event(ctx, "payment.completed")
        >>> amount = payment_event.data["amount"]
        >>>
        >>> # With Pydantic (type-safe access, IDE completion)
        >>> payment_event = await wait_event(
        ...     ctx, event_type="payment.completed", model=PaymentCompleted
        ... )
        >>> amount = payment_event.data.amount
        >>>
        >>> # Concurrent event waiting (explicit IDs required)
        >>> payment, inventory = await asyncio.gather(
        ...     wait_event(ctx, "payment.completed", event_id="payment"),
        ...     wait_event(ctx, "inventory.reserved", event_id="inventory"),
        ... )
    """
    # Resolve the activity ID: caller-supplied (for concurrent waits) or
    # deterministically auto-generated from the event type.
    if event_id is None:
        activity_id = ctx._generate_activity_id(f"wait_event_{event_type}")
    else:
        activity_id = event_id

    # Record activity ID execution (used for replay determinism).
    ctx._record_activity_id(activity_id)

    # During replay, return cached event data.
    if ctx.is_replaying:
        found, cached_result = ctx._get_cached_result(activity_id)
        if found:
            # A cached error marker means the original wait failed.
            if isinstance(cached_result, dict) and cached_result.get("_error"):
                error_type = cached_result.get("error_type", "Exception")
                error_message = cached_result.get("error_message", "Unknown error")
                # Bug fix: re-raise timeouts with their documented type so
                # workflow code that catches EventTimeoutError behaves the
                # same on replay as during the original execution. Previously
                # a bare Exception was raised here, which such handlers miss.
                if error_type == "EventTimeoutError":
                    raise EventTimeoutError(event_type, timeout_seconds or 0)
                raise Exception(f"{error_type}: {error_message}")

            # Convert the cached JSON payload to a Pydantic model if requested.
            if model is not None and isinstance(cached_result.data, dict):
                converted_data = from_json_dict(cached_result.data, model)
                cached_result = replace(cached_result, data=converted_data)

            # Return cached event data.
            return cached_result  # type: ignore[no-any-return]

    # First execution: raise to pause the workflow. The ReplayEngine catches
    # this and atomically (1) registers the event subscription, (2) updates
    # the activity ID, and (3) releases the lock — required so distributed
    # coroutines work correctly in multi-Pod environments.
    raise WaitForEventException(
        event_type=event_type,
        timeout_seconds=timeout_seconds,
        activity_id=activity_id,
    )
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
async def wait_timer(
    ctx: "WorkflowContext",
    duration_seconds: int,
    timer_id: str | None = None,
) -> None:
    """
    Pause the workflow for a fixed duration.

    On first execution this raises WaitForTimerException so the ReplayEngine
    can register a timer subscription and suspend the workflow; on replay it
    returns immediately, because the timer has already fired.

    Args:
        ctx: Workflow context.
        duration_seconds: Duration to wait in seconds.
        timer_id: Optional timer identifier. Supply explicit IDs when
            waiting on several timers concurrently; auto-generated otherwise:

            >>> results = await asyncio.gather(
            ...     wait_timer(ctx, 60, timer_id="short"),
            ...     wait_timer(ctx, 120, timer_id="long"),
            ... )

    Note:
        Timers are polled periodically by a background task, so the actual
        resume time may be slightly later than the requested duration.

    Raises:
        WaitForTimerException: During normal execution, to pause the workflow.

    Example:
        >>> @saga
        ... async def order_workflow(ctx: WorkflowContext, order_id: str):
        ...     await create_order(ctx, order_id)
        ...     await wait_timer(ctx, duration_seconds=60)  # wait for payment
        ...     await check_payment_status(ctx, order_id)
    """
    # The timer ID doubles as the activity ID; auto-generate when omitted.
    if timer_id is None:
        timer_id = ctx._generate_activity_id("wait_timer")
    activity_id = timer_id

    # Record activity ID execution (used for replay determinism).
    ctx._record_activity_id(activity_id)

    # Replay path: the timer already expired — surface a cached failure, if
    # any, otherwise just continue.
    if ctx.is_replaying:
        found, cached_result = ctx._get_cached_result(activity_id)
        if found:
            if isinstance(cached_result, dict) and cached_result.get("_error"):
                error_type = cached_result.get("error_type", "Exception")
                error_message = cached_result.get("error_message", "Unknown error")
                raise Exception(f"{error_type}: {error_message}")
            return

    # First execution: compute the absolute deadline exactly once (kept in
    # the exception) so replay stays deterministic, then hand control to the
    # ReplayEngine, which atomically registers the timer subscription,
    # updates the activity ID, and releases the lock — required for
    # distributed coroutines in multi-Pod environments.
    raise WaitForTimerException(
        duration_seconds=duration_seconds,
        expires_at=datetime.now(UTC) + timedelta(seconds=duration_seconds),
        timer_id=timer_id,
        activity_id=activity_id,
    )
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
async def wait_until(
    ctx: "WorkflowContext",
    until_time: "datetime",
    timer_id: str | None = None,
) -> None:
    """
    Pause the workflow until a specific absolute time.

    On first execution this raises WaitForTimerException so the ReplayEngine
    can register a timer subscription and suspend the workflow; on replay it
    returns immediately, because the timer has already fired.

    Args:
        ctx: Workflow context.
        until_time: Absolute datetime to wait until. Any timezone is
            accepted; naive datetimes are treated as UTC.
        timer_id: Optional timer identifier (auto-generated if not provided).

    Note:
        Timers are polled periodically by a background task, so the actual
        resume time may be slightly later than the requested time.

    Raises:
        WaitForTimerException: During normal execution, to pause the workflow.

    Example:
        >>> from datetime import datetime, UTC
        >>>
        >>> @saga
        ... async def order_workflow(ctx: WorkflowContext, order_id: str):
        ...     await create_order(ctx, order_id)
        ...     # Wait until 3 PM today
        ...     deadline = datetime.now(UTC).replace(hour=15, minute=0, second=0)
        ...     await wait_until(ctx, deadline)
        ...     await check_payment_status(ctx, order_id)
    """
    # The timer ID doubles as the activity ID; auto-generate when omitted.
    if timer_id is None:
        timer_id = ctx._generate_activity_id("wait_until")
    activity_id = timer_id

    # Record activity ID execution (used for replay determinism).
    ctx._record_activity_id(activity_id)

    # Replay path: the timer already expired — surface a cached failure, if
    # any, otherwise just continue.
    if ctx.is_replaying:
        found, cached_result = ctx._get_cached_result(activity_id)
        if found:
            if isinstance(cached_result, dict) and cached_result.get("_error"):
                error_type = cached_result.get("error_type", "Exception")
                error_message = cached_result.get("error_message", "Unknown error")
                raise Exception(f"{error_type}: {error_message}")
            return

    # Normalize the deadline to UTC: naive datetimes are assumed UTC,
    # aware ones are converted.
    expires_at_utc = (
        until_time.replace(tzinfo=UTC)
        if until_time.tzinfo is None
        else until_time.astimezone(UTC)
    )

    # First execution: hand control to the ReplayEngine, which atomically
    # registers the timer subscription (with expires_at), updates the
    # activity ID, and releases the lock — required for distributed
    # coroutines in multi-Pod environments.
    raise WaitForTimerException(
        duration_seconds=0,  # Not applicable for wait_until
        expires_at=expires_at_utc,
        timer_id=timer_id,
        activity_id=activity_id,
    )
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
async def send_event(
    event_type: str,
    source: str,
    data: dict[str, Any] | BaseModel,
    broker_url: str = "http://broker-ingress.knative-eventing.svc.cluster.local",
    datacontenttype: str | None = None,
) -> None:
    """
    Publish a CloudEvent to a Knative Broker over HTTP.

    Args:
        event_type: CloudEvent type (e.g., "order.created").
        source: CloudEvent source (e.g., "order-service").
        data: Event payload; Pydantic models are serialized to JSON dicts.
        broker_url: Knative Broker ingress URL.
        datacontenttype: Content type (defaults to "application/json").

    Raises:
        httpx.HTTPError: If the HTTP request fails.

    Example:
        >>> # With dict
        >>> await send_event("order.created", "order-service", {"order_id": "123"})
        >>>
        >>> # With Pydantic model (automatically converted to JSON)
        >>> order = OrderCreated(order_id="123", amount=99.99)
        >>> await send_event("order.created", "order-service", order)
    """
    # Normalize the payload to a plain JSON dict.
    payload: dict[str, Any]
    if is_pydantic_instance(data):
        payload = to_json_dict(data)
    else:
        payload = cast(dict[str, Any], data)

    # Assemble the CloudEvents attributes with a fresh event ID.
    attributes = {
        "type": event_type,
        "source": source,
        "id": str(uuid.uuid4()),
    }
    if datacontenttype:
        attributes["datacontenttype"] = datacontenttype

    # Serialize in CloudEvents "structured" HTTP mode and POST to the broker.
    headers, body = to_structured(CloudEvent(attributes, payload))
    async with httpx.AsyncClient() as client:
        response = await client.post(
            broker_url,
            headers=headers,
            content=body,
            timeout=10.0,
        )
        response.raise_for_status()
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
async def send_event_from_dict(event_dict: dict[str, Any], broker_url: str | None = None) -> None:
    """
    Publish a CloudEvent described by a dictionary.

    Convenience wrapper around ``send_event`` that pulls 'type', 'source',
    and 'data' out of the dictionary.

    Args:
        event_dict: Dictionary with 'type', 'source', and 'data' keys.
        broker_url: Optional Knative Broker URL; when omitted,
            ``send_event``'s default broker is used.

    Raises:
        ValueError: If required keys are missing.
        httpx.HTTPError: If the HTTP request fails.
    """
    # Validate required keys up front so the caller gets a clear error.
    for required in ("type", "source", "data"):
        if required not in event_dict:
            raise ValueError(f"event_dict must contain '{required}' key")

    if broker_url is None:
        # Omit broker_url entirely so send_event's default applies.
        await send_event(
            event_type=event_dict["type"],
            source=event_dict["source"],
            data=event_dict["data"],
        )
    else:
        await send_event(
            event_type=event_dict["type"],
            source=event_dict["source"],
            data=event_dict["data"],
            broker_url=broker_url,
        )
|
|
File without changes
|
|
File without changes
|