omnibase_infra 0.2.7__py3-none-any.whl → 0.2.9__py3-none-any.whl

This diff compares two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (79)
  1. omnibase_infra/__init__.py +1 -1
  2. omnibase_infra/enums/__init__.py +4 -0
  3. omnibase_infra/enums/enum_declarative_node_violation.py +102 -0
  4. omnibase_infra/event_bus/adapters/__init__.py +31 -0
  5. omnibase_infra/event_bus/adapters/adapter_protocol_event_publisher_kafka.py +517 -0
  6. omnibase_infra/mixins/mixin_async_circuit_breaker.py +113 -1
  7. omnibase_infra/models/__init__.py +9 -0
  8. omnibase_infra/models/event_bus/__init__.py +22 -0
  9. omnibase_infra/models/event_bus/model_consumer_retry_config.py +367 -0
  10. omnibase_infra/models/event_bus/model_dlq_config.py +177 -0
  11. omnibase_infra/models/event_bus/model_idempotency_config.py +131 -0
  12. omnibase_infra/models/event_bus/model_offset_policy_config.py +107 -0
  13. omnibase_infra/models/resilience/model_circuit_breaker_config.py +15 -0
  14. omnibase_infra/models/validation/__init__.py +8 -0
  15. omnibase_infra/models/validation/model_declarative_node_validation_result.py +139 -0
  16. omnibase_infra/models/validation/model_declarative_node_violation.py +169 -0
  17. omnibase_infra/nodes/architecture_validator/__init__.py +28 -7
  18. omnibase_infra/nodes/architecture_validator/constants.py +36 -0
  19. omnibase_infra/nodes/architecture_validator/handlers/__init__.py +28 -0
  20. omnibase_infra/nodes/architecture_validator/handlers/contract.yaml +120 -0
  21. omnibase_infra/nodes/architecture_validator/handlers/handler_architecture_validation.py +359 -0
  22. omnibase_infra/nodes/architecture_validator/node.py +1 -0
  23. omnibase_infra/nodes/architecture_validator/node_architecture_validator.py +48 -336
  24. omnibase_infra/nodes/node_ledger_projection_compute/__init__.py +16 -2
  25. omnibase_infra/nodes/node_ledger_projection_compute/contract.yaml +14 -4
  26. omnibase_infra/nodes/node_ledger_projection_compute/handlers/__init__.py +18 -0
  27. omnibase_infra/nodes/node_ledger_projection_compute/handlers/contract.yaml +53 -0
  28. omnibase_infra/nodes/node_ledger_projection_compute/handlers/handler_ledger_projection.py +354 -0
  29. omnibase_infra/nodes/node_ledger_projection_compute/node.py +20 -256
  30. omnibase_infra/nodes/node_registry_effect/node.py +20 -73
  31. omnibase_infra/protocols/protocol_dispatch_engine.py +90 -0
  32. omnibase_infra/runtime/__init__.py +11 -0
  33. omnibase_infra/runtime/baseline_subscriptions.py +150 -0
  34. omnibase_infra/runtime/event_bus_subcontract_wiring.py +455 -24
  35. omnibase_infra/runtime/kafka_contract_source.py +13 -5
  36. omnibase_infra/runtime/service_message_dispatch_engine.py +112 -0
  37. omnibase_infra/runtime/service_runtime_host_process.py +6 -11
  38. omnibase_infra/services/__init__.py +36 -0
  39. omnibase_infra/services/contract_publisher/__init__.py +95 -0
  40. omnibase_infra/services/contract_publisher/config.py +199 -0
  41. omnibase_infra/services/contract_publisher/errors.py +243 -0
  42. omnibase_infra/services/contract_publisher/models/__init__.py +28 -0
  43. omnibase_infra/services/contract_publisher/models/model_contract_error.py +67 -0
  44. omnibase_infra/services/contract_publisher/models/model_infra_error.py +62 -0
  45. omnibase_infra/services/contract_publisher/models/model_publish_result.py +112 -0
  46. omnibase_infra/services/contract_publisher/models/model_publish_stats.py +79 -0
  47. omnibase_infra/services/contract_publisher/service.py +617 -0
  48. omnibase_infra/services/contract_publisher/sources/__init__.py +52 -0
  49. omnibase_infra/services/contract_publisher/sources/model_discovered.py +155 -0
  50. omnibase_infra/services/contract_publisher/sources/protocol.py +101 -0
  51. omnibase_infra/services/contract_publisher/sources/source_composite.py +309 -0
  52. omnibase_infra/services/contract_publisher/sources/source_filesystem.py +174 -0
  53. omnibase_infra/services/contract_publisher/sources/source_package.py +221 -0
  54. omnibase_infra/services/observability/__init__.py +40 -0
  55. omnibase_infra/services/observability/agent_actions/__init__.py +64 -0
  56. omnibase_infra/services/observability/agent_actions/config.py +209 -0
  57. omnibase_infra/services/observability/agent_actions/consumer.py +1320 -0
  58. omnibase_infra/services/observability/agent_actions/models/__init__.py +87 -0
  59. omnibase_infra/services/observability/agent_actions/models/model_agent_action.py +142 -0
  60. omnibase_infra/services/observability/agent_actions/models/model_detection_failure.py +125 -0
  61. omnibase_infra/services/observability/agent_actions/models/model_envelope.py +85 -0
  62. omnibase_infra/services/observability/agent_actions/models/model_execution_log.py +159 -0
  63. omnibase_infra/services/observability/agent_actions/models/model_performance_metric.py +130 -0
  64. omnibase_infra/services/observability/agent_actions/models/model_routing_decision.py +138 -0
  65. omnibase_infra/services/observability/agent_actions/models/model_transformation_event.py +124 -0
  66. omnibase_infra/services/observability/agent_actions/tests/__init__.py +20 -0
  67. omnibase_infra/services/observability/agent_actions/tests/test_consumer.py +1154 -0
  68. omnibase_infra/services/observability/agent_actions/tests/test_models.py +645 -0
  69. omnibase_infra/services/observability/agent_actions/tests/test_writer.py +709 -0
  70. omnibase_infra/services/observability/agent_actions/writer_postgres.py +926 -0
  71. omnibase_infra/validation/__init__.py +12 -0
  72. omnibase_infra/validation/contracts/declarative_node.validation.yaml +143 -0
  73. omnibase_infra/validation/validation_exemptions.yaml +93 -0
  74. omnibase_infra/validation/validator_declarative_node.py +850 -0
  75. {omnibase_infra-0.2.7.dist-info → omnibase_infra-0.2.9.dist-info}/METADATA +3 -3
  76. {omnibase_infra-0.2.7.dist-info → omnibase_infra-0.2.9.dist-info}/RECORD +79 -27
  77. {omnibase_infra-0.2.7.dist-info → omnibase_infra-0.2.9.dist-info}/WHEEL +0 -0
  78. {omnibase_infra-0.2.7.dist-info → omnibase_infra-0.2.9.dist-info}/entry_points.txt +0 -0
  79. {omnibase_infra-0.2.7.dist-info → omnibase_infra-0.2.9.dist-info}/licenses/LICENSE +0 -0
@@ -76,7 +76,7 @@ See Also
  - Runtime kernel: omnibase_infra.runtime.service_kernel
  """

- __version__ = "0.2.3"
+ __version__ = "0.2.9"

  from . import (
      enums,
@@ -57,6 +57,9 @@ from omnibase_infra.enums.enum_circuit_state import EnumCircuitState
  from omnibase_infra.enums.enum_confirmation_event_type import EnumConfirmationEventType
  from omnibase_infra.enums.enum_consumer_group_purpose import EnumConsumerGroupPurpose
  from omnibase_infra.enums.enum_contract_type import EnumContractType
+ from omnibase_infra.enums.enum_declarative_node_violation import (
+     EnumDeclarativeNodeViolation,
+ )
  from omnibase_infra.enums.enum_dedupe_strategy import EnumDedupeStrategy
  from omnibase_infra.enums.enum_dispatch_status import EnumDispatchStatus
  from omnibase_infra.enums.enum_environment import EnumEnvironment
@@ -101,6 +104,7 @@ __all__: list[str] = [
      "EnumConfirmationEventType",
      "EnumConsumerGroupPurpose",
      "EnumContractType",
+     "EnumDeclarativeNodeViolation",
      "EnumDedupeStrategy",
      "EnumDispatchStatus",
      "EnumEnvironment",
@@ -0,0 +1,102 @@
+ # SPDX-License-Identifier: MIT
+ # Copyright (c) 2025 OmniNode Team
+ """Enum for declarative node violation categories.
+
+ Defines the specific violation types detected by the declarative node validator.
+ Following the ONEX declarative pattern policy:
+ - Node classes MUST only extend base classes without custom logic
+ - Only __init__ with super().__init__(container) is allowed
+ - No custom methods, properties, or instance variables
+ """
+
+ from enum import Enum
+
+
+ class EnumDeclarativeNodeViolation(str, Enum):
+     """Violation types for declarative node pattern enforcement.
+
+     These violation types correspond to the ONEX declarative node policy:
+     Nodes must be contract-driven with no custom Python logic.
+
+     Attributes:
+         CUSTOM_METHOD: Node class contains custom method (not __init__).
+         CUSTOM_PROPERTY: Node class contains property definition.
+         INIT_CUSTOM_LOGIC: __init__ contains logic beyond super().__init__().
+         INSTANCE_VARIABLE: __init__ creates custom instance variables.
+         CLASS_VARIABLE: Node class defines class-level variables.
+         SYNTAX_ERROR: File has Python syntax error, cannot validate.
+         NO_NODE_CLASS: File named node.py but contains no node class.
+
+     Policy Summary:
+         - ALLOWED: ``class MyNode(NodeEffect): pass``
+         - ALLOWED: ``def __init__(self, container): super().__init__(container)``
+         - BLOCKED: ``def compute(self, data): ...``
+         - BLOCKED: ``@property def my_prop(self): ...``
+         - BLOCKED: ``self._custom_var = value`` in __init__
+     """
+
+     CUSTOM_METHOD = "custom_method"
+     CUSTOM_PROPERTY = "custom_property"
+     INIT_CUSTOM_LOGIC = "init_custom_logic"
+     INSTANCE_VARIABLE = "instance_variable"
+     CLASS_VARIABLE = "class_variable"
+     SYNTAX_ERROR = "syntax_error"
+     NO_NODE_CLASS = "no_node_class"
+
+     @property
+     def is_exemptable(self) -> bool:
+         """Check if this violation type can be exempted.
+
+         Some violations like SYNTAX_ERROR cannot be exempted because
+         they indicate fundamental issues with the source file.
+
+         Returns:
+             True if the violation type can be exempted via decorator or comment.
+         """
+         return self not in {
+             EnumDeclarativeNodeViolation.SYNTAX_ERROR,
+             EnumDeclarativeNodeViolation.NO_NODE_CLASS,
+         }
+
+     @property
+     def suggestion(self) -> str:
+         """Get the suggested fix for this violation type.
+
+         Returns:
+             Human-readable suggestion for fixing the violation.
+         """
+         suggestions = {
+             EnumDeclarativeNodeViolation.CUSTOM_METHOD: (
+                 "Move business logic to a Handler class. Node classes must be "
+                 "declarative - all behavior should be defined in contract.yaml "
+                 "and implemented by handlers."
+             ),
+             EnumDeclarativeNodeViolation.CUSTOM_PROPERTY: (
+                 "Remove property from node class. Node classes should not have "
+                 "custom properties - use container dependency injection instead."
+             ),
+             EnumDeclarativeNodeViolation.INIT_CUSTOM_LOGIC: (
+                 "Remove custom logic from __init__. The __init__ method should "
+                 "only call super().__init__(container). All initialization should "
+                 "be handled by the base class and contract.yaml."
+             ),
+             EnumDeclarativeNodeViolation.INSTANCE_VARIABLE: (
+                 "Remove instance variable assignment from __init__. Node classes "
+                 "should not store state - use container injection and handlers."
+             ),
+             EnumDeclarativeNodeViolation.CLASS_VARIABLE: (
+                 "Remove class variable from node class. Configuration should be "
+                 "in contract.yaml, not Python code."
+             ),
+             EnumDeclarativeNodeViolation.SYNTAX_ERROR: (
+                 "Fix the Python syntax error before validation can proceed."
+             ),
+             EnumDeclarativeNodeViolation.NO_NODE_CLASS: (
+                 "File is named node.py but does not contain a Node class. "
+                 "Either add a Node class or rename the file."
+             ),
+         }
+         return suggestions.get(self, "Make the node class declarative.")
+
+
+ __all__ = ["EnumDeclarativeNodeViolation"]
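
For orientation, a minimal illustrative sketch (not part of the package) of how this new enum can be consumed; only `EnumDeclarativeNodeViolation` and its properties come from the diff above, the surrounding script is hypothetical:

```python
# Hypothetical consumer of the new enum; member values and properties match the diff above.
from omnibase_infra.enums import EnumDeclarativeNodeViolation

violation = EnumDeclarativeNodeViolation.CUSTOM_METHOD
print(violation.value)          # "custom_method"
print(violation.is_exemptable)  # True: not SYNTAX_ERROR or NO_NODE_CLASS
print(violation.suggestion)     # "Move business logic to a Handler class. ..."

# Files that cannot be parsed cannot be exempted at all.
assert EnumDeclarativeNodeViolation.SYNTAX_ERROR.is_exemptable is False
```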
@@ -0,0 +1,31 @@
+ # SPDX-License-Identifier: MIT
+ # Copyright (c) 2025 OmniNode Team
+ """Event bus adapters implementing protocol interfaces.
+
+ This module provides adapters that bridge event bus implementations to protocol
+ interfaces defined in omnibase_spi.
+
+ Available Adapters:
+ - AdapterProtocolEventPublisherKafka: Kafka implementation of ProtocolEventPublisher
+
+ Usage:
+     ```python
+     from omnibase_infra.event_bus.adapters import AdapterProtocolEventPublisherKafka
+     from omnibase_infra.event_bus import EventBusKafka
+
+     bus = EventBusKafka.default()
+     await bus.start()
+
+     adapter = AdapterProtocolEventPublisherKafka(bus=bus, service_name="my-service")
+     success = await adapter.publish(
+         event_type="user.created.v1",
+         payload={"user_id": "123"},
+     )
+     ```
+ """
+
+ from omnibase_infra.event_bus.adapters.adapter_protocol_event_publisher_kafka import (
+     AdapterProtocolEventPublisherKafka,
+ )
+
+ __all__: list[str] = ["AdapterProtocolEventPublisherKafka"]
@@ -0,0 +1,517 @@
+ # SPDX-License-Identifier: MIT
+ # Copyright (c) 2025 OmniNode Team
+ """Kafka adapter implementing ProtocolEventPublisher for production event publishing.
+
+ This adapter wraps EventBusKafka to implement the ProtocolEventPublisher protocol
+ from omnibase_spi. It provides a standard interface for event publishing while
+ delegating resilience (circuit breaker, retry) to the underlying EventBusKafka.
+
+ Key Design Decisions:
+     - NO double circuit breaker: Resilience is delegated to EventBusKafka
+     - Publish semantics: All Infra* exceptions are caught and return False
+     - DLQ metric: Always 0 (publish path doesn't use DLQ)
+     - Topic routing: explicit topic parameter takes precedence over event_type
+     - Partition key: UTF-8 encoded to bytes per SPI specification
+
+ Usage:
+     ```python
+     from omnibase_core.container import ModelONEXContainer
+     from omnibase_infra.event_bus import EventBusKafka
+     from omnibase_infra.event_bus.adapters import AdapterProtocolEventPublisherKafka
+
+     container = ModelONEXContainer()
+     bus = EventBusKafka.default()
+     await bus.start()
+
+     adapter = AdapterProtocolEventPublisherKafka(
+         container=container,
+         bus=bus,
+         service_name="my-service",
+     )
+
+     success = await adapter.publish(
+         event_type="omninode.user.event.created.v1",
+         payload={"user_id": "usr-123"},
+         correlation_id="corr-456",
+     )
+
+     metrics = await adapter.get_metrics()
+     await adapter.close()
+     ```
+
+ References:
+     - ProtocolEventPublisher: omnibase_spi.protocols.event_bus.protocol_event_publisher
+     - EventBusKafka: omnibase_infra.event_bus.event_bus_kafka
+     - Parent ticket: OMN-1764
+ """
+
+ from __future__ import annotations
+
+ import asyncio
+ import json
+ import logging
+ from datetime import UTC, datetime
+ from typing import TYPE_CHECKING, cast
+ from uuid import UUID, uuid4
+
+ from omnibase_core.container import ModelONEXContainer
+ from omnibase_core.models.core.model_envelope_metadata import ModelEnvelopeMetadata
+ from omnibase_core.models.events.model_event_envelope import ModelEventEnvelope
+ from omnibase_core.types import JsonType
+ from omnibase_infra.enums import EnumInfraTransportType
+ from omnibase_infra.errors import InfraUnavailableError
+
+ # TODO: OMN-1767 - Move ModelPublisherMetrics out of testing/ directory
+ from omnibase_infra.event_bus.testing.model_publisher_metrics import (
+     ModelPublisherMetrics,
+ )
+ from omnibase_infra.models.errors import ModelInfraErrorContext
+
+ if TYPE_CHECKING:
+     from omnibase_infra.event_bus.event_bus_kafka import EventBusKafka
+     from omnibase_infra.types.typed_dict import TypedDictEnvelopeBuildParams
+     from omnibase_spi.protocols.types.protocol_core_types import ContextValue
+
+ logger = logging.getLogger(__name__)
+
+ DEFAULT_CLOSE_TIMEOUT_SECONDS: float = 30.0
+
+
+ class AdapterProtocolEventPublisherKafka:
+     """Kafka adapter implementing ProtocolEventPublisher bridged to EventBusKafka.
+
+     This adapter provides production-grade event publishing by wrapping EventBusKafka
+     and implementing the ProtocolEventPublisher interface from omnibase_spi.
+
+     Key Design Decisions:
+         - Delegates resilience to EventBusKafka (no additional circuit breaker)
+         - Uses ModelEventEnvelope for canonical envelope format
+         - Preserves all correlation tracking (correlation_id, causation_id)
+         - Stores causation_id in metadata.tags since ModelEventEnvelope doesn't have
+           a dedicated field for it
+         - Topic routing: explicit topic parameter takes precedence over event_type
+         - partition_key is encoded to UTF-8 bytes as per SPI specification
+         - All exceptions during publish are caught and return False
+
+     Circuit Breaker Exemption:
+         This adapter intentionally does NOT inherit MixinAsyncCircuitBreaker.
+         Resilience (circuit breaker, retry with exponential backoff) is delegated
+         to the underlying EventBusKafka instance to avoid the "double circuit breaker"
+         anti-pattern. See docs/patterns/dispatcher_resilience.md for details on
+         dispatcher-owned resilience patterns.
+
+     Attributes:
+         container: The ONEX container for dependency injection.
+         service_name: Service identifier included in envelope metadata.
+         instance_id: Instance identifier for envelope source tracking.
+
+     Example:
+         ```python
+         from omnibase_core.container import ModelONEXContainer
+
+         container = ModelONEXContainer()
+         bus = EventBusKafka.default()
+         await bus.start()
+
+         adapter = AdapterProtocolEventPublisherKafka(
+             container=container,
+             bus=bus,
+             service_name="my-service",
+             instance_id="instance-001",
+         )
+
+         success = await adapter.publish(
+             event_type="user.created",
+             payload={"id": "123"},
+             correlation_id="corr-abc",
+         )
+         assert success is True
+         ```
+     """
+
+     def __init__(
+         self,
+         container: ModelONEXContainer,
+         bus: EventBusKafka | None = None,
+         service_name: str = "kafka-publisher",
+         instance_id: str | None = None,
+     ) -> None:
+         """Initialize the adapter with a container for dependency injection.
+
+         Args:
+             container: The ONEX container for dependency injection.
+             bus: Optional EventBusKafka instance. If not provided, must be
+                 resolved from container or set via set_bus() before publishing.
+             service_name: Service name for envelope metadata. Defaults to
+                 "kafka-publisher".
+             instance_id: Optional instance identifier. Defaults to a generated UUID.
+
+         Note:
+             Either provide `bus` directly or ensure EventBusKafka is registered
+             in the container's service registry. If neither is available at
+             publish time, InfraUnavailableError will be raised.
+         """
+         self._container = container
+         self._bus: EventBusKafka | None = bus
+         self._service_name = service_name
+         self._instance_id = instance_id or str(uuid4())
+         self._metrics = ModelPublisherMetrics()
+         self._metrics_lock = asyncio.Lock()
+         self._closed = False
+
+     @property
+     def is_closed(self) -> bool:
+         """Return whether the adapter has been closed.
+
+         Allows callers to check adapter state without attempting a publish.
+         """
+         return self._closed
+
+     def _get_bus(self) -> EventBusKafka:
+         """Get the underlying EventBusKafka instance.
+
+         Returns the bus if it was provided at construction time.
+
+         Returns:
+             The EventBusKafka instance.
+
+         Raises:
+             InfraUnavailableError: If no bus is available.
+         """
+         if self._bus is None:
+             context = ModelInfraErrorContext.with_correlation(
+                 transport_type=EnumInfraTransportType.KAFKA,
+                 operation="get_bus",
+             )
+             raise InfraUnavailableError(
+                 "No EventBusKafka available. Provide bus at construction time.",
+                 context=context,
+             )
+         return self._bus
+
+     async def publish(
+         self,
+         event_type: str,
+         payload: JsonType,
+         correlation_id: str | None = None,
+         causation_id: str | None = None,
+         metadata: dict[str, ContextValue] | None = None,
+         topic: str | None = None,
+         partition_key: str | None = None,
+     ) -> bool:
+         """Publish event with canonical ModelEventEnvelope serialization.
+
+         Builds a ModelEventEnvelope from the provided parameters, serializes to JSON,
+         and publishes to the underlying EventBusKafka.
+
+         Topic Routing:
+             1. If `topic` is provided, use it directly (explicit override).
+             2. Otherwise, derive topic from `event_type` (default routing).
+
+         Correlation Tracking:
+             - correlation_id: Stored in envelope.correlation_id
+             - causation_id: Stored in envelope.metadata.tags["causation_id"]
+             - Both IDs are preserved through serialization for full traceability
+
+         Error Handling:
+             All exceptions are caught and logged. On any failure, returns False
+             without propagating the exception. This design allows callers to
+             implement their own retry/fallback logic.
+
+         Args:
+             event_type: Fully-qualified event type (e.g., "omninode.user.event.created.v1").
+             payload: Event payload data (dict, list, or primitive JSON types).
+             correlation_id: Optional correlation ID for request tracing.
+             causation_id: Optional causation ID for event sourcing chains.
+             metadata: Optional additional metadata as context values.
+             topic: Optional explicit topic override. When None, uses event_type as topic.
+             partition_key: Optional partition key for message ordering.
+
+         Returns:
+             True if published successfully, False otherwise.
+
+         Raises:
+             InfraUnavailableError: If adapter has been closed.
+         """
+         if self._closed:
+             context = ModelInfraErrorContext.with_correlation(
+                 transport_type=EnumInfraTransportType.KAFKA,
+                 operation="publish",
+             )
+             raise InfraUnavailableError("Publisher has been closed", context=context)
+
+         start_time = datetime.now(UTC)
+
+         try:
+             # Build envelope - parameters passed as dict to comply with ONEX parameter limit
+             envelope = self._build_envelope(
+                 {
+                     "event_type": event_type,
+                     "payload": payload,
+                     "correlation_id": correlation_id,
+                     "causation_id": causation_id,
+                     "metadata": metadata,
+                 }
+             )
+
+             # Determine target topic: explicit topic > event_type
+             target_topic = topic if topic is not None else event_type
+
+             # Encode partition key to bytes (UTF-8 canonical encoding)
+             key_bytes: bytes | None = None
+             if partition_key is not None:
+                 key_bytes = partition_key.encode("utf-8")
+
+             # Serialize envelope to JSON bytes
+             envelope_dict = envelope.model_dump(mode="json")
+             value_bytes = json.dumps(envelope_dict).encode("utf-8")
+
+             # Publish to underlying bus
+             bus = self._get_bus()
+             await bus.publish(
+                 topic=target_topic,
+                 key=key_bytes,
+                 value=value_bytes,
+             )
+
+             # Update success metrics (coroutine-safe)
+             elapsed_ms = (datetime.now(UTC) - start_time).total_seconds() * 1000
+             async with self._metrics_lock:
+                 self._metrics.events_published += 1
+                 self._metrics.total_publish_time_ms += elapsed_ms
+                 self._metrics.avg_publish_time_ms = (
+                     self._metrics.total_publish_time_ms / self._metrics.events_published
+                 )
+                 self._metrics.current_failures = 0
+
+             logger.debug(
+                 "Event published successfully",
+                 extra={
+                     "event_type": event_type,
+                     "topic": target_topic,
+                     "correlation_id": correlation_id,
+                     "elapsed_ms": elapsed_ms,
+                 },
+             )
+
+             return True
+
+         except Exception as e:
+             # NOTE: Intentionally broad exception catch.
+             # Kafka adapter catches ALL exceptions (including Infra* errors from
+             # EventBusKafka) and returns False rather than propagating. This design
+             # allows callers to implement their own retry/fallback logic without
+             # needing to handle infrastructure-specific exception types.
+             # Update failure metrics (coroutine-safe)
+             async with self._metrics_lock:
+                 self._metrics.events_failed += 1
+                 self._metrics.current_failures += 1
+
+             logger.exception(
+                 "Failed to publish event",
+                 extra={
+                     "event_type": event_type,
+                     "topic": topic or event_type,
+                     "correlation_id": correlation_id,
+                     "error": str(e),
+                     "error_type": type(e).__name__,
+                 },
+             )
+
+             return False
+
+     def _build_envelope(
+         self,
+         params: TypedDictEnvelopeBuildParams,
+     ) -> ModelEventEnvelope[JsonType]:
+         """Build a ModelEventEnvelope from publish parameters.
+
+         Args:
+             params: Dictionary containing:
+                 - event_type: The event type identifier
+                 - payload: The event payload
+                 - correlation_id: Optional correlation ID
+                 - causation_id: Optional causation ID
+                 - metadata: Optional additional metadata
+
+         Returns:
+             Configured ModelEventEnvelope ready for serialization.
+         """
+         event_type = str(params["event_type"])
+         payload = cast("JsonType", params["payload"])
+         correlation_id = params.get("correlation_id")
+         causation_id = params.get("causation_id")
+         metadata = params.get("metadata")
+
+         # Convert correlation_id string to UUID if provided
+         corr_uuid: UUID | None = None
+         if correlation_id is not None:
+             try:
+                 corr_uuid = UUID(str(correlation_id))
+             except ValueError:
+                 # If not a valid UUID, generate one and log the original for debugging
+                 corr_uuid = uuid4()
+                 logger.warning(
+                     "correlation_id is not a valid UUID, generating new UUID (original logged)",
+                     extra={
+                         "original_correlation_id": correlation_id,
+                         "generated_uuid": str(corr_uuid),
+                     },
+                 )
+
+         # Build metadata tags
+         tags: dict[str, str] = {
+             "event_type": event_type,
+             "service_name": self._service_name,
+             "instance_id": self._instance_id,
+         }
+
+         # Store causation_id in tags (ModelEventEnvelope doesn't have dedicated field)
+         if causation_id is not None:
+             tags["causation_id"] = str(causation_id)
+
+         # Merge additional metadata context values into tags
+         if metadata is not None:
+             for key, value in metadata.items():
+                 # Context values may have a serialize_for_context method or be simple types
+                 if hasattr(value, "serialize_for_context"):
+                     serialized = value.serialize_for_context()
+                     tags[key] = json.dumps(serialized)
+                 elif hasattr(value, "value"):
+                     # ProtocolContext*Value types have a value attribute
+                     tags[key] = str(value.value)
+                 else:
+                     tags[key] = str(value)
+
+         envelope_metadata = ModelEnvelopeMetadata(tags=tags)
+
+         # Build the envelope
+         envelope: ModelEventEnvelope[JsonType] = ModelEventEnvelope(
+             payload=payload,
+             correlation_id=corr_uuid,
+             source_tool=f"{self._service_name}.{self._instance_id}",
+             metadata=envelope_metadata,
+         )
+
+         return envelope
+
+     async def get_metrics(self) -> JsonType:
+         """Get publisher metrics including circuit breaker status from underlying bus.
+
+         Reads the circuit breaker state from the underlying EventBusKafka to
+         provide accurate resilience metrics.
+
+         Returns:
+             Dictionary with metrics including:
+                 - events_published: Total successful publishes
+                 - events_failed: Total failed publishes
+                 - events_sent_to_dlq: Always 0 (publish path doesn't use DLQ)
+                 - total_publish_time_ms: Cumulative publish time
+                 - avg_publish_time_ms: Average publish latency
+                 - circuit_breaker_opens: Current failure count from underlying bus circuit breaker
+                   (Note: reflects current failures, not cumulative open events)
+                 - retries_attempted: Count from underlying bus (if available)
+                 - circuit_breaker_status: Current state from underlying bus
+                 - current_failures: Current consecutive failure count
+         """
+         # Read circuit breaker state from underlying bus with defensive error handling
+         # EventBusKafka inherits MixinAsyncCircuitBreaker which provides get_circuit_breaker_state()
+         try:
+             if self._bus is not None:
+                 cb_state = self._bus.get_circuit_breaker_state()
+             else:
+                 cb_state = {"state": "unknown", "failures": 0}
+         except Exception as e:
+             # If bus is closed or unavailable, return safe defaults
+             # Log at debug level for observability without flooding logs
+             logger.debug(
+                 "Unable to read circuit breaker state from bus, using defaults",
+                 extra={
+                     "error": str(e),
+                     "error_type": type(e).__name__,
+                     "service_name": self._service_name,
+                 },
+             )
+             cb_state = {"state": "unknown", "failures": 0}
+
+         # Extract values with safe type handling for JsonType
+         state_value = cb_state.get("state", "unknown")
+         failures_value = cb_state.get("failures", 0)
+
+         # Update and return metrics (coroutine-safe)
+         # Note: failures represents current consecutive failures, not cumulative opens
+         async with self._metrics_lock:
+             self._metrics.circuit_breaker_status = str(state_value)
+             self._metrics.circuit_breaker_opens = (
+                 int(failures_value) if isinstance(failures_value, (int, float)) else 0
+             )
+             return self._metrics.to_dict()
+
+     async def reset_metrics(self) -> None:
+         """Reset all publisher metrics to initial values.
+
+         Useful for test isolation when reusing an adapter across multiple
+         test cases without recreating the adapter instance.
+
+         Note:
+             This method does NOT affect the closed state of the adapter.
+             If the adapter has been closed, it remains closed after reset.
+
+         Example:
+             ```python
+             adapter = AdapterProtocolEventPublisherKafka(container=container, bus=bus)
+             await adapter.publish(...)  # metrics.events_published = 1
+
+             await adapter.reset_metrics()  # metrics.events_published = 0
+             await adapter.publish(...)  # metrics.events_published = 1
+             ```
+         """
+         async with self._metrics_lock:
+             self._metrics = ModelPublisherMetrics()
+         logger.debug(
+             "Publisher metrics reset",
+             extra={
+                 "service_name": self._service_name,
+                 "instance_id": self._instance_id,
+             },
+         )
+
+     async def close(
+         self, timeout_seconds: float = DEFAULT_CLOSE_TIMEOUT_SECONDS
+     ) -> None:
+         """Close the publisher and release resources.
+
+         Marks the adapter as closed and stops the underlying EventBusKafka.
+         After closing, any calls to publish() will raise InfraUnavailableError.
+
+         Args:
+             timeout_seconds: Timeout for cleanup operations. Currently unused
+                 (EventBusKafka.close() manages its own timeout). Included for
+                 ProtocolEventPublisher interface compliance.
+         """
+         self._closed = True
+
+         # Close the underlying bus if available
+         if self._bus is not None:
+             try:
+                 await self._bus.close()
+             except Exception as e:
+                 logger.warning(
+                     "Error closing underlying EventBusKafka",
+                     extra={
+                         "service_name": self._service_name,
+                         "instance_id": self._instance_id,
+                         "error": str(e),
+                     },
+                 )
+
+         logger.info(
+             "AdapterProtocolEventPublisherKafka closed",
+             extra={
+                 "service_name": self._service_name,
+                 "instance_id": self._instance_id,
+             },
+         )
+
+
+ __all__: list[str] = ["AdapterProtocolEventPublisherKafka"]
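
Because publish() swallows transport errors and returns False rather than raising, callers own their own retry and fallback policy. A minimal sketch of caller-side retry, assuming an already-constructed adapter; the event type, backoff values, and helper name are illustrative and not part of the package:

```python
# Hypothetical caller-side retry around adapter.publish(); only the publish()
# parameters (event_type, payload, partition_key) come from the diff above.
import asyncio


async def publish_with_retry(adapter, payload: dict, attempts: int = 3) -> bool:
    for attempt in range(attempts):
        ok = await adapter.publish(
            event_type="omninode.user.event.created.v1",  # also used as topic when no override is given
            payload=payload,
            partition_key=str(payload.get("user_id", "")),  # keeps one user's events ordered
        )
        if ok:
            return True
        await asyncio.sleep(0.5 * 2**attempt)  # simple exponential backoff between attempts
    return False
```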