omnibase_infra 0.2.8__py3-none-any.whl → 0.2.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. omnibase_infra/__init__.py +1 -1
  2. omnibase_infra/enums/__init__.py +4 -0
  3. omnibase_infra/enums/enum_declarative_node_violation.py +102 -0
  4. omnibase_infra/event_bus/adapters/__init__.py +31 -0
  5. omnibase_infra/event_bus/adapters/adapter_protocol_event_publisher_kafka.py +517 -0
  6. omnibase_infra/mixins/mixin_async_circuit_breaker.py +113 -1
  7. omnibase_infra/models/__init__.py +9 -0
  8. omnibase_infra/models/event_bus/__init__.py +22 -0
  9. omnibase_infra/models/event_bus/model_consumer_retry_config.py +367 -0
  10. omnibase_infra/models/event_bus/model_dlq_config.py +177 -0
  11. omnibase_infra/models/event_bus/model_idempotency_config.py +131 -0
  12. omnibase_infra/models/event_bus/model_offset_policy_config.py +107 -0
  13. omnibase_infra/models/resilience/model_circuit_breaker_config.py +15 -0
  14. omnibase_infra/models/validation/__init__.py +8 -0
  15. omnibase_infra/models/validation/model_declarative_node_validation_result.py +139 -0
  16. omnibase_infra/models/validation/model_declarative_node_violation.py +169 -0
  17. omnibase_infra/nodes/architecture_validator/__init__.py +28 -7
  18. omnibase_infra/nodes/architecture_validator/constants.py +36 -0
  19. omnibase_infra/nodes/architecture_validator/handlers/__init__.py +28 -0
  20. omnibase_infra/nodes/architecture_validator/handlers/contract.yaml +120 -0
  21. omnibase_infra/nodes/architecture_validator/handlers/handler_architecture_validation.py +359 -0
  22. omnibase_infra/nodes/architecture_validator/node.py +1 -0
  23. omnibase_infra/nodes/architecture_validator/node_architecture_validator.py +48 -336
  24. omnibase_infra/nodes/node_ledger_projection_compute/__init__.py +16 -2
  25. omnibase_infra/nodes/node_ledger_projection_compute/contract.yaml +14 -4
  26. omnibase_infra/nodes/node_ledger_projection_compute/handlers/__init__.py +18 -0
  27. omnibase_infra/nodes/node_ledger_projection_compute/handlers/contract.yaml +53 -0
  28. omnibase_infra/nodes/node_ledger_projection_compute/handlers/handler_ledger_projection.py +354 -0
  29. omnibase_infra/nodes/node_ledger_projection_compute/node.py +20 -256
  30. omnibase_infra/nodes/node_registry_effect/node.py +20 -73
  31. omnibase_infra/protocols/protocol_dispatch_engine.py +90 -0
  32. omnibase_infra/runtime/__init__.py +11 -0
  33. omnibase_infra/runtime/baseline_subscriptions.py +150 -0
  34. omnibase_infra/runtime/event_bus_subcontract_wiring.py +455 -24
  35. omnibase_infra/runtime/kafka_contract_source.py +13 -5
  36. omnibase_infra/runtime/service_message_dispatch_engine.py +112 -0
  37. omnibase_infra/runtime/service_runtime_host_process.py +6 -11
  38. omnibase_infra/services/__init__.py +36 -0
  39. omnibase_infra/services/contract_publisher/__init__.py +95 -0
  40. omnibase_infra/services/contract_publisher/config.py +199 -0
  41. omnibase_infra/services/contract_publisher/errors.py +243 -0
  42. omnibase_infra/services/contract_publisher/models/__init__.py +28 -0
  43. omnibase_infra/services/contract_publisher/models/model_contract_error.py +67 -0
  44. omnibase_infra/services/contract_publisher/models/model_infra_error.py +62 -0
  45. omnibase_infra/services/contract_publisher/models/model_publish_result.py +112 -0
  46. omnibase_infra/services/contract_publisher/models/model_publish_stats.py +79 -0
  47. omnibase_infra/services/contract_publisher/service.py +617 -0
  48. omnibase_infra/services/contract_publisher/sources/__init__.py +52 -0
  49. omnibase_infra/services/contract_publisher/sources/model_discovered.py +155 -0
  50. omnibase_infra/services/contract_publisher/sources/protocol.py +101 -0
  51. omnibase_infra/services/contract_publisher/sources/source_composite.py +309 -0
  52. omnibase_infra/services/contract_publisher/sources/source_filesystem.py +174 -0
  53. omnibase_infra/services/contract_publisher/sources/source_package.py +221 -0
  54. omnibase_infra/services/observability/__init__.py +40 -0
  55. omnibase_infra/services/observability/agent_actions/__init__.py +64 -0
  56. omnibase_infra/services/observability/agent_actions/config.py +209 -0
  57. omnibase_infra/services/observability/agent_actions/consumer.py +1320 -0
  58. omnibase_infra/services/observability/agent_actions/models/__init__.py +87 -0
  59. omnibase_infra/services/observability/agent_actions/models/model_agent_action.py +142 -0
  60. omnibase_infra/services/observability/agent_actions/models/model_detection_failure.py +125 -0
  61. omnibase_infra/services/observability/agent_actions/models/model_envelope.py +85 -0
  62. omnibase_infra/services/observability/agent_actions/models/model_execution_log.py +159 -0
  63. omnibase_infra/services/observability/agent_actions/models/model_performance_metric.py +130 -0
  64. omnibase_infra/services/observability/agent_actions/models/model_routing_decision.py +138 -0
  65. omnibase_infra/services/observability/agent_actions/models/model_transformation_event.py +124 -0
  66. omnibase_infra/services/observability/agent_actions/tests/__init__.py +20 -0
  67. omnibase_infra/services/observability/agent_actions/tests/test_consumer.py +1154 -0
  68. omnibase_infra/services/observability/agent_actions/tests/test_models.py +645 -0
  69. omnibase_infra/services/observability/agent_actions/tests/test_writer.py +709 -0
  70. omnibase_infra/services/observability/agent_actions/writer_postgres.py +926 -0
  71. omnibase_infra/validation/__init__.py +12 -0
  72. omnibase_infra/validation/contracts/declarative_node.validation.yaml +143 -0
  73. omnibase_infra/validation/validation_exemptions.yaml +93 -0
  74. omnibase_infra/validation/validator_declarative_node.py +850 -0
  75. {omnibase_infra-0.2.8.dist-info → omnibase_infra-0.2.9.dist-info}/METADATA +2 -2
  76. {omnibase_infra-0.2.8.dist-info → omnibase_infra-0.2.9.dist-info}/RECORD +79 -27
  77. {omnibase_infra-0.2.8.dist-info → omnibase_infra-0.2.9.dist-info}/WHEEL +0 -0
  78. {omnibase_infra-0.2.8.dist-info → omnibase_infra-0.2.9.dist-info}/entry_points.txt +0 -0
  79. {omnibase_infra-0.2.8.dist-info → omnibase_infra-0.2.9.dist-info}/licenses/LICENSE +0 -0
omnibase_infra/nodes/node_ledger_projection_compute/handlers/handler_ledger_projection.py (new file)
@@ -0,0 +1,354 @@
+ # SPDX-License-Identifier: MIT
+ # Copyright (c) 2026 OmniNode Team
+ """Handler for ledger projection - transforms ModelEventMessage to ModelIntent.
+
+ This handler encapsulates the compute logic for projecting platform events
+ to ledger append intents. It follows the ONEX declarative pattern where
+ all business logic resides in handlers, not nodes.
+
+ Design Rationale - Best-Effort Metadata Extraction:
+ The audit ledger serves as the system's source of truth. Events must NEVER
+ be dropped due to metadata extraction failures. All metadata fields are
+ extracted best-effort - parsing errors result in None/empty values, not
+ exceptions. Only a missing event_value (the raw bytes) causes an error.
+
+ Bytes Encoding:
+ Kafka event keys and values are bytes. Since bytes cannot safely cross
+ intent boundaries (serialization issues), they are base64-encoded at this
+ transform layer. The Effect layer decodes before storage.
+
+ Ticket: OMN-1648, OMN-1726
+ """
+
+ from __future__ import annotations
+
+ import base64
+ import logging
+ from typing import TYPE_CHECKING
+ from uuid import UUID, uuid4
+
+ from omnibase_core.errors import OnexError
+ from omnibase_core.models.dispatch import ModelHandlerOutput
+ from omnibase_core.models.reducer.model_intent import ModelIntent
+ from omnibase_core.types import JsonType
+ from omnibase_infra.enums import (
+     EnumHandlerType,
+     EnumHandlerTypeCategory,
+     EnumResponseStatus,
+ )
+ from omnibase_infra.event_bus.models.model_event_headers import ModelEventHeaders
+ from omnibase_infra.event_bus.models.model_event_message import ModelEventMessage
+ from omnibase_infra.nodes.reducers.models.model_payload_ledger_append import (
+     ModelPayloadLedgerAppend,
+ )
+
+ if TYPE_CHECKING:
+     from omnibase_core.container import ModelONEXContainer
+
+ logger = logging.getLogger(__name__)
+
+ # Handler ID for ModelHandlerOutput
+ HANDLER_ID_LEDGER_PROJECTION: str = "ledger-projection-handler"
+
+
+ class HandlerLedgerProjection:
+     """Handler that transforms platform events to ledger append intents.
+
+     This handler implements the compute logic for the ledger projection node,
+     extracting metadata from ModelEventMessage and producing ModelIntent with
+     ModelPayloadLedgerAppend payloads.
+
+     CRITICAL INVARIANTS:
+     - NEVER drop events due to metadata extraction failure
+     - event_value is REQUIRED (raises OnexError if None)
+     - correlation_id and other metadata are OPTIONAL
+     - Best-effort extraction - parsing errors yield None, not exceptions
+
+     Attributes:
+         handler_type: EnumHandlerType.COMPUTE_HANDLER
+         handler_category: EnumHandlerTypeCategory.COMPUTE
+
+     Example:
+         >>> handler = HandlerLedgerProjection(container)
+         >>> message = ModelEventMessage(
+         ...     topic="agent.routing.completed.v1",
+         ...     value=b'{"agent": "code-quality"}',
+         ...     headers=headers,
+         ...     partition=0,
+         ...     offset="42",
+         ... )
+         >>> result = await handler.execute({"payload": message.model_dump()})
+         >>> # result.result contains the ModelIntent with ledger.append payload
+     """
+
+     def __init__(self, container: ModelONEXContainer) -> None:
+         """Initialize the ledger projection handler.
+
+         Args:
+             container: ONEX dependency injection container.
+         """
+         self._container = container
+         self._initialized: bool = False
+
+     @property
+     def handler_type(self) -> EnumHandlerType:
+         """Return the architectural role of this handler.
+
+         Returns:
+             EnumHandlerType.COMPUTE_HANDLER - This handler performs pure
+             computation (event transformation) without side effects.
+         """
+         return EnumHandlerType.COMPUTE_HANDLER
+
+     @property
+     def handler_category(self) -> EnumHandlerTypeCategory:
+         """Return the behavioral classification of this handler.
+
+         Returns:
+             EnumHandlerTypeCategory.COMPUTE - This handler performs pure,
+             deterministic transformations without side effects.
+         """
+         return EnumHandlerTypeCategory.COMPUTE
+
+     async def initialize(self, config: dict[str, object]) -> None:
+         """Initialize the handler.
+
+         Args:
+             config: Configuration dict (currently unused).
+         """
+         self._initialized = True
+         logger.info(
+             "%s initialized successfully",
+             self.__class__.__name__,
+             extra={"handler": self.__class__.__name__},
+         )
+
+     async def shutdown(self) -> None:
+         """Shutdown the handler."""
+         self._initialized = False
+         logger.info("HandlerLedgerProjection shutdown complete")
+
+     def project(self, message: ModelEventMessage) -> ModelIntent:
+         """Transform event message to ledger append intent.
+
+         Extracts metadata from the incoming Kafka event and produces a
+         ModelIntent with a ModelPayloadLedgerAppend payload for the Effect
+         layer to persist to PostgreSQL.
+
+         Args:
+             message: The incoming Kafka event message to transform.
+
+         Returns:
+             ModelIntent with intent_type="extension" containing the ledger
+             append payload for the Effect layer.
+
+         Raises:
+             OnexError: If message.value is None (event body is required).
+
+         INVARIANTS:
+         - Never drop events due to metadata extraction failure
+         - event_value is REQUIRED (raises OnexError if None)
+         - correlation_id is optional
+         """
+         payload = self._extract_ledger_metadata(message)
+         return ModelIntent(
+             intent_type="extension",
+             target=f"postgres://event_ledger/{payload.topic}/{payload.partition}/{payload.kafka_offset}",
+             payload=payload,
+         )
+
+     async def execute(
+         self,
+         envelope: dict[str, object],
+     ) -> ModelHandlerOutput[ModelIntent]:
+         """Execute ledger projection from envelope (ProtocolHandler interface).
+
+         This method provides the standard handler interface for contract-driven
+         invocation. It extracts the ModelEventMessage from the envelope and
+         delegates to the project() method.
+
+         Args:
+             envelope: Request envelope containing:
+                 - operation: "ledger.project"
+                 - payload: ModelEventMessage as dict
+                 - correlation_id: Optional correlation ID
+
+         Returns:
+             ModelHandlerOutput wrapping ModelIntent.
+
+         Raises:
+             OnexError: If message.value is None.
+             RuntimeError: If payload is missing or invalid.
+         """
+         correlation_id_raw = envelope.get("correlation_id")
+         correlation_id = (
+             UUID(str(correlation_id_raw)) if correlation_id_raw else uuid4()
+         )
+         input_envelope_id = uuid4()
+
+         payload_raw = envelope.get("payload")
+         if not isinstance(payload_raw, dict):
+             raise RuntimeError("Missing or invalid 'payload' in envelope")
+
+         # Parse payload into typed model
+         message = ModelEventMessage.model_validate(payload_raw)
+
+         # Execute projection
+         intent = self.project(message)
+
+         return ModelHandlerOutput.for_compute(
+             input_envelope_id=input_envelope_id,
+             correlation_id=correlation_id,
+             handler_id=HANDLER_ID_LEDGER_PROJECTION,
+             result=intent,
+         )
+
+     def _b64(self, b: bytes | None) -> str | None:
+         """Base64 encode bytes, returning None for None input.
+
+         Args:
+             b: Bytes to encode, or None.
+
+         Returns:
+             Base64-encoded string, or None if input was None.
+         """
+         if b is None:
+             return None
+         return base64.b64encode(b).decode("ascii")
+
+     def _normalize_headers(
+         self, headers: ModelEventHeaders | None
+     ) -> dict[str, JsonType]:
+         """Convert event headers to JSON-safe dictionary.
+
+         Uses Pydantic's model_dump with mode="json" to ensure all values
+         are JSON-serializable. Returns empty dict for None input.
+
+         Args:
+             headers: Event headers model, or None.
+
+         Returns:
+             JSON-safe dictionary of header values, or empty dict.
+         """
+         if headers is None:
+             return {}
+         try:
+             return headers.model_dump(mode="json")
+         except Exception:
+             # Best-effort: try to get correlation_id for logging context
+             correlation_id = getattr(headers, "correlation_id", None)
+             logger.warning(
+                 "Failed to serialize event headers, returning empty dict. "
+                 "correlation_id=%s",
+                 correlation_id,
+                 exc_info=True,
+             )
+             return {}
+
+     def _parse_offset(
+         self, offset: str | None, correlation_id: UUID | None = None
+     ) -> int:
+         """Parse Kafka offset string to integer.
+
+         Args:
+             offset: Offset string from Kafka, or None.
+             correlation_id: Optional correlation ID for logging context.
+
+         Returns:
+             Parsed offset as integer, or 0 if None or unparseable.
+         """
+         if offset is None:
+             return 0
+         try:
+             return int(offset)
+         except (ValueError, TypeError):
+             logger.warning(
+                 "Failed to parse offset '%s' as integer, defaulting to 0. "
+                 "correlation_id=%s",
+                 offset,
+                 correlation_id,
+             )
+             return 0
+
+     def _extract_ledger_metadata(
+         self, message: ModelEventMessage
+     ) -> ModelPayloadLedgerAppend:
+         """Extract ledger metadata from event message.
+
+         Main extraction logic that transforms a ModelEventMessage into a
+         ModelPayloadLedgerAppend. Uses best-effort extraction for all
+         metadata fields - only event_value being None causes an error.
+
+         Args:
+             message: The event message to extract metadata from.
+
+         Returns:
+             Populated ledger append payload ready for the Effect layer.
+
+         Raises:
+             OnexError: If message.value is None.
+
+         Field Mapping:
+             | Payload Field    | Source                          | Required |
+             |------------------|---------------------------------|----------|
+             | topic            | message.topic                   | YES      |
+             | partition        | message.partition               | YES*     |
+             | kafka_offset     | message.offset                  | YES*     |
+             | event_key        | base64(message.key)             | NO       |
+             | event_value      | base64(message.value)           | YES      |
+             | correlation_id   | message.headers.correlation_id  | NO       |
+             | event_type       | message.headers.event_type      | NO       |
+             | source           | message.headers.source          | NO       |
+             | envelope_id      | message.headers.message_id      | NO       |
+             | event_timestamp  | message.headers.timestamp       | NO       |
+             | onex_headers     | headers.model_dump(mode="json") | NO       |
+
+             * Defaults to 0 if not available (for consumed messages, these
+               should always be present, but we handle None defensively).
+         """
+         # CRITICAL: event_value is required - this is the only case where we raise
+         if message.value is None:
+             raise OnexError(
+                 "Cannot create ledger entry: message.value is None. "
+                 "Event body is required for audit ledger persistence."
+             )
+
+         # Base64 encode the raw bytes
+         event_value_b64 = self._b64(message.value)
+         # Defensive check - _b64 should never return None for non-None input
+         if event_value_b64 is None:
+             raise OnexError(
+                 "Unexpected: base64 encoding of message.value returned None. "
+                 "This should never happen for non-None bytes input."
+             )
+
+         event_key_b64 = self._b64(message.key)
+
+         # Extract headers best-effort
+         headers = message.headers
+         # Extract correlation_id early for logging context in helper methods
+         correlation_id = headers.correlation_id if headers else None
+         onex_headers = self._normalize_headers(headers)
+
+         # Build payload with best-effort metadata extraction
+         return ModelPayloadLedgerAppend(
+             # Required Kafka position fields (defensive defaults for None)
+             topic=message.topic,
+             partition=message.partition if message.partition is not None else 0,
+             kafka_offset=self._parse_offset(
+                 message.offset, correlation_id=correlation_id
+             ),
+             # Raw event data as base64
+             event_key=event_key_b64,
+             event_value=event_value_b64,
+             # Extracted metadata (all optional, best-effort)
+             onex_headers=onex_headers,
+             correlation_id=correlation_id,
+             envelope_id=headers.message_id if headers else None,
+             event_type=headers.event_type if headers else None,
+             source=headers.source if headers else None,
+             event_timestamp=headers.timestamp if headers else None,
+         )
+
+
+ __all__ = ["HandlerLedgerProjection"]
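
The new handler never lets raw Kafka bytes cross the intent boundary: `_b64()` encodes `message.value` and `message.key` to base64 strings, and the "Bytes Encoding" note above states that the Effect layer decodes them before storage. A minimal, self-contained sketch of that round trip (the sample payload is illustrative, and the decode step is an assumption based on that note rather than code shown in this diff):

```python
import base64

# Raw Kafka event value as it would arrive in ModelEventMessage.value.
raw_value = b'{"agent": "code-quality"}'

# Transform layer (mirrors HandlerLedgerProjection._b64): encode to an
# ASCII-safe string so the payload can be serialized inside a ModelIntent.
event_value_b64 = base64.b64encode(raw_value).decode("ascii")

# Effect layer (assumed, per the module docstring): decode back to the
# original bytes before persisting to the event ledger.
assert base64.b64decode(event_value_b64) == raw_value
```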
omnibase_infra/nodes/node_ledger_projection_compute/node.py
@@ -1,25 +1,18 @@
  # SPDX-License-Identifier: MIT
  # Copyright (c) 2026 OmniNode Team
- """NodeLedgerProjectionCompute - Extracts metadata from platform events for ledger persistence.
+ """NodeLedgerProjectionCompute - Declarative COMPUTE node for ledger projection.

- This COMPUTE node transforms ModelEventMessage into ModelPayloadLedgerAppend wrapped
- in a ModelIntent for the Effect layer. It follows the ONEX declarative pattern:
- - DECLARATIVE node driven by contract.yaml
- - Subscribes to 7 platform topics via contract configuration
- - Transforms events to ledger append intents
+ This node extracts metadata from platform events for ledger persistence.
+ All business logic is delegated to HandlerLedgerProjection per ONEX
+ declarative node pattern.

- Design Rationale - Best-Effort Metadata Extraction:
- The audit ledger serves as the system's source of truth. Events must NEVER
- be dropped due to metadata extraction failures. All metadata fields are
- extracted best-effort - parsing errors result in None/empty values, not
- exceptions. Only a missing event_value (the raw bytes) causes an error.
+ Design Rationale:
+ ONEX nodes are declarative shells driven by contract.yaml. The node class
+ extends the appropriate archetype base class and contains no custom logic.
+ All compute behavior is defined in handlers configured via handler_routing
+ in the contract.

- Bytes Encoding:
- Kafka event keys and values are bytes. Since bytes cannot safely cross
- intent boundaries (serialization issues), they are base64-encoded at this
- transform layer. The Effect layer decodes before storage.
-
- Subscribed Topics:
+ Subscribed Topics (via contract.yaml):
  - onex.evt.platform.node-registration.v1
  - onex.evt.platform.node-introspection.v1
  - onex.evt.platform.node-heartbeat.v1
@@ -28,257 +21,28 @@ Subscribed Topics:
  - onex.intent.platform.runtime-tick.v1
  - onex.snapshot.platform.registration-snapshots.v1

- Ticket: OMN-1648
+ Ticket: OMN-1648, OMN-1726
  """

  from __future__ import annotations

- import base64
- import logging
- from typing import TYPE_CHECKING
-
- from omnibase_core.errors import OnexError
- from omnibase_core.models.reducer.model_intent import ModelIntent
  from omnibase_core.nodes.node_compute import NodeCompute
- from omnibase_core.types import JsonType
- from omnibase_infra.event_bus.models.model_event_headers import ModelEventHeaders
- from omnibase_infra.event_bus.models.model_event_message import ModelEventMessage
- from omnibase_infra.nodes.reducers.models.model_payload_ledger_append import (
-     ModelPayloadLedgerAppend,
- )
-
- if TYPE_CHECKING:
-     from uuid import UUID
-
-     from omnibase_core.container import ModelONEXContainer
-
- logger = logging.getLogger(__name__)


+ # ONEX_EXCLUDE: declarative_node - legacy compute node with projection logic (OMN-1725)
  class NodeLedgerProjectionCompute(NodeCompute):
-     """COMPUTE node that extracts metadata from platform events for ledger persistence.
-
-     Declarative node - subscribes to 7 platform topics via contract.yaml.
-     Transforms ModelEventMessage -> ModelPayloadLedgerAppend -> ModelIntent.
-
-     This node implements the ONEX ledger projection pattern:
-     1. Receives raw Kafka events as ModelEventMessage
-     2. Extracts metadata best-effort (never fails on parse errors)
-     3. Base64-encodes bytes for safe intent serialization
-     4. Emits ModelIntent with "ledger.append" payload for Effect layer
-
-     CRITICAL INVARIANTS:
-     - NEVER drop events due to metadata extraction failure
-     - event_value is REQUIRED (raises OnexError if None)
-     - correlation_id and other metadata are OPTIONAL
-     - Best-effort extraction - parsing errors yield None, not exceptions
-
-     Attributes:
-         container: ONEX dependency injection container.
-
-     Example:
-         ```python
-         from omnibase_core.container import ModelONEXContainer
+     """Declarative COMPUTE node for ledger projection.

-         container = ModelONEXContainer()
-         node = NodeLedgerProjectionCompute(container)
+     All behavior is defined in contract.yaml and delegated to
+     HandlerLedgerProjection. This node contains no custom logic.

-         # Transform event to ledger intent
-         message = ModelEventMessage(
-             topic="agent.routing.completed.v1",
-             value=b'{"agent": "code-quality"}',
-             headers=headers,
-             partition=0,
-             offset="42",
-         )
-         intent = node.compute(message)
-         # intent.payload.intent_type == "ledger.append"
-         ```
+     See Also:
+         - handlers/handler_ledger_projection.py: Contains all compute logic
+         - handlers/contract.yaml: Handler routing configuration
+         - contract.yaml: Node subscription and I/O configuration
      """

-     def __init__(self, container: ModelONEXContainer) -> None:
-         """Initialize the ledger projection compute node.
-
-         Args:
-             container: ONEX dependency injection container.
-         """
-         super().__init__(container)
-
-     def compute(self, message: ModelEventMessage) -> ModelIntent:
-         """Transform event message to ledger append intent.
-
-         Extracts metadata from the incoming Kafka event and produces a
-         ModelIntent with a ModelPayloadLedgerAppend payload for the Effect
-         layer to persist to PostgreSQL.
-
-         Args:
-             message: The incoming Kafka event message to transform.
-
-         Returns:
-             ModelIntent with intent_type="extension" containing the ledger
-             append payload for the Effect layer.
-
-         Raises:
-             OnexError: If message.value is None (event body is required).
-
-         INVARIANTS:
-         - Never drop events due to metadata extraction failure
-         - event_value is REQUIRED (raises OnexError if None)
-         - correlation_id is optional
-         """
-         payload = self._extract_ledger_metadata(message)
-         return ModelIntent(
-             intent_type="extension",
-             target=f"postgres://event_ledger/{payload.topic}/{payload.partition}/{payload.kafka_offset}",
-             payload=payload,
-         )
-
-     def _b64(self, b: bytes | None) -> str | None:
-         """Base64 encode bytes, returning None for None input.
-
-         Args:
-             b: Bytes to encode, or None.
-
-         Returns:
-             Base64-encoded string, or None if input was None.
-         """
-         if b is None:
-             return None
-         return base64.b64encode(b).decode("ascii")
-
-     def _normalize_headers(
-         self, headers: ModelEventHeaders | None
-     ) -> dict[str, JsonType]:
-         """Convert event headers to JSON-safe dictionary.
-
-         Uses Pydantic's model_dump with mode="json" to ensure all values
-         are JSON-serializable. Returns empty dict for None input.
-
-         Args:
-             headers: Event headers model, or None.
-
-         Returns:
-             JSON-safe dictionary of header values, or empty dict.
-         """
-         if headers is None:
-             return {}
-         try:
-             return headers.model_dump(mode="json")
-         except Exception:
-             # Best-effort: try to get correlation_id for logging context
-             correlation_id = getattr(headers, "correlation_id", None)
-             logger.warning(
-                 "Failed to serialize event headers, returning empty dict. "
-                 "correlation_id=%s",
-                 correlation_id,
-                 exc_info=True,
-             )
-             return {}
-
-     def _parse_offset(
-         self, offset: str | None, correlation_id: UUID | None = None
-     ) -> int:
-         """Parse Kafka offset string to integer.
-
-         Args:
-             offset: Offset string from Kafka, or None.
-             correlation_id: Optional correlation ID for logging context.
-
-         Returns:
-             Parsed offset as integer, or 0 if None or unparseable.
-         """
-         if offset is None:
-             return 0
-         try:
-             return int(offset)
-         except (ValueError, TypeError):
-             logger.warning(
-                 "Failed to parse offset '%s' as integer, defaulting to 0. "
-                 "correlation_id=%s",
-                 offset,
-                 correlation_id,
-             )
-             return 0
-
-     def _extract_ledger_metadata(
-         self, message: ModelEventMessage
-     ) -> ModelPayloadLedgerAppend:
-         """Extract ledger metadata from event message.
-
-         Main extraction logic that transforms a ModelEventMessage into a
-         ModelPayloadLedgerAppend. Uses best-effort extraction for all
-         metadata fields - only event_value being None causes an error.
-
-         Args:
-             message: The event message to extract metadata from.
-
-         Returns:
-             Populated ledger append payload ready for the Effect layer.
-
-         Raises:
-             OnexError: If message.value is None.
-
-         Field Mapping:
-             | Payload Field    | Source                          | Required |
-             |------------------|---------------------------------|----------|
-             | topic            | message.topic                   | YES      |
-             | partition        | message.partition               | YES*     |
-             | kafka_offset     | message.offset                  | YES*     |
-             | event_key        | base64(message.key)             | NO       |
-             | event_value      | base64(message.value)           | YES      |
-             | correlation_id   | message.headers.correlation_id  | NO       |
-             | event_type       | message.headers.event_type      | NO       |
-             | source           | message.headers.source          | NO       |
-             | envelope_id      | message.headers.message_id      | NO       |
-             | event_timestamp  | message.headers.timestamp       | NO       |
-             | onex_headers     | headers.model_dump(mode="json") | NO       |
-
-             * Defaults to 0 if not available (for consumed messages, these
-               should always be present, but we handle None defensively).
-         """
-         # CRITICAL: event_value is required - this is the only case where we raise
-         if message.value is None:
-             raise OnexError(
-                 "Cannot create ledger entry: message.value is None. "
-                 "Event body is required for audit ledger persistence."
-             )
-
-         # Base64 encode the raw bytes
-         event_value_b64 = self._b64(message.value)
-         # Defensive check - _b64 should never return None for non-None input
-         if event_value_b64 is None:
-             raise OnexError(
-                 "Unexpected: base64 encoding of message.value returned None. "
-                 "This should never happen for non-None bytes input."
-             )
-
-         event_key_b64 = self._b64(message.key)
-
-         # Extract headers best-effort
-         headers = message.headers
-         # Extract correlation_id early for logging context in helper methods
-         correlation_id = headers.correlation_id if headers else None
-         onex_headers = self._normalize_headers(headers)
-
-         # Build payload with best-effort metadata extraction
-         return ModelPayloadLedgerAppend(
-             # Required Kafka position fields (defensive defaults for None)
-             topic=message.topic,
-             partition=message.partition if message.partition is not None else 0,
-             kafka_offset=self._parse_offset(
-                 message.offset, correlation_id=correlation_id
-             ),
-             # Raw event data as base64
-             event_key=event_key_b64,
-             event_value=event_value_b64,
-             # Extracted metadata (all optional, best-effort)
-             onex_headers=onex_headers,
-             correlation_id=correlation_id,
-             envelope_id=headers.message_id if headers else None,
-             event_type=headers.event_type if headers else None,
-             source=headers.source if headers else None,
-             event_timestamp=headers.timestamp if headers else None,
-         )
+     # Declarative node - all behavior defined in contract.yaml


  __all__ = ["NodeLedgerProjectionCompute"]
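
For readability, here is the slimmed node module as a sketch reassembled from the "+" and context lines of the two hunks above (the module docstring is abbreviated; refer to the hunks for its full text):

```python
# SPDX-License-Identifier: MIT
# Copyright (c) 2026 OmniNode Team
"""NodeLedgerProjectionCompute - Declarative COMPUTE node for ledger projection.

(Docstring abbreviated; see the diff above for the full text.)
"""

from __future__ import annotations

from omnibase_core.nodes.node_compute import NodeCompute


# ONEX_EXCLUDE: declarative_node - legacy compute node with projection logic (OMN-1725)
class NodeLedgerProjectionCompute(NodeCompute):
    """Declarative COMPUTE node for ledger projection.

    All behavior is defined in contract.yaml and delegated to
    HandlerLedgerProjection. This node contains no custom logic.
    """

    # Declarative node - all behavior defined in contract.yaml


__all__ = ["NodeLedgerProjectionCompute"]
```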