kailash 0.6.5__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. kailash/__init__.py +35 -4
  2. kailash/adapters/__init__.py +5 -0
  3. kailash/adapters/mcp_platform_adapter.py +273 -0
  4. kailash/channels/__init__.py +21 -0
  5. kailash/channels/api_channel.py +409 -0
  6. kailash/channels/base.py +271 -0
  7. kailash/channels/cli_channel.py +661 -0
  8. kailash/channels/event_router.py +496 -0
  9. kailash/channels/mcp_channel.py +648 -0
  10. kailash/channels/session.py +423 -0
  11. kailash/mcp_server/discovery.py +1 -1
  12. kailash/middleware/core/agent_ui.py +5 -0
  13. kailash/middleware/mcp/enhanced_server.py +22 -16
  14. kailash/nexus/__init__.py +21 -0
  15. kailash/nexus/factory.py +413 -0
  16. kailash/nexus/gateway.py +545 -0
  17. kailash/nodes/__init__.py +2 -0
  18. kailash/nodes/ai/iterative_llm_agent.py +988 -17
  19. kailash/nodes/ai/llm_agent.py +29 -9
  20. kailash/nodes/api/__init__.py +2 -2
  21. kailash/nodes/api/monitoring.py +1 -1
  22. kailash/nodes/base_async.py +54 -14
  23. kailash/nodes/code/async_python.py +1 -1
  24. kailash/nodes/data/bulk_operations.py +939 -0
  25. kailash/nodes/data/query_builder.py +373 -0
  26. kailash/nodes/data/query_cache.py +512 -0
  27. kailash/nodes/monitoring/__init__.py +10 -0
  28. kailash/nodes/monitoring/deadlock_detector.py +964 -0
  29. kailash/nodes/monitoring/performance_anomaly.py +1078 -0
  30. kailash/nodes/monitoring/race_condition_detector.py +1151 -0
  31. kailash/nodes/monitoring/transaction_metrics.py +790 -0
  32. kailash/nodes/monitoring/transaction_monitor.py +931 -0
  33. kailash/nodes/system/__init__.py +17 -0
  34. kailash/nodes/system/command_parser.py +820 -0
  35. kailash/nodes/transaction/__init__.py +48 -0
  36. kailash/nodes/transaction/distributed_transaction_manager.py +983 -0
  37. kailash/nodes/transaction/saga_coordinator.py +652 -0
  38. kailash/nodes/transaction/saga_state_storage.py +411 -0
  39. kailash/nodes/transaction/saga_step.py +467 -0
  40. kailash/nodes/transaction/transaction_context.py +756 -0
  41. kailash/nodes/transaction/two_phase_commit.py +978 -0
  42. kailash/nodes/transform/processors.py +17 -1
  43. kailash/nodes/validation/__init__.py +21 -0
  44. kailash/nodes/validation/test_executor.py +532 -0
  45. kailash/nodes/validation/validation_nodes.py +447 -0
  46. kailash/resources/factory.py +1 -1
  47. kailash/runtime/async_local.py +84 -21
  48. kailash/runtime/local.py +21 -2
  49. kailash/runtime/parameter_injector.py +187 -31
  50. kailash/security.py +16 -1
  51. kailash/servers/__init__.py +32 -0
  52. kailash/servers/durable_workflow_server.py +430 -0
  53. kailash/servers/enterprise_workflow_server.py +466 -0
  54. kailash/servers/gateway.py +183 -0
  55. kailash/servers/workflow_server.py +290 -0
  56. kailash/utils/data_validation.py +192 -0
  57. kailash/workflow/builder.py +291 -12
  58. kailash/workflow/validation.py +144 -8
  59. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/METADATA +1 -1
  60. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/RECORD +64 -26
  61. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/WHEEL +0 -0
  62. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/entry_points.txt +0 -0
  63. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/licenses/LICENSE +0 -0
  64. {kailash-0.6.5.dist-info → kailash-0.7.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,756 @@
1
+ """Transaction Context Node for workflow-level transaction coordination.
2
+
3
+ This module provides a context manager node that enables transparent transaction
4
+ management across workflow executions. It automatically manages transaction
5
+ lifecycles, participant registration, and pattern selection for entire workflows.
6
+
7
+ The TransactionContextNode:
8
+ 1. Provides transparent transaction boundaries for workflows
9
+ 2. Automatically registers workflow nodes as transaction participants
10
+ 3. Manages distributed transaction coordination across workflow steps
11
+ 4. Handles automatic compensation and recovery for failed workflows
12
+ 5. Integrates with monitoring and metrics collection
13
+
14
+ Examples:
15
+ Basic workflow transaction:
16
+
17
+ >>> context = TransactionContextNode(
18
+ ... transaction_name="user_onboarding",
19
+ ... consistency="eventual",
20
+ ... availability="high"
21
+ ... )
22
+ >>> result = await context.execute(
23
+ ... operation="begin_workflow_transaction",
24
+ ... workflow_nodes=[
25
+ ... {"id": "create_user", "type": "UserCreateNode"},
26
+ ... {"id": "send_welcome", "type": "EmailNode"},
27
+ ... {"id": "setup_profile", "type": "ProfileSetupNode"}
28
+ ... ]
29
+ ... )
30
+
31
+ DataFlow integration:
32
+
33
+ >>> # Automatically wrap DataFlow operations in transactions
34
+ >>> context = TransactionContextNode(
35
+ ... auto_wrap_bulk_operations=True,
36
+ ... default_pattern="saga",
37
+ ... monitoring_enabled=True
38
+ ... )
39
+ >>> result = await context.execute(
40
+ ... operation="wrap_bulk_operation",
41
+ ... bulk_node="ProductBulkCreateNode",
42
+ ... compensation_node="ProductBulkDeleteNode"
43
+ ... )
44
+
45
+ Enterprise configuration:
46
+
47
+ >>> context = TransactionContextNode(
48
+ ... transaction_name="order_processing",
49
+ ... pattern="auto",
50
+ ... requirements={
51
+ ... "consistency": "strong",
52
+ ... "availability": "medium",
53
+ ... "timeout": 600
54
+ ... },
55
+ ... monitoring_enabled=True,
56
+ ... audit_logging=True
57
+ ... )
58
+ """
59
+
60
+ import asyncio
61
+ import logging
62
+ import time
63
+ import uuid
64
+ from datetime import UTC, datetime
65
+ from enum import Enum
66
+ from typing import Any, Dict, List, Optional, Union
67
+
68
+ from kailash.nodes.base import NodeMetadata, NodeParameter, register_node
69
+ from kailash.nodes.base_async import AsyncNode
70
+ from kailash.sdk_exceptions import NodeConfigurationError, NodeExecutionError
71
+
72
+ from .distributed_transaction_manager import (
73
+ AvailabilityLevel,
74
+ ConsistencyLevel,
75
+ DistributedTransactionManagerNode,
76
+ ParticipantCapability,
77
+ TransactionPattern,
78
+ TransactionRequirements,
79
+ )
80
+
81
+ logger = logging.getLogger(__name__)
82
+
83
+
84
class WorkflowTransactionStatus(Enum):
    """Lifecycle states for a workflow-level transaction."""

    PENDING = "pending"  # context created, transaction not yet begun
    ACTIVE = "active"  # begun and accepting participants
    COMMITTED = "committed"  # committed successfully via the DTM
    ROLLED_BACK = "rolled_back"  # explicitly aborted via rollback_workflow
    COMPENSATED = "compensated"  # DTM reported saga compensation was applied
    FAILED = "failed"  # DTM execution failed
93
+
94
+
95
class ParticipantType(Enum):
    """Categories of transaction participants."""

    WORKFLOW_NODE = "workflow_node"  # ordinary workflow step (default)
    BULK_OPERATION = "bulk_operation"  # auto-assigned when the node type contains "bulk"
    EXTERNAL_SERVICE = "external_service"
    DATABASE_OPERATION = "database_operation"
102
+
103
+
104
class WorkflowParticipant:
    """A single workflow node acting as a participant in a transaction.

    Capability flags (2PC / saga support) may be refined automatically from
    the node type name via keyword heuristics at construction time.
    """

    # Keyword heuristics used by capability auto-detection.
    _DB_KEYWORDS = ("sql", "database", "bulk", "create", "update", "delete")
    _EXTERNAL_KEYWORDS = ("http", "rest", "api", "email", "notification")

    def __init__(
        self,
        participant_id: str,
        node_type: str,
        participant_type: ParticipantType = ParticipantType.WORKFLOW_NODE,
        supports_2pc: bool = False,
        supports_saga: bool = True,
        compensation_node: Optional[str] = None,
        compensation_parameters: Optional[Dict[str, Any]] = None,
        timeout: int = 30,
        retry_count: int = 3,
        priority: int = 1,
        metadata: Optional[Dict[str, Any]] = None,
    ):
        self.participant_id = participant_id
        self.node_type = node_type
        self.participant_type = participant_type
        self.supports_2pc = supports_2pc
        self.supports_saga = supports_saga
        self.compensation_node = compensation_node
        self.compensation_parameters = compensation_parameters or {}
        self.timeout = timeout
        self.retry_count = retry_count
        self.priority = priority
        self.metadata = metadata or {}

        # Heuristic detection runs last and may override the explicit
        # supports_2pc / participant_type arguments passed above.
        self._detect_capabilities()

    def _detect_capabilities(self):
        """Infer transaction capabilities from the node type name."""
        lowered = self.node_type.lower()

        # Database-style operations are assumed to support two-phase commit.
        if any(keyword in lowered for keyword in self._DB_KEYWORDS):
            self.supports_2pc = True

        # External services are saga-only; this check deliberately runs
        # second so it wins over the database heuristic when both match.
        if any(keyword in lowered for keyword in self._EXTERNAL_KEYWORDS):
            self.supports_2pc = False

        # Bulk operations are re-classified for special downstream handling.
        if "bulk" in lowered:
            self.participant_type = ParticipantType.BULK_OPERATION

    def to_participant_capability(self) -> ParticipantCapability:
        """Build the DTM-facing ParticipantCapability for this participant."""
        return ParticipantCapability(
            participant_id=self.participant_id,
            endpoint=f"workflow://{self.participant_id}",
            supports_2pc=self.supports_2pc,
            supports_saga=self.supports_saga,
            compensation_action=self.compensation_node,
            timeout=self.timeout,
            retry_count=self.retry_count,
            priority=self.priority,
        )
168
+
169
+
170
@register_node("TransactionContextNode")
class TransactionContextNode(AsyncNode):
    """Workflow-level transaction coordination node.

    This node provides transparent transaction management for entire workflows,
    automatically managing transaction lifecycles, participant registration,
    and distributed coordination across workflow steps.

    Key Features:
    - Automatic workflow transaction boundaries
    - Transparent participant registration
    - Intelligent compensation logic
    - DataFlow integration support
    - Monitoring and metrics integration
    - Enterprise-grade configuration

    Operations:
    - begin_workflow_transaction: Start transaction for workflow
    - register_participant: Register workflow node as participant
    - wrap_bulk_operation: Wrap bulk operations in transactions
    - execute_workflow_step: Execute single workflow step with transaction
    - commit_workflow: Commit workflow transaction
    - rollback_workflow: Rollback workflow transaction
    - get_workflow_status: Get workflow transaction status
    """

    def __init__(
        self,
        transaction_name: Optional[str] = None,
        context_id: Optional[str] = None,
        pattern: Union[TransactionPattern, str] = TransactionPattern.AUTO,
        requirements: Optional[Dict[str, Any]] = None,
        auto_wrap_bulk_operations: bool = True,
        monitoring_enabled: bool = True,
        audit_logging: bool = False,
        state_storage: str = "memory",
        storage_config: Optional[Dict[str, Any]] = None,
        **kwargs,
    ):
        """Initialize Transaction Context Node.

        Args:
            transaction_name: Human-readable transaction name; defaults to
                "workflow_tx_<unix-timestamp>" when omitted.
            context_id: Unique context identifier; defaults to a random UUID4.
            pattern: Transaction pattern (saga, two_phase_commit, auto);
                strings are coerced to TransactionPattern.
            requirements: Transaction requirements for pattern selection,
                passed as keyword args to TransactionRequirements.
            auto_wrap_bulk_operations: Automatically wrap bulk operations.
            monitoring_enabled: Enable transaction monitoring.
            audit_logging: Enable audit logging.
            state_storage: Storage backend for transaction state.
            storage_config: Configuration for state storage.
            **kwargs: Additional node configuration forwarded to AsyncNode.
        """
        # Set node metadata
        metadata = NodeMetadata(
            name=kwargs.get("name", "transaction_context"),
            description="Workflow-level transaction coordination with automatic participant management",
            version="1.0.0",
            tags={"transaction", "workflow", "context", "coordination"},
        )

        # Initialize AsyncNode
        super().__init__(metadata=metadata, **kwargs)

        # Context configuration
        self.transaction_name = transaction_name or f"workflow_tx_{int(time.time())}"
        self.context_id = context_id or str(uuid.uuid4())
        # Accept either a TransactionPattern or its string value.
        self.pattern = (
            TransactionPattern(pattern) if isinstance(pattern, str) else pattern
        )
        self.auto_wrap_bulk_operations = auto_wrap_bulk_operations
        self.monitoring_enabled = monitoring_enabled
        self.audit_logging = audit_logging

        # Transaction requirements
        if requirements:
            self.requirements = TransactionRequirements(**requirements)
        else:
            self.requirements = TransactionRequirements()

        # State
        self.status = WorkflowTransactionStatus.PENDING
        self.participants: List[WorkflowParticipant] = []
        self.workflow_context: Dict[str, Any] = {}
        self.execution_order: List[str] = []
        self.created_at: Optional[datetime] = None
        # NOTE(review): started_at is declared and reported by
        # get_workflow_status but never assigned anywhere in this class,
        # so it is always None — confirm whether it should be set when the
        # transaction becomes ACTIVE.
        self.started_at: Optional[datetime] = None
        self.completed_at: Optional[datetime] = None
        self.error_message: Optional[str] = None

        # Distributed transaction manager — shares our context_id as its
        # transaction_id so DTM state maps 1:1 to this context.
        self.dtm = DistributedTransactionManagerNode(
            transaction_name=self.transaction_name,
            transaction_id=self.context_id,
            default_pattern=self.pattern,
            state_storage=state_storage,
            storage_config=storage_config or {},
            monitoring_enabled=monitoring_enabled,
            audit_logging=audit_logging,
        )

        # Monitoring
        self._metrics_node = None
        if monitoring_enabled:
            self._init_monitoring()

        logger.info(f"Initialized TransactionContextNode: {self.context_id}")

    def _init_monitoring(self) -> None:
        """Initialize transaction monitoring.

        Imported lazily so that a missing monitoring module degrades to a
        warning instead of breaking node construction.
        """
        try:
            from kailash.nodes.monitoring.transaction_metrics import (
                TransactionMetricsNode,
            )

            self._metrics_node = TransactionMetricsNode()
        except ImportError:
            logger.warning("TransactionMetricsNode not available, monitoring disabled")

    def get_parameters(self) -> Dict[str, NodeParameter]:
        """Get node parameters for validation.

        Only ``operation`` is required; the remaining parameters are
        operation-specific inputs validated per call.
        """
        return {
            "operation": NodeParameter(
                name="operation",
                type=str,
                required=True,
                description="Transaction context operation to execute",
            ),
            "workflow_nodes": NodeParameter(
                name="workflow_nodes",
                type=list,
                required=False,
                description="List of workflow nodes to register as participants",
            ),
            "participant": NodeParameter(
                name="participant",
                type=dict,
                required=False,
                description="Single participant to register",
            ),
            "bulk_node": NodeParameter(
                name="bulk_node",
                type=str,
                required=False,
                description="Bulk operation node to wrap in transaction",
            ),
            "compensation_node": NodeParameter(
                name="compensation_node",
                type=str,
                required=False,
                description="Compensation node for bulk operation",
            ),
            "step_id": NodeParameter(
                name="step_id",
                type=str,
                required=False,
                description="Workflow step identifier",
            ),
            "step_parameters": NodeParameter(
                name="step_parameters",
                type=dict,
                required=False,
                description="Parameters for workflow step execution",
            ),
            "context": NodeParameter(
                name="context",
                type=dict,
                required=False,
                description="Workflow context data",
            ),
            "force_pattern": NodeParameter(
                name="force_pattern",
                type=str,
                required=False,
                description="Force specific transaction pattern",
            ),
        }

    def get_outputs(self) -> Dict[str, NodeParameter]:
        """Get node outputs."""
        return {
            "status": NodeParameter(
                name="status",
                type=str,
                required=True,
                description="Operation status",
            ),
            "context_id": NodeParameter(
                name="context_id",
                type=str,
                required=True,
                description="Transaction context identifier",
            ),
            "workflow_status": NodeParameter(
                name="workflow_status",
                type=str,
                required=True,
                description="Workflow transaction status",
            ),
            "participants": NodeParameter(
                name="participants",
                type=list,
                required=False,
                description="List of registered participants",
            ),
            "selected_pattern": NodeParameter(
                name="selected_pattern",
                type=str,
                required=False,
                description="Selected transaction pattern",
            ),
            "result": NodeParameter(
                name="result",
                type=dict,
                required=False,
                description="Operation result data",
            ),
            "error": NodeParameter(
                name="error",
                type=str,
                required=False,
                description="Error message if operation failed",
            ),
        }

    async def async_run(self, **kwargs) -> Dict[str, Any]:
        """Execute transaction context operation.

        Dispatches on ``operation`` to the matching handler. Any exception
        is recorded to metrics and converted into an error-status result
        dict rather than propagating to the caller.
        """
        operation = kwargs.get("operation")

        try:
            if operation == "begin_workflow_transaction":
                return await self._begin_workflow_transaction(**kwargs)
            elif operation == "register_participant":
                return await self._register_participant(**kwargs)
            elif operation == "wrap_bulk_operation":
                return await self._wrap_bulk_operation(**kwargs)
            elif operation == "execute_workflow_step":
                return await self._execute_workflow_step(**kwargs)
            elif operation == "commit_workflow":
                return await self._commit_workflow(**kwargs)
            elif operation == "rollback_workflow":
                return await self._rollback_workflow(**kwargs)
            elif operation == "get_workflow_status":
                return await self._get_workflow_status(**kwargs)
            else:
                raise NodeExecutionError(f"Unknown operation: {operation}")

        except Exception as e:
            logger.error(f"Transaction context operation failed: {e}")
            self.error_message = str(e)
            await self._record_error(str(e))
            return {
                "status": "error",
                "context_id": self.context_id,
                "workflow_status": self.status.value,
                "error": str(e),
            }

    async def _begin_workflow_transaction(self, **kwargs) -> Dict[str, Any]:
        """Begin a workflow transaction.

        Registers the given workflow nodes as participants, creates the
        underlying DTM transaction, and moves this context to ACTIVE.
        Raises NodeExecutionError if the context is not PENDING.
        """
        if self.status != WorkflowTransactionStatus.PENDING:
            raise NodeExecutionError(f"Transaction already {self.status.value}")

        # Update context
        context = kwargs.get("context", {})
        self.workflow_context.update(context)

        # Register workflow nodes as participants
        workflow_nodes = kwargs.get("workflow_nodes", [])
        for node_info in workflow_nodes:
            participant = WorkflowParticipant(
                participant_id=node_info["id"],
                node_type=node_info["type"],
                compensation_node=node_info.get("compensation_node"),
                compensation_parameters=node_info.get("compensation_parameters"),
                priority=node_info.get("priority", 1),
                metadata=node_info.get("metadata", {}),
            )
            self.participants.append(participant)

        # Create transaction with DTM
        dtm_result = await self.dtm.async_run(
            operation="create_transaction",
            transaction_name=self.transaction_name,
            requirements=self.requirements.__dict__,
            context=self.workflow_context,
        )

        if dtm_result.get("status") != "success":
            raise NodeExecutionError(
                f"Failed to create transaction: {dtm_result.get('error')}"
            )

        # Register participants with DTM
        for participant in self.participants:
            capability = participant.to_participant_capability()
            await self.dtm.async_run(
                operation="add_participant", participant=capability.to_dict()
            )

        # Update status
        self.status = WorkflowTransactionStatus.ACTIVE
        self.created_at = datetime.now(UTC)

        # Start monitoring
        if self._metrics_node:
            await self._metrics_node.async_run(
                operation="start_transaction",
                transaction_id=self.context_id,
                name=self.transaction_name,
                tags={"type": "workflow_transaction"},
            )

        logger.info(f"Started workflow transaction: {self.context_id}")

        return {
            "status": "success",
            "context_id": self.context_id,
            "workflow_status": self.status.value,
            "participants": len(self.participants),
            "created_at": self.created_at.isoformat(),
        }

    async def _register_participant(self, **kwargs) -> Dict[str, Any]:
        """Register a single participant.

        Adds the participant locally and with the DTM; raises on missing
        input or DTM registration failure.
        """
        participant_info = kwargs.get("participant")
        if not participant_info:
            raise NodeExecutionError("participant information required")

        participant = WorkflowParticipant(
            participant_id=participant_info["id"],
            node_type=participant_info["type"],
            compensation_node=participant_info.get("compensation_node"),
            compensation_parameters=participant_info.get("compensation_parameters"),
            priority=participant_info.get("priority", 1),
            metadata=participant_info.get("metadata", {}),
        )

        # Add to participants list
        self.participants.append(participant)

        # Register with DTM
        capability = participant.to_participant_capability()
        dtm_result = await self.dtm.async_run(
            operation="add_participant", participant=capability.to_dict()
        )

        if dtm_result.get("status") != "success":
            raise NodeExecutionError(
                f"Failed to register participant: {dtm_result.get('error')}"
            )

        return {
            "status": "success",
            "context_id": self.context_id,
            "workflow_status": self.status.value,
            "participant_id": participant.participant_id,
            "total_participants": len(self.participants),
        }

    async def _wrap_bulk_operation(self, **kwargs) -> Dict[str, Any]:
        """Wrap a bulk operation in a transaction.

        Creates a BULK_OPERATION participant for the given node; it is only
        registered with the DTM immediately when the context is already
        ACTIVE (otherwise registration happens when the transaction begins).
        """
        bulk_node = kwargs.get("bulk_node")
        if not bulk_node:
            raise NodeExecutionError("bulk_node required for wrap_bulk_operation")

        compensation_node = kwargs.get("compensation_node")

        # Create participant for bulk operation
        participant = WorkflowParticipant(
            participant_id=f"bulk_{bulk_node}_{int(time.time())}",
            node_type=bulk_node,
            participant_type=ParticipantType.BULK_OPERATION,
            compensation_node=compensation_node,
            supports_2pc=True,  # Bulk operations support 2PC
            supports_saga=True,
            priority=1,
        )

        # Register participant
        self.participants.append(participant)

        # Register with DTM if transaction is active
        if self.status == WorkflowTransactionStatus.ACTIVE:
            capability = participant.to_participant_capability()
            await self.dtm.async_run(
                operation="add_participant", participant=capability.to_dict()
            )

        return {
            "status": "success",
            "context_id": self.context_id,
            "workflow_status": self.status.value,
            "participant_id": participant.participant_id,
            "wrapped_operation": bulk_node,
            "compensation_node": compensation_node,
        }

    async def _execute_workflow_step(self, **kwargs) -> Dict[str, Any]:
        """Execute a single workflow step within transaction.

        Currently only records the step in execution_order; actual step
        execution is a placeholder (see note below).
        """
        step_id = kwargs.get("step_id")
        if not step_id:
            raise NodeExecutionError("step_id required for execute_workflow_step")

        # NOTE(review): step_parameters is read but never used below —
        # presumably intended for the future execution-engine integration.
        step_parameters = kwargs.get("step_parameters", {})

        # Find participant
        participant = None
        for p in self.participants:
            if p.participant_id == step_id:
                participant = p
                break

        if not participant:
            raise NodeExecutionError(f"Participant {step_id} not found")

        # Record step execution order
        self.execution_order.append(step_id)

        # For now, this is a placeholder - in full implementation,
        # this would integrate with the workflow execution engine
        # to actually execute the step

        return {
            "status": "success",
            "context_id": self.context_id,
            "workflow_status": self.status.value,
            "step_id": step_id,
            "execution_order": self.execution_order,
        }

    async def _commit_workflow(self, **kwargs) -> Dict[str, Any]:
        """Commit the workflow transaction.

        Delegates execution to the DTM, maps the DTM outcome onto this
        context's status (COMMITTED / COMPENSATED / FAILED), and stops
        monitoring. Raises if the context is not ACTIVE.
        """
        if self.status != WorkflowTransactionStatus.ACTIVE:
            raise NodeExecutionError(
                f"Cannot commit transaction in status: {self.status.value}"
            )

        # Force pattern if specified
        force_pattern = kwargs.get("force_pattern")
        if force_pattern:
            self.pattern = TransactionPattern(force_pattern)

        # Execute transaction with DTM
        dtm_result = await self.dtm.async_run(
            operation="execute_transaction",
            pattern=(
                self.pattern.value
                if self.pattern != TransactionPattern.AUTO
                else "auto"
            ),
        )

        # Update status based on result
        if dtm_result.get("status") == "success":
            transaction_status = dtm_result.get("transaction_status", "committed")
            if transaction_status == "committed":
                self.status = WorkflowTransactionStatus.COMMITTED
            elif transaction_status == "compensated":
                self.status = WorkflowTransactionStatus.COMPENSATED
            else:
                # Unknown success sub-status defaults to COMMITTED.
                self.status = WorkflowTransactionStatus.COMMITTED
        else:
            self.status = WorkflowTransactionStatus.FAILED
            self.error_message = dtm_result.get("error", "Transaction execution failed")

        self.completed_at = datetime.now(UTC)

        # End monitoring
        if self._metrics_node:
            await self._metrics_node.async_run(
                operation="end_transaction",
                transaction_id=self.context_id,
                status=(
                    "success"
                    if self.status == WorkflowTransactionStatus.COMMITTED
                    else "error"
                ),
                error=self.error_message,
            )

        logger.info(
            f"Workflow transaction {self.context_id} completed with status: {self.status.value}"
        )

        return {
            "status": (
                "success"
                if self.status == WorkflowTransactionStatus.COMMITTED
                else "failed"
            ),
            "context_id": self.context_id,
            "workflow_status": self.status.value,
            "selected_pattern": dtm_result.get("selected_pattern"),
            "participants": len(self.participants),
            # Wall-clock seconds from ACTIVE (created_at) to completion.
            "execution_time": (
                (self.completed_at - self.created_at).total_seconds()
                if self.created_at
                else 0
            ),
            "result": dtm_result.get("result"),
            "error": self.error_message,
        }

    async def _rollback_workflow(self, **kwargs) -> Dict[str, Any]:
        """Rollback the workflow transaction.

        No-op (returns "already_finished") unless the context is ACTIVE or
        FAILED; otherwise aborts the DTM transaction and marks ROLLED_BACK.
        """
        if self.status not in [
            WorkflowTransactionStatus.ACTIVE,
            WorkflowTransactionStatus.FAILED,
        ]:
            return {
                "status": "already_finished",
                "context_id": self.context_id,
                "workflow_status": self.status.value,
            }

        # Abort transaction with DTM
        # NOTE(review): the abort result is not checked — rollback is
        # treated as best-effort and always marked ROLLED_BACK locally.
        dtm_result = await self.dtm.async_run(operation="abort_transaction")

        self.status = WorkflowTransactionStatus.ROLLED_BACK
        self.completed_at = datetime.now(UTC)

        # End monitoring
        if self._metrics_node:
            await self._metrics_node.async_run(
                operation="end_transaction",
                transaction_id=self.context_id,
                status="aborted",
            )

        logger.info(f"Workflow transaction {self.context_id} rolled back")

        return {
            "status": "success",
            "context_id": self.context_id,
            "workflow_status": self.status.value,
            "rolled_back_at": self.completed_at.isoformat(),
        }

    async def _get_workflow_status(self, **kwargs) -> Dict[str, Any]:
        """Get current workflow transaction status.

        Combines local context state with the live DTM status snapshot.
        """
        # Get DTM status
        dtm_status = await self.dtm.async_run(operation="get_status")

        participant_info = [
            {
                "id": p.participant_id,
                "type": p.node_type,
                "participant_type": p.participant_type.value,
                "supports_2pc": p.supports_2pc,
                "supports_saga": p.supports_saga,
                "compensation_node": p.compensation_node,
                "priority": p.priority,
                "metadata": p.metadata,
            }
            for p in self.participants
        ]

        return {
            "status": "success",
            "context_id": self.context_id,
            "transaction_name": self.transaction_name,
            "workflow_status": self.status.value,
            "participants": participant_info,
            "execution_order": self.execution_order,
            "workflow_context": self.workflow_context,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "started_at": self.started_at.isoformat() if self.started_at else None,
            "completed_at": (
                self.completed_at.isoformat() if self.completed_at else None
            ),
            "dtm_status": dtm_status,
            "error": self.error_message,
        }

    async def _record_error(self, error: str) -> None:
        """Record error for monitoring.

        Best-effort: metrics failures are logged and swallowed so error
        reporting never masks the original workflow failure.
        """
        if self._metrics_node:
            try:
                await self._metrics_node.async_run(
                    operation="end_transaction",
                    transaction_id=self.context_id,
                    status="error",
                    error=error,
                )
            except Exception as e:
                logger.warning(f"Failed to record error in metrics: {e}")