jarviscore-framework 0.1.1__py3-none-any.whl → 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. examples/autoagent_distributed_example.py +211 -0
  2. examples/custom_profile_decorator.py +134 -0
  3. examples/custom_profile_wrap.py +168 -0
  4. examples/customagent_distributed_example.py +362 -0
  5. examples/customagent_p2p_example.py +730 -0
  6. jarviscore/__init__.py +49 -36
  7. jarviscore/adapter/__init__.py +15 -9
  8. jarviscore/adapter/decorator.py +23 -19
  9. jarviscore/adapter/wrapper.py +303 -0
  10. jarviscore/cli/scaffold.py +1 -1
  11. jarviscore/cli/smoketest.py +3 -2
  12. jarviscore/core/agent.py +44 -1
  13. jarviscore/core/mesh.py +196 -35
  14. jarviscore/data/examples/autoagent_distributed_example.py +211 -0
  15. jarviscore/data/examples/customagent_distributed_example.py +362 -0
  16. jarviscore/data/examples/customagent_p2p_example.py +730 -0
  17. jarviscore/docs/API_REFERENCE.md +264 -51
  18. jarviscore/docs/AUTOAGENT_GUIDE.md +198 -0
  19. jarviscore/docs/CONFIGURATION.md +35 -21
  20. jarviscore/docs/CUSTOMAGENT_GUIDE.md +1362 -0
  21. jarviscore/docs/GETTING_STARTED.md +107 -14
  22. jarviscore/docs/TROUBLESHOOTING.md +145 -7
  23. jarviscore/docs/USER_GUIDE.md +138 -361
  24. jarviscore/orchestration/engine.py +20 -8
  25. jarviscore/p2p/__init__.py +10 -0
  26. jarviscore/p2p/coordinator.py +129 -0
  27. jarviscore/p2p/messages.py +87 -0
  28. jarviscore/p2p/peer_client.py +576 -0
  29. jarviscore/p2p/peer_tool.py +268 -0
  30. jarviscore_framework-0.2.1.dist-info/METADATA +144 -0
  31. jarviscore_framework-0.2.1.dist-info/RECORD +132 -0
  32. {jarviscore_framework-0.1.1.dist-info → jarviscore_framework-0.2.1.dist-info}/WHEEL +1 -1
  33. {jarviscore_framework-0.1.1.dist-info → jarviscore_framework-0.2.1.dist-info}/top_level.txt +1 -0
  34. test_logs/code_registry/functions/data_generator-558779ed_560ebc37.py +7 -0
  35. test_logs/code_registry/functions/data_generator-5ed3609e_560ebc37.py +7 -0
  36. test_logs/code_registry/functions/data_generator-66da0356_43970bb9.py +25 -0
  37. test_logs/code_registry/functions/data_generator-7a2fac83_583709d9.py +36 -0
  38. test_logs/code_registry/functions/data_generator-888b670f_aa235863.py +9 -0
  39. test_logs/code_registry/functions/data_generator-9ca5f642_aa235863.py +9 -0
  40. test_logs/code_registry/functions/data_generator-bfd90775_560ebc37.py +7 -0
  41. test_logs/code_registry/functions/data_generator-e95d2f7d_aa235863.py +9 -0
  42. test_logs/code_registry/functions/data_generator-f60ca8a2_327eb8c2.py +29 -0
  43. test_logs/code_registry/functions/mathematician-02adf9ee_958658d9.py +19 -0
  44. test_logs/code_registry/functions/mathematician-0706fb57_5df13441.py +23 -0
  45. test_logs/code_registry/functions/mathematician-153c9c4a_ba59c918.py +83 -0
  46. test_logs/code_registry/functions/mathematician-287e61c0_41daa793.py +18 -0
  47. test_logs/code_registry/functions/mathematician-2967af5a_863c2cc6.py +17 -0
  48. test_logs/code_registry/functions/mathematician-303ca6d6_5df13441.py +23 -0
  49. test_logs/code_registry/functions/mathematician-308a4afd_cbf5064d.py +73 -0
  50. test_logs/code_registry/functions/mathematician-353f16e2_0968bcf5.py +18 -0
  51. test_logs/code_registry/functions/mathematician-3c22475a_41daa793.py +17 -0
  52. test_logs/code_registry/functions/mathematician-5bac1029_0968bcf5.py +18 -0
  53. test_logs/code_registry/functions/mathematician-640f76b2_9198780b.py +19 -0
  54. test_logs/code_registry/functions/mathematician-752fa7ea_863c2cc6.py +17 -0
  55. test_logs/code_registry/functions/mathematician-baf9ef39_0968bcf5.py +18 -0
  56. test_logs/code_registry/functions/mathematician-bc8b2a2f_5df13441.py +23 -0
  57. test_logs/code_registry/functions/mathematician-c31e4686_41daa793.py +18 -0
  58. test_logs/code_registry/functions/mathematician-cc84c84c_863c2cc6.py +17 -0
  59. test_logs/code_registry/functions/mathematician-dd7c7144_9198780b.py +19 -0
  60. test_logs/code_registry/functions/mathematician-e671c256_41ea4487.py +74 -0
  61. test_logs/code_registry/functions/report_generator-1a878fcc_18d44bdc.py +47 -0
  62. test_logs/code_registry/functions/report_generator-25c1c331_cea57d0d.py +35 -0
  63. test_logs/code_registry/functions/report_generator-37552117_e711c2b9.py +35 -0
  64. test_logs/code_registry/functions/report_generator-bc662768_e711c2b9.py +35 -0
  65. test_logs/code_registry/functions/report_generator-d6c0e76b_5e7722ec.py +44 -0
  66. test_logs/code_registry/functions/report_generator-f270fb02_680529c3.py +44 -0
  67. test_logs/code_registry/functions/text_processor-11393b14_4370d3ed.py +40 -0
  68. test_logs/code_registry/functions/text_processor-7d02dfc3_d3b569be.py +37 -0
  69. test_logs/code_registry/functions/text_processor-8adb5e32_9168c5fe.py +13 -0
  70. test_logs/code_registry/functions/text_processor-c58ffc19_78b4ceac.py +42 -0
  71. test_logs/code_registry/functions/text_processor-cd5977b1_9168c5fe.py +13 -0
  72. test_logs/code_registry/functions/text_processor-ec1c8773_9168c5fe.py +13 -0
  73. tests/test_01_analyst_standalone.py +124 -0
  74. tests/test_02_assistant_standalone.py +164 -0
  75. tests/test_03_analyst_with_framework.py +945 -0
  76. tests/test_04_assistant_with_framework.py +1002 -0
  77. tests/test_05_integration.py +1301 -0
  78. tests/test_06_real_llm_integration.py +760 -0
  79. tests/test_07_distributed_single_node.py +578 -0
  80. tests/test_08_distributed_multi_node.py +454 -0
  81. tests/test_09_distributed_autoagent.py +509 -0
  82. tests/test_10_distributed_customagent.py +787 -0
  83. tests/test_mesh.py +35 -4
  84. jarviscore_framework-0.1.1.dist-info/METADATA +0 -137
  85. jarviscore_framework-0.1.1.dist-info/RECORD +0 -69
  86. {jarviscore_framework-0.1.1.dist-info → jarviscore_framework-0.2.1.dist-info}/licenses/LICENSE +0 -0
@@ -0,0 +1,578 @@
1
+ """
2
+ Test 7: Distributed Mode - Single Node Tests
3
+
4
+ Tests the core distributed mode functionality on a single node:
5
+ - P2P coordinator + Workflow engine working together
6
+ - Workflow execution with dependency resolution
7
+ - Step result broadcasting
8
+ - Status tracking and memory management
9
+
10
+ This file uses MOCKED agents (no real LLM) to test the infrastructure.
11
+ Real LLM tests are in test_09 and test_10.
12
+
13
+ Run with: pytest tests/test_07_distributed_single_node.py -v
14
+ """
15
+ import asyncio
16
+ import sys
17
+ import pytest
18
+
19
+ sys.path.insert(0, '.')
20
+
21
+ from jarviscore import Mesh, MeshMode, Agent
22
+
23
+
24
+ # ═══════════════════════════════════════════════════════════════════════════════
25
+ # TEST AGENTS (Mocked - No LLM)
26
+ # ═══════════════════════════════════════════════════════════════════════════════
27
+
28
class DataGeneratorAgent(Agent):
    """Mock agent that emits a fixed three-record dataset (no LLM).

    Every task routed here is recorded in ``tasks_executed`` so tests can
    assert on routing.
    """
    role = "generator"
    capabilities = ["data_generation", "sampling"]

    def __init__(self, agent_id=None):
        super().__init__(agent_id)
        # Tasks received, in arrival order — inspected by tests.
        self.tasks_executed = []

    async def execute_task(self, task):
        # Remember the task so routing can be verified later.
        self.tasks_executed.append(task)
        records = [
            {"id": 1, "name": "Alice", "score": 85},
            {"id": 2, "name": "Bob", "score": 92},
            {"id": 3, "name": "Charlie", "score": 78},
        ]
        return {
            "status": "success",
            "output": {"data": records, "count": 3},
            "agent": self.agent_id,
        }
51
+
52
+
53
class DataAnalyzerAgent(Agent):
    """Mock agent that pretends to analyze data and records its context.

    Besides tracking tasks, it stores each task's injected ``context`` in
    ``context_received`` so tests can verify dependency-context injection.
    """
    role = "analyzer"
    capabilities = ["analysis", "statistics"]

    def __init__(self, agent_id=None):
        super().__init__(agent_id)
        self.tasks_executed = []
        # Context dicts seen per task — inspected by dependency tests.
        self.context_received = []

    async def execute_task(self, task):
        self.tasks_executed.append(task)

        # Capture whatever context the engine injected from dependencies.
        ctx = task.get("context", {})
        self.context_received.append(ctx)

        # Flag whether any upstream step results were actually provided.
        upstream = ctx.get("previous_step_results", {})

        return {
            "status": "success",
            "output": {
                "analysis": "Positive trend detected",
                "mean_score": 85.0,
                "received_context": bool(upstream),
            },
            "agent": self.agent_id,
        }
82
+
83
+
84
class DataStorageAgent(Agent):
    """Mock agent that reports a successful save without touching storage."""
    role = "storage"
    capabilities = ["storage", "persistence"]

    def __init__(self, agent_id=None):
        super().__init__(agent_id)
        # Tasks routed here, for routing assertions.
        self.tasks_executed = []

    async def execute_task(self, task):
        # Track the task, then answer with a canned "saved" result.
        self.tasks_executed.append(task)
        result = {
            "status": "success",
            "output": {"saved": True, "records": 3},
            "agent": self.agent_id,
        }
        return result
100
+
101
+
102
class FailingAgent(Agent):
    """Mock agent whose execute_task always raises — for error-path tests."""
    role = "failing"
    capabilities = ["failure"]

    async def execute_task(self, task):
        # Deliberate failure so the engine's error handling can be asserted.
        raise RuntimeError("Intentional failure for testing")
109
+
110
+
111
+ # ═══════════════════════════════════════════════════════════════════════════════
112
+ # TEST CLASS: Distributed Mode Initialization
113
+ # ═══════════════════════════════════════════════════════════════════════════════
114
+
115
class TestDistributedModeInitialization:
    """Checks that distributed mode wires up its components on start()."""

    def test_mesh_creates_in_distributed_mode(self):
        """A freshly built mesh is in distributed mode but not yet started."""
        m = Mesh(mode="distributed")

        assert m.mode == MeshMode.DISTRIBUTED
        assert m._started is False
        # Neither component exists until start() runs.
        assert m._p2p_coordinator is None
        assert m._workflow_engine is None

    @pytest.mark.asyncio
    async def test_p2p_coordinator_initialized(self):
        """start() must bring up the P2P coordinator."""
        m = Mesh(mode="distributed", config={'bind_port': 7950})
        m.add(DataGeneratorAgent)

        await m.start()
        try:
            assert m._p2p_coordinator is not None
            assert m._p2p_coordinator._started is True
        finally:
            await m.stop()

    @pytest.mark.asyncio
    async def test_workflow_engine_initialized(self):
        """start() must bring up the workflow engine."""
        m = Mesh(mode="distributed", config={'bind_port': 7951})
        m.add(DataGeneratorAgent)

        await m.start()
        try:
            assert m._workflow_engine is not None
            assert m._workflow_engine._started is True
        finally:
            await m.stop()

    @pytest.mark.asyncio
    async def test_both_components_initialized(self):
        """Distributed mode must create BOTH components — its key property."""
        m = Mesh(mode="distributed", config={'bind_port': 7952})
        m.add(DataGeneratorAgent)

        await m.start()
        try:
            assert m._p2p_coordinator is not None, "Distributed needs P2P"
            assert m._workflow_engine is not None, "Distributed needs Workflow"
        finally:
            await m.stop()

    @pytest.mark.asyncio
    async def test_capabilities_announced(self):
        """Starting the mesh should publish agent capabilities to the P2P mesh."""
        m = Mesh(mode="distributed", config={'bind_port': 7953})
        m.add(DataGeneratorAgent)
        m.add(DataAnalyzerAgent)

        await m.start()
        try:
            # The coordinator's capability map should have been populated.
            cap_map = m._p2p_coordinator._capability_map
            assert "data_generation" in cap_map or len(cap_map) > 0
        finally:
            await m.stop()
185
+
186
+
187
+ # ═══════════════════════════════════════════════════════════════════════════════
188
+ # TEST CLASS: Workflow Execution in Distributed Mode
189
+ # ═══════════════════════════════════════════════════════════════════════════════
190
+
191
class TestDistributedWorkflowExecution:
    """Exercises workflow execution paths in distributed mode."""

    @pytest.fixture
    async def distributed_mesh(self):
        """Yield a started mesh plus its generator/analyzer/storage agents."""
        m = Mesh(mode="distributed", config={'bind_port': 7954})
        gen = m.add(DataGeneratorAgent)
        ana = m.add(DataAnalyzerAgent)
        store = m.add(DataStorageAgent)

        await m.start()

        yield m, gen, ana, store

        await m.stop()

    @pytest.mark.asyncio
    async def test_single_step_workflow(self, distributed_mesh):
        """A one-step workflow succeeds and routes to the generator."""
        m, gen, ana, store = distributed_mesh

        results = await m.workflow("test-single", [
            {"agent": "generator", "task": "Generate sample data"}
        ])

        assert len(results) == 1
        assert results[0]["status"] == "success"
        assert "data" in results[0]["output"]
        assert len(gen.tasks_executed) == 1

    @pytest.mark.asyncio
    async def test_multi_step_workflow(self, distributed_mesh):
        """Independent steps all run, exactly one per agent."""
        m, gen, ana, store = distributed_mesh

        results = await m.workflow("test-multi", [
            {"agent": "generator", "task": "Generate data"},
            {"agent": "analyzer", "task": "Analyze data"},
            {"agent": "storage", "task": "Store results"}
        ])

        assert len(results) == 3
        assert all(r["status"] == "success" for r in results)
        assert len(gen.tasks_executed) == 1
        assert len(ana.tasks_executed) == 1
        assert len(store.tasks_executed) == 1

    @pytest.mark.asyncio
    async def test_workflow_with_dependencies(self, distributed_mesh):
        """depends_on feeds earlier step output into later steps' context."""
        m, gen, ana, store = distributed_mesh

        results = await m.workflow("test-deps", [
            {"agent": "generator", "task": "Generate user data"},
            {"agent": "analyzer", "task": "Analyze the data", "depends_on": [0]},
            {"agent": "storage", "task": "Store analysis", "depends_on": [1]}
        ])

        assert len(results) == 3
        assert all(r["status"] == "success" for r in results)

        # The analyzer must have seen the generator's result as context.
        assert len(ana.context_received) == 1
        ctx = ana.context_received[0]
        assert "previous_step_results" in ctx
        assert "step0" in ctx["previous_step_results"]

    @pytest.mark.asyncio
    async def test_workflow_with_missing_agent(self, distributed_mesh):
        """An unknown agent name yields a failure result, not an exception."""
        m, gen, ana, store = distributed_mesh

        results = await m.workflow("test-missing", [
            {"agent": "nonexistent_agent", "task": "Should fail"}
        ])

        assert len(results) == 1
        assert results[0]["status"] == "failure"
        assert "No agent found" in results[0]["error"]

    @pytest.mark.asyncio
    async def test_workflow_routes_by_capability(self, distributed_mesh):
        """A capability name routes to an agent advertising that capability."""
        m, gen, ana, store = distributed_mesh

        results = await m.workflow("test-capability", [
            {"agent": "data_generation", "task": "Use capability routing"}
        ])

        assert len(results) == 1
        assert results[0]["status"] == "success"
        # Capability routing should have landed on the generator.
        assert len(gen.tasks_executed) == 1

    @pytest.mark.asyncio
    async def test_step_results_stored_in_memory(self, distributed_mesh):
        """Completed step outputs end up in the workflow engine's memory."""
        m, gen, ana, store = distributed_mesh

        await m.workflow("test-memory", [
            {"id": "gen-step", "agent": "generator", "task": "Generate"},
            {"id": "analyze-step", "agent": "analyzer", "task": "Analyze", "depends_on": ["gen-step"]}
        ])

        memory = m._workflow_engine.get_memory()
        assert "gen-step" in memory
        assert "analyze-step" in memory
300
+
301
+
302
+ # ═══════════════════════════════════════════════════════════════════════════════
303
+ # TEST CLASS: Step Broadcasting
304
+ # ═══════════════════════════════════════════════════════════════════════════════
305
+
306
class TestDistributedStepBroadcasting:
    """Covers P2P step-result broadcasting and status tracking."""

    @pytest.mark.asyncio
    async def test_broadcaster_initialized(self):
        """The coordinator should expose a broadcaster after start()."""
        m = Mesh(mode="distributed", config={'bind_port': 7955})
        m.add(DataGeneratorAgent)

        await m.start()
        try:
            assert m._p2p_coordinator.broadcaster is not None
        finally:
            await m.stop()

    @pytest.mark.asyncio
    async def test_status_manager_tracks_steps(self):
        """A completed step should be tracked with status 'completed'."""
        m = Mesh(mode="distributed", config={'bind_port': 7956})
        m.add(DataGeneratorAgent)

        await m.start()
        try:
            await m.workflow("test-status", [
                {"id": "tracked-step", "agent": "generator", "task": "Track me"}
            ])

            # The engine should have recorded the step's terminal state.
            status = m._workflow_engine.get_status("tracked-step")
            assert status is not None
            assert status["status"] == "completed"
        finally:
            await m.stop()

    @pytest.mark.asyncio
    async def test_failed_step_status_tracked(self):
        """A raising agent should leave its step marked 'failed'."""
        m = Mesh(mode="distributed", config={'bind_port': 7957})
        m.add(FailingAgent)

        await m.start()
        try:
            await m.workflow("test-fail-status", [
                {"id": "failing-step", "agent": "failing", "task": "Will fail"}
            ])

            status = m._workflow_engine.get_status("failing-step")
            assert status is not None
            assert status["status"] == "failed"
        finally:
            await m.stop()
360
+
361
+
362
+ # ═══════════════════════════════════════════════════════════════════════════════
363
+ # TEST CLASS: Distributed Mode Lifecycle
364
+ # ═══════════════════════════════════════════════════════════════════════════════
365
+
366
class TestDistributedModeLifecycle:
    """Start/stop lifecycle behaviour of a distributed mesh."""

    @pytest.mark.asyncio
    async def test_graceful_shutdown(self):
        """stop() should flip the started flag back off."""
        m = Mesh(mode="distributed", config={'bind_port': 7958})
        m.add(DataGeneratorAgent)

        await m.start()
        assert m._started is True

        await m.stop()
        assert m._started is False

    @pytest.mark.asyncio
    async def test_workflow_fails_before_start(self):
        """Calling workflow() on an unstarted mesh raises RuntimeError."""
        m = Mesh(mode="distributed", config={'bind_port': 7959})
        m.add(DataGeneratorAgent)

        # Intentionally never call m.start().
        with pytest.raises(RuntimeError) as exc_info:
            await m.workflow("test", [{"agent": "generator", "task": "fail"}])

        assert "not started" in str(exc_info.value)

    @pytest.mark.asyncio
    async def test_agent_setup_called(self):
        """mesh.start() should invoke each agent's setup()."""
        setup_called = []

        class TrackingAgent(Agent):
            role = "tracker"
            capabilities = ["tracking"]

            async def setup(self):
                await super().setup()
                setup_called.append(self.agent_id)

            async def execute_task(self, task):
                return {"status": "success"}

        m = Mesh(mode="distributed", config={'bind_port': 7960})
        agent = m.add(TrackingAgent)

        await m.start()
        try:
            assert agent.agent_id in setup_called
        finally:
            await m.stop()

    @pytest.mark.asyncio
    async def test_agent_teardown_called(self):
        """mesh.stop() should invoke each agent's teardown()."""
        teardown_called = []

        class TrackingAgent(Agent):
            role = "tracker"
            capabilities = ["tracking"]

            async def teardown(self):
                await super().teardown()
                teardown_called.append(self.agent_id)

            async def execute_task(self, task):
                return {"status": "success"}

        m = Mesh(mode="distributed", config={'bind_port': 7961})
        agent = m.add(TrackingAgent)

        await m.start()
        await m.stop()

        assert agent.agent_id in teardown_called
442
+
443
+
444
+ # ═══════════════════════════════════════════════════════════════════════════════
445
+ # TEST CLASS: Comparison with Other Modes
446
+ # ═══════════════════════════════════════════════════════════════════════════════
447
+
448
class TestDistributedVsOtherModes:
    """Contrasts distributed mode with autonomous and p2p modes."""

    def test_autonomous_mode_has_no_p2p(self):
        """Autonomous mode never creates a P2P coordinator."""
        m = Mesh(mode="autonomous")
        m.add(DataGeneratorAgent)

        # Even before start, no coordinator should exist.
        assert m._p2p_coordinator is None

    @pytest.mark.asyncio
    async def test_autonomous_starts_without_p2p(self):
        """Autonomous mode starts with a workflow engine but no P2P."""
        m = Mesh(mode="autonomous")
        m.add(DataGeneratorAgent)

        await m.start()
        try:
            assert m._workflow_engine is not None
            assert m._p2p_coordinator is None
        finally:
            await m.stop()

    @pytest.mark.asyncio
    async def test_p2p_mode_has_no_workflow_engine(self):
        """P2P mode starts with a coordinator but no workflow engine."""
        m = Mesh(mode="p2p", config={'bind_port': 7962})

        class P2PAgent(Agent):
            role = "p2p_agent"
            capabilities = ["p2p"]

            async def execute_task(self, task):
                return {"status": "success"}

            async def run(self):
                # Idle until the mesh requests shutdown.
                while not self.shutdown_requested:
                    await asyncio.sleep(0.1)

        m.add(P2PAgent)

        await m.start()
        try:
            assert m._p2p_coordinator is not None
            assert m._workflow_engine is None
        finally:
            await m.stop()

    @pytest.mark.asyncio
    async def test_distributed_has_both(self):
        """Distributed mode has BOTH the coordinator and the engine."""
        m = Mesh(mode="distributed", config={'bind_port': 7963})
        m.add(DataGeneratorAgent)

        await m.start()
        try:
            assert m._p2p_coordinator is not None, "Missing P2P coordinator"
            assert m._workflow_engine is not None, "Missing workflow engine"
        finally:
            await m.stop()
515
+
516
+
517
+ # ═══════════════════════════════════════════════════════════════════════════════
518
+ # MANUAL RUN
519
+ # ═══════════════════════════════════════════════════════════════════════════════
520
+
521
async def run_manual_test():
    """Run a manual demonstration of distributed mode.

    Builds a three-agent distributed mesh, executes a dependent
    three-step pipeline, then prints the results, the workflow memory,
    and the context injected into the analyzer.

    Fixes over the original: the mesh is now stopped in a ``finally``
    block so the port and background tasks are released even when the
    workflow raises, and the context printout is guarded against the
    analyze step never having run (previously an IndexError).
    """
    print("\n" + "="*70)
    print("DISTRIBUTED MODE - SINGLE NODE DEMONSTRATION")
    print("="*70)

    # Create mesh
    mesh = Mesh(mode="distributed", config={'bind_port': 7964})

    generator = mesh.add(DataGeneratorAgent)
    analyzer = mesh.add(DataAnalyzerAgent)
    storage = mesh.add(DataStorageAgent)

    print(f"\n[SETUP] Created mesh with {len(mesh.agents)} agents")
    print(f" - Generator: {generator.role} ({generator.capabilities})")
    print(f" - Analyzer: {analyzer.role} ({analyzer.capabilities})")
    print(f" - Storage: {storage.role} ({storage.capabilities})")

    # Start mesh
    await mesh.start()
    print("\n[START] Mesh started")
    print(f" - P2P Coordinator: {mesh._p2p_coordinator is not None}")
    print(f" - Workflow Engine: {mesh._workflow_engine is not None}")

    try:
        # Execute a pipeline where each step depends on the previous one.
        print("\n[WORKFLOW] Executing pipeline with dependencies...")

        results = await mesh.workflow("demo-pipeline", [
            {"id": "generate", "agent": "generator", "task": "Generate user data"},
            {"id": "analyze", "agent": "analyzer", "task": "Analyze patterns", "depends_on": ["generate"]},
            {"id": "store", "agent": "storage", "task": "Save results", "depends_on": ["analyze"]}
        ])

        print("\n[RESULTS]")
        for i, result in enumerate(results):
            status = result.get("status", "unknown")
            agent = result.get("agent", "unknown")
            print(f" Step {i+1}: {status} (executed by {agent})")

        # Show what the workflow engine retained per step.
        print("\n[MEMORY] Workflow memory contains:")
        for step_id, data in mesh._workflow_engine.get_memory().items():
            print(f" - {step_id}: {type(data).__name__}")

        # Show context injection; guard against the analyze step not running.
        if analyzer.context_received:
            print(f"\n[CONTEXT] Analyzer received context: {analyzer.context_received[0].get('previous_step_results', {}).keys()}")
    finally:
        # Always release the port/background tasks, even on failure above.
        await mesh.stop()
        print("\n[STOP] Mesh stopped gracefully")

    print("\n" + "="*70)
    print("DEMONSTRATION COMPLETE")
    print("="*70)


if __name__ == "__main__":
    asyncio.run(run_manual_test())