jarviscore-framework 0.2.1__py3-none-any.whl → 0.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- examples/cloud_deployment_example.py +162 -0
- examples/customagent_cognitive_discovery_example.py +343 -0
- examples/fastapi_integration_example.py +570 -0
- jarviscore/__init__.py +19 -5
- jarviscore/cli/smoketest.py +8 -4
- jarviscore/core/agent.py +227 -0
- jarviscore/core/mesh.py +9 -0
- jarviscore/data/examples/cloud_deployment_example.py +162 -0
- jarviscore/data/examples/custom_profile_decorator.py +134 -0
- jarviscore/data/examples/custom_profile_wrap.py +168 -0
- jarviscore/data/examples/customagent_cognitive_discovery_example.py +343 -0
- jarviscore/data/examples/fastapi_integration_example.py +570 -0
- jarviscore/docs/API_REFERENCE.md +283 -3
- jarviscore/docs/CHANGELOG.md +139 -0
- jarviscore/docs/CONFIGURATION.md +1 -1
- jarviscore/docs/CUSTOMAGENT_GUIDE.md +997 -85
- jarviscore/docs/GETTING_STARTED.md +228 -267
- jarviscore/docs/TROUBLESHOOTING.md +1 -1
- jarviscore/docs/USER_GUIDE.md +153 -8
- jarviscore/integrations/__init__.py +16 -0
- jarviscore/integrations/fastapi.py +247 -0
- jarviscore/p2p/broadcaster.py +10 -3
- jarviscore/p2p/coordinator.py +310 -14
- jarviscore/p2p/keepalive.py +45 -23
- jarviscore/p2p/peer_client.py +311 -12
- jarviscore/p2p/swim_manager.py +9 -4
- jarviscore/profiles/__init__.py +7 -1
- jarviscore/profiles/customagent.py +295 -74
- {jarviscore_framework-0.2.1.dist-info → jarviscore_framework-0.3.1.dist-info}/METADATA +66 -18
- {jarviscore_framework-0.2.1.dist-info → jarviscore_framework-0.3.1.dist-info}/RECORD +37 -22
- {jarviscore_framework-0.2.1.dist-info → jarviscore_framework-0.3.1.dist-info}/WHEEL +1 -1
- tests/test_13_dx_improvements.py +554 -0
- tests/test_14_cloud_deployment.py +403 -0
- tests/test_15_llm_cognitive_discovery.py +684 -0
- tests/test_16_unified_dx_flow.py +947 -0
- {jarviscore_framework-0.2.1.dist-info → jarviscore_framework-0.3.1.dist-info}/licenses/LICENSE +0 -0
- {jarviscore_framework-0.2.1.dist-info → jarviscore_framework-0.3.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,554 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Test 13: DX Improvements - FastAPI Integration, CustomAgent Handlers, Cognitive Context
|
|
3
|
+
|
|
4
|
+
Tests the Developer Experience improvements:
|
|
5
|
+
- JarvisLifespan for FastAPI integration
|
|
6
|
+
- CustomAgent profile with P2P message handlers
|
|
7
|
+
- Cognitive context generation for LLM prompts
|
|
8
|
+
|
|
9
|
+
Run with: pytest tests/test_13_dx_improvements.py -v -s
|
|
10
|
+
"""
|
|
11
|
+
import asyncio
|
|
12
|
+
import sys
|
|
13
|
+
import pytest
|
|
14
|
+
import logging
|
|
15
|
+
from unittest.mock import AsyncMock, MagicMock, patch
|
|
16
|
+
|
|
17
|
+
sys.path.insert(0, '.')
|
|
18
|
+
|
|
19
|
+
# Setup logging
|
|
20
|
+
logging.basicConfig(level=logging.INFO)
|
|
21
|
+
logger = logging.getLogger(__name__)
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
25
|
+
# TEST: FASTAPI INTEGRATION (JarvisLifespan)
|
|
26
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
27
|
+
|
|
28
|
+
class TestJarvisLifespan:
    """Test FastAPI JarvisLifespan integration."""

    @pytest.mark.asyncio
    async def test_lifespan_creates_and_starts_mesh(self):
        """Test lifespan creates mesh and starts it during startup."""
        from jarviscore.profiles import CustomAgent
        from jarviscore.integrations.fastapi import JarvisLifespan

        class TestAgent(CustomAgent):
            role = "test_agent"
            capabilities = ["testing"]

            async def execute_task(self, task):
                return {"status": "success", "output": "test"}

        # Stand-in for a FastAPI application with a writable .state.
        app = MagicMock()
        app.state = MagicMock()

        lifespan = JarvisLifespan(TestAgent(), mode="p2p", bind_port=7890)

        async with lifespan(app):
            # While the context is open, the mesh exists and is running.
            assert lifespan.mesh is not None
            assert lifespan.mesh._started is True

            # Startup must have injected the mesh and agent registry into app.state.
            assert hasattr(app.state, 'jarvis_mesh')
            assert hasattr(app.state, 'jarvis_agents')
            assert 'test_agent' in app.state.jarvis_agents

        # Exiting the context performs shutdown: the mesh is stopped.
        assert lifespan.mesh._started is False

    @pytest.mark.asyncio
    async def test_lifespan_with_multiple_agents(self):
        """Test lifespan handles multiple agents correctly."""
        from jarviscore.profiles import CustomAgent
        from jarviscore.integrations.fastapi import JarvisLifespan

        class AgentA(CustomAgent):
            role = "agent_a"
            capabilities = ["capability_a"]

            async def execute_task(self, task):
                return {"status": "success"}

        class AgentB(CustomAgent):
            role = "agent_b"
            capabilities = ["capability_b"]

            async def execute_task(self, task):
                return {"status": "success"}

        app = MagicMock()
        app.state = MagicMock()

        lifespan = JarvisLifespan([AgentA(), AgentB()], mode="p2p", bind_port=7891)

        async with lifespan(app):
            # Both agents join the mesh and are exposed by role on app.state.
            assert len(lifespan.mesh.agents) == 2
            assert 'agent_a' in app.state.jarvis_agents
            assert 'agent_b' in app.state.jarvis_agents

    @pytest.mark.asyncio
    async def test_lifespan_launches_background_tasks_for_run_loops(self):
        """Test lifespan launches background tasks for agents with run() methods."""
        from jarviscore.profiles import CustomAgent
        from jarviscore.integrations.fastapi import JarvisLifespan

        run_called = False

        class AgentWithRun(CustomAgent):
            role = "runner"
            capabilities = ["running"]

            async def run(self):
                # Flag that the lifespan actually scheduled this loop,
                # then spin briefly until shutdown is requested.
                nonlocal run_called
                run_called = True
                while not self.shutdown_requested:
                    await asyncio.sleep(0.1)

            async def execute_task(self, task):
                return {"status": "success"}

        app = MagicMock()
        app.state = MagicMock()

        lifespan = JarvisLifespan(AgentWithRun(), mode="p2p", bind_port=7892)

        async with lifespan(app):
            # Give the background task a moment to be scheduled and start.
            await asyncio.sleep(0.3)
            assert run_called is True
            assert len(lifespan._background_tasks) == 1
# ═══════════════════════════════════════════════════════════════════════════════
|
|
130
|
+
# TEST: CUSTOMAGENT P2P HANDLERS
|
|
131
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
132
|
+
|
|
133
|
+
class TestCustomAgentHandlers:
    """Test CustomAgent P2P message handlers."""

    @pytest.mark.asyncio
    async def test_customagent_dispatches_request_to_handler(self):
        """Test CustomAgent dispatches REQUEST messages to on_peer_request."""
        from jarviscore.profiles import CustomAgent
        from jarviscore.p2p.messages import IncomingMessage, MessageType

        request_received = False
        request_data = None

        class TestAgent(CustomAgent):
            role = "listener"
            capabilities = ["listening"]

            async def on_peer_request(self, msg):
                nonlocal request_received, request_data
                request_received = True
                request_data = msg.data
                return {"handled": True, "echo": msg.data.get("value")}

        agent = TestAgent()
        agent._logger = MagicMock()

        # Replace the peer client so respond() calls can be inspected.
        agent.peers = MagicMock()
        agent.peers.respond = AsyncMock()

        incoming = IncomingMessage(
            sender="test_sender",
            sender_node="localhost:7946",
            type=MessageType.REQUEST,
            data={"action": "test", "value": 42},
            correlation_id="corr-123",
            timestamp=0
        )

        await agent._dispatch_message(incoming)

        # The user handler must have seen the payload.
        assert request_received is True
        assert request_data == {"action": "test", "value": 42}

        # auto_respond defaults to True, so the handler's return value was sent back.
        agent.peers.respond.assert_called_once()
        call_args = agent.peers.respond.call_args
        assert call_args[0][1] == {"handled": True, "echo": 42}

    @pytest.mark.asyncio
    async def test_customagent_dispatches_notify_to_handler(self):
        """Test CustomAgent dispatches NOTIFY messages to on_peer_notify."""
        from jarviscore.profiles import CustomAgent
        from jarviscore.p2p.messages import IncomingMessage, MessageType

        notify_received = False
        notify_data = None

        class TestAgent(CustomAgent):
            role = "listener"
            capabilities = ["listening"]

            async def on_peer_request(self, msg):
                return {}

            async def on_peer_notify(self, msg):
                nonlocal notify_received, notify_data
                notify_received = True
                notify_data = msg.data

        agent = TestAgent()
        agent._logger = MagicMock()

        incoming = IncomingMessage(
            sender="test_sender",
            sender_node="localhost:7946",
            type=MessageType.NOTIFY,
            data={"event": "task_complete", "result": "success"},
            correlation_id=None,
            timestamp=0
        )

        await agent._dispatch_message(incoming)

        assert notify_received is True
        assert notify_data == {"event": "task_complete", "result": "success"}

    @pytest.mark.asyncio
    async def test_customagent_auto_respond_disabled(self):
        """Test CustomAgent respects auto_respond=False setting."""
        from jarviscore.profiles import CustomAgent
        from jarviscore.p2p.messages import IncomingMessage, MessageType

        class TestAgent(CustomAgent):
            role = "listener"
            capabilities = ["listening"]
            auto_respond = False  # Disable auto response

            async def on_peer_request(self, msg):
                return {"result": "this should not be sent automatically"}

        agent = TestAgent()
        agent._logger = MagicMock()
        agent.peers = MagicMock()
        agent.peers.respond = AsyncMock()

        incoming = IncomingMessage(
            sender="test",
            sender_node="local",
            type=MessageType.REQUEST,
            data={},
            correlation_id="123",
            timestamp=0
        )

        await agent._dispatch_message(incoming)

        # With auto_respond off, the framework must not reply on our behalf.
        agent.peers.respond.assert_not_called()

    @pytest.mark.asyncio
    async def test_customagent_error_handling(self):
        """Test CustomAgent calls on_error when handler raises exception."""
        from jarviscore.profiles import CustomAgent
        from jarviscore.p2p.messages import IncomingMessage, MessageType

        error_received = None
        error_msg = None

        class TestAgent(CustomAgent):
            role = "listener"
            capabilities = ["listening"]

            async def on_peer_request(self, msg):
                raise ValueError("Test error")

            async def on_error(self, error, msg):
                nonlocal error_received, error_msg
                error_received = error
                error_msg = msg

        agent = TestAgent()
        agent._logger = MagicMock()
        agent.peers = MagicMock()

        incoming = IncomingMessage(
            sender="test",
            sender_node="local",
            type=MessageType.REQUEST,
            data={},
            correlation_id="123",
            timestamp=0
        )

        await agent._dispatch_message(incoming)

        # on_error receives both the raised exception and the triggering message.
        assert error_received is not None
        assert isinstance(error_received, ValueError)
        assert str(error_received) == "Test error"
        assert error_msg is not None

    @pytest.mark.asyncio
    async def test_customagent_workflow_compatibility(self):
        """Test CustomAgent.execute_task() delegates to on_peer_request."""
        from jarviscore.profiles import CustomAgent

        class TestAgent(CustomAgent):
            role = "processor"
            capabilities = ["processing"]

            async def on_peer_request(self, msg):
                task = msg.data.get("task", "")
                return {"processed": task.upper()}

        agent = TestAgent()
        agent._logger = MagicMock()

        result = await agent.execute_task({"task": "hello world"})

        # Workflow-style calls are wrapped into the standard result envelope.
        assert result["status"] == "success"
        assert result["output"] == {"processed": "HELLO WORLD"}
# ═══════════════════════════════════════════════════════════════════════════════
|
|
320
|
+
# TEST: COGNITIVE CONTEXT
|
|
321
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
322
|
+
|
|
323
|
+
class TestCognitiveContext:
    """Test PeerClient cognitive context generation."""

    @staticmethod
    def _make_client():
        """Build a PeerClient wired to mocks; each test stubs list_peers itself."""
        from jarviscore.p2p.peer_client import PeerClient

        return PeerClient(
            coordinator=MagicMock(),
            agent_id="test-agent",
            agent_role="tester",
            agent_registry={},
            node_id="localhost:7946"
        )

    def test_get_cognitive_context_markdown_format(self):
        """Test markdown format output for LLM prompts."""
        client = self._make_client()

        client.list_peers = MagicMock(return_value=[
            {
                "role": "analyst",
                "agent_id": "analyst-abc123",
                "capabilities": ["analysis", "charting", "reporting"],
                "description": "Analyzes data and generates insights"
            },
            {
                "role": "scout",
                "agent_id": "scout-def456",
                "capabilities": ["research", "reconnaissance"],
                "description": ""
            },
        ])

        context = client.get_cognitive_context(format="markdown")

        # Markdown output carries the section header, peer roles/ids,
        # capability lists, descriptions, and the default tool hint.
        assert "## AVAILABLE MESH PEERS" in context
        assert "**analyst**" in context
        assert "**scout**" in context
        assert "analyst-abc123" in context
        assert "analysis, charting, reporting" in context
        assert "Analyzes data and generates insights" in context
        assert "ask_peer" in context

    def test_get_cognitive_context_json_format(self):
        """Test JSON format output."""
        import json

        client = self._make_client()
        client.list_peers = MagicMock(return_value=[
            {"role": "analyst", "agent_id": "analyst-1", "capabilities": ["analysis"]},
        ])

        context = client.get_cognitive_context(format="json")

        # Output must round-trip through the JSON parser.
        data = json.loads(context)
        assert "available_peers" in data
        assert len(data["available_peers"]) == 1
        assert data["available_peers"][0]["role"] == "analyst"

    def test_get_cognitive_context_text_format(self):
        """Test plain text format output."""
        client = self._make_client()
        client.list_peers = MagicMock(return_value=[
            {"role": "analyst", "agent_id": "analyst-1", "capabilities": ["analysis"]},
            {"role": "scout", "agent_id": "scout-1", "capabilities": ["research"]},
        ])

        context = client.get_cognitive_context(format="text")

        assert "Available Peers:" in context
        assert "- analyst: analysis" in context
        assert "- scout: research" in context

    def test_get_cognitive_context_empty_mesh(self):
        """Test output when no peers are available."""
        client = self._make_client()
        client.list_peers = MagicMock(return_value=[])

        context = client.get_cognitive_context()

        assert "No other agents" in context

    def test_get_cognitive_context_custom_tool_name(self):
        """Test custom tool name in output."""
        client = self._make_client()
        client.list_peers = MagicMock(return_value=[
            {"role": "analyst", "agent_id": "a-1", "capabilities": ["analysis"]},
        ])

        context = client.get_cognitive_context(tool_name="delegate_to_peer")

        # The custom tool name fully replaces the default "ask_peer" hint.
        assert "delegate_to_peer" in context
        assert "ask_peer" not in context

    def test_build_system_prompt(self):
        """Test build_system_prompt combines base prompt with context."""
        client = self._make_client()
        client.list_peers = MagicMock(return_value=[
            {"role": "analyst", "agent_id": "a-1", "capabilities": ["analysis"]},
        ])

        base_prompt = "You are a helpful assistant that processes data."
        prompt = client.build_system_prompt(base_prompt)

        # Both the caller's base prompt and the generated context appear.
        assert "You are a helpful assistant" in prompt
        assert "AVAILABLE MESH PEERS" in prompt
        assert "analyst" in prompt

    def test_build_system_prompt_with_options(self):
        """Test build_system_prompt passes options to get_cognitive_context."""
        client = self._make_client()
        client.list_peers = MagicMock(return_value=[
            {"role": "analyst", "agent_id": "a-1", "capabilities": ["analysis"]},
        ])

        prompt = client.build_system_prompt(
            "Base prompt.",
            include_capabilities=False,
            tool_name="custom_tool"
        )

        assert "custom_tool" in prompt
# ═══════════════════════════════════════════════════════════════════════════════
|
|
503
|
+
# TEST: INTEGRATION - CustomAgent + JarvisLifespan
|
|
504
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
505
|
+
|
|
506
|
+
class TestCustomAgentWithFastAPI:
    """Integration test for CustomAgent with FastAPI lifespan."""

    @pytest.mark.asyncio
    async def test_customagent_in_fastapi_lifespan(self):
        """Test CustomAgent works correctly with JarvisLifespan."""
        from jarviscore.profiles import CustomAgent
        from jarviscore.integrations.fastapi import JarvisLifespan

        messages_received = []

        class APIAgent(CustomAgent):
            role = "api_processor"
            capabilities = ["api_processing"]
            listen_timeout = 0.1  # Fast timeout for test

            async def on_peer_request(self, msg):
                messages_received.append(msg.data)
                return {"processed": True}

        app = MagicMock()
        app.state = MagicMock()

        lifespan = JarvisLifespan(APIAgent(), mode="p2p", bind_port=7893)

        async with lifespan(app):
            # The agent is reachable by role via the injected app state.
            assert 'api_processor' in app.state.jarvis_agents
            registered = app.state.jarvis_agents['api_processor']

            # The lifespan wired a peer client onto the agent.
            assert hasattr(registered, 'peers')
            assert registered.peers is not None

            # Cognitive context generation must be usable from here.
            context = registered.peers.get_cognitive_context()
            assert isinstance(context, str)

            # Give the background listen loop time to start.
            await asyncio.sleep(0.2)
# ═══════════════════════════════════════════════════════════════════════════════
|
|
550
|
+
# RUN TESTS
|
|
551
|
+
# ═══════════════════════════════════════════════════════════════════════════════
|
|
552
|
+
|
|
553
|
+
if __name__ == "__main__":
    # Allow running this module directly instead of via the pytest CLI.
    pytest.main([__file__, "-v", "-s"])
|