jarviscore-framework 0.1.0 (jarviscore_framework-0.1.0-py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the package contents as they appear in their respective public registries.
- examples/calculator_agent_example.py +77 -0
- examples/multi_agent_workflow.py +132 -0
- examples/research_agent_example.py +76 -0
- jarviscore/__init__.py +54 -0
- jarviscore/cli/__init__.py +7 -0
- jarviscore/cli/__main__.py +33 -0
- jarviscore/cli/check.py +404 -0
- jarviscore/cli/smoketest.py +371 -0
- jarviscore/config/__init__.py +7 -0
- jarviscore/config/settings.py +128 -0
- jarviscore/core/__init__.py +7 -0
- jarviscore/core/agent.py +163 -0
- jarviscore/core/mesh.py +463 -0
- jarviscore/core/profile.py +64 -0
- jarviscore/docs/API_REFERENCE.md +932 -0
- jarviscore/docs/CONFIGURATION.md +753 -0
- jarviscore/docs/GETTING_STARTED.md +600 -0
- jarviscore/docs/TROUBLESHOOTING.md +424 -0
- jarviscore/docs/USER_GUIDE.md +983 -0
- jarviscore/execution/__init__.py +94 -0
- jarviscore/execution/code_registry.py +298 -0
- jarviscore/execution/generator.py +268 -0
- jarviscore/execution/llm.py +430 -0
- jarviscore/execution/repair.py +283 -0
- jarviscore/execution/result_handler.py +332 -0
- jarviscore/execution/sandbox.py +555 -0
- jarviscore/execution/search.py +281 -0
- jarviscore/orchestration/__init__.py +18 -0
- jarviscore/orchestration/claimer.py +101 -0
- jarviscore/orchestration/dependency.py +143 -0
- jarviscore/orchestration/engine.py +292 -0
- jarviscore/orchestration/status.py +96 -0
- jarviscore/p2p/__init__.py +23 -0
- jarviscore/p2p/broadcaster.py +353 -0
- jarviscore/p2p/coordinator.py +364 -0
- jarviscore/p2p/keepalive.py +361 -0
- jarviscore/p2p/swim_manager.py +290 -0
- jarviscore/profiles/__init__.py +6 -0
- jarviscore/profiles/autoagent.py +264 -0
- jarviscore/profiles/customagent.py +137 -0
- jarviscore_framework-0.1.0.dist-info/METADATA +136 -0
- jarviscore_framework-0.1.0.dist-info/RECORD +55 -0
- jarviscore_framework-0.1.0.dist-info/WHEEL +5 -0
- jarviscore_framework-0.1.0.dist-info/licenses/LICENSE +21 -0
- jarviscore_framework-0.1.0.dist-info/top_level.txt +3 -0
- tests/conftest.py +44 -0
- tests/test_agent.py +165 -0
- tests/test_autoagent.py +140 -0
- tests/test_autoagent_day4.py +186 -0
- tests/test_customagent.py +248 -0
- tests/test_integration.py +293 -0
- tests/test_llm_fallback.py +185 -0
- tests/test_mesh.py +356 -0
- tests/test_p2p_integration.py +375 -0
- tests/test_remote_sandbox.py +116 -0
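
Before the file diffs, a minimal orientation sketch of how the modules above fit together, assembled only from the test code shown below (Mesh, AutoAgent, execute_task, and the 'p2p_enabled'/'bind_port' config keys all appear in tests/test_p2p_integration.py). The exact public API and defaults should be confirmed against jarviscore/docs/API_REFERENCE.md; the EchoAgent name here is illustrative only.

# Illustrative sketch only -- assembled from the test code in this diff, not from the package docs.
import asyncio

from jarviscore import Mesh
from jarviscore.profiles import AutoAgent


class EchoAgent(AutoAgent):
    # role/capabilities/system_prompt mirror the class attributes used by the tests below.
    role = "echo"
    capabilities = ["demo"]
    system_prompt = "Illustrative agent"

    async def execute_task(self, task):
        # The tests below return a dict carrying at least a "status" key.
        return {"status": "success", "output": task}


async def main():
    # P2P is off by default in autonomous mode; passing {'p2p_enabled': True, 'bind_port': 7950}
    # (the keys used by the tests) turns it on, which requires the SWIM dependency.
    mesh = Mesh(mode="autonomous")
    mesh.add(EchoAgent)
    await mesh.start()
    try:
        print(f"Agents registered: {len(mesh.agents)}")
    finally:
        await mesh.stop()


if __name__ == "__main__":
    asyncio.run(main())
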
tests/test_p2p_integration.py
@@ -0,0 +1,375 @@
+"""
+Tests for P2P Integration (Day 2)
+
+Tests SWIM protocol, keepalive, broadcaster, and P2P coordinator.
+"""
+import pytest
+import asyncio
+from jarviscore import Mesh
+from jarviscore.profiles import AutoAgent, CustomAgent
+
+
+class TestP2PAgent(AutoAgent):
+    """Test agent for P2P tests"""
+    role = "p2p_test"
+    capabilities = ["testing", "p2p"]
+    system_prompt = "Test agent for P2P integration"
+
+    async def execute_task(self, task):
+        return {"status": "success", "output": "test"}
+
+
+class TestP2PStartup:
+    """Test P2P initialization and startup"""
+
+    @pytest.mark.asyncio
+    async def test_p2p_disabled_by_default_in_autonomous_mode(self):
+        """Test that P2P is disabled by default in autonomous mode"""
+        mesh = Mesh(mode="autonomous")
+        mesh.add(TestP2PAgent)
+        await mesh.start()
+
+        # P2P should not be initialized in autonomous mode by default
+        assert mesh._p2p_coordinator is None
+
+        await mesh.stop()
+
+    @pytest.mark.asyncio
+    async def test_p2p_enabled_in_distributed_mode(self):
+        """Test that P2P is enabled in distributed mode"""
+        mesh = Mesh(mode="distributed", config={'bind_port': 7950})
+        mesh.add(TestP2PAgent)
+
+        try:
+            await mesh.start()
+
+            # P2P should be initialized in distributed mode
+            assert mesh._p2p_coordinator is not None
+            assert mesh._p2p_coordinator._started is True
+
+            await mesh.stop()
+        except Exception as e:
+            # If SWIM library is not installed, skip test
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
+
+    @pytest.mark.asyncio
+    async def test_p2p_can_be_explicitly_enabled(self):
+        """Test that P2P can be explicitly enabled via config"""
+        config = {
+            'p2p_enabled': True,
+            'bind_port': 7951
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+        mesh.add(TestP2PAgent)
+
+        try:
+            await mesh.start()
+
+            # P2P should be initialized when explicitly enabled
+            assert mesh._p2p_coordinator is not None
+            assert mesh._p2p_coordinator._started is True
+
+            await mesh.stop()
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
+
+
+class TestP2PConfiguration:
+    """Test P2P configuration"""
+
+    @pytest.mark.asyncio
+    async def test_custom_bind_port(self):
+        """Test custom bind port configuration"""
+        config = {
+            'p2p_enabled': True,
+            'bind_host': '127.0.0.1',
+            'bind_port': 7952,
+            'node_name': 'test-node-1'
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+        mesh.add(TestP2PAgent)
+
+        try:
+            await mesh.start()
+
+            # Verify configuration was applied
+            assert mesh._p2p_coordinator is not None
+
+            await mesh.stop()
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
+
+    @pytest.mark.asyncio
+    async def test_keepalive_configuration(self):
+        """Test keepalive configuration"""
+        config = {
+            'p2p_enabled': True,
+            'bind_port': 7953,
+            'keepalive_enabled': True,
+            'keepalive_interval': 60,
+            'keepalive_timeout': 10
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+        mesh.add(TestP2PAgent)
+
+        try:
+            await mesh.start()
+
+            # Verify keepalive was configured
+            assert mesh._p2p_coordinator is not None
+            assert mesh._p2p_coordinator.keepalive_manager is not None
+            assert mesh._p2p_coordinator.keepalive_manager.interval == 60
+
+            await mesh.stop()
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
+
+
+class TestP2PCapabilities:
+    """Test P2P capability announcement"""
+
+    @pytest.mark.asyncio
+    async def test_capabilities_announced(self):
+        """Test that agent capabilities are announced to mesh"""
+        config = {
+            'p2p_enabled': True,
+            'bind_port': 7954
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+
+        # Add agents with different capabilities
+        class Agent1(AutoAgent):
+            role = "agent1"
+            capabilities = ["cap1", "cap2"]
+            system_prompt = "Agent 1"
+
+            async def execute_task(self, task):
+                return {"status": "success"}
+
+        class Agent2(AutoAgent):
+            role = "agent2"
+            capabilities = ["cap2", "cap3"]
+            system_prompt = "Agent 2"
+
+            async def execute_task(self, task):
+                return {"status": "success"}
+
+        mesh.add(Agent1)
+        mesh.add(Agent2)
+
+        try:
+            await mesh.start()
+
+            # Verify capabilities were announced
+            assert mesh._p2p_coordinator is not None
+            cap_map = mesh._p2p_coordinator._capability_map
+
+            assert "cap1" in cap_map
+            assert "cap2" in cap_map
+            assert "cap3" in cap_map
+
+            # cap1 should only have agent1
+            assert len(cap_map["cap1"]) >= 1
+
+            # cap2 should have both agents
+            assert len(cap_map["cap2"]) >= 2
+
+            await mesh.stop()
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
+
+
+class TestP2PLifecycle:
+    """Test P2P lifecycle management"""
+
+    @pytest.mark.asyncio
+    async def test_clean_startup_and_shutdown(self):
+        """Test clean P2P startup and shutdown"""
+        config = {
+            'p2p_enabled': True,
+            'bind_port': 7955
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+        mesh.add(TestP2PAgent)
+
+        try:
+            # Start mesh
+            await mesh.start()
+            assert mesh._started is True
+            assert mesh._p2p_coordinator is not None
+            assert mesh._p2p_coordinator._started is True
+
+            # Stop mesh
+            await mesh.stop()
+            assert mesh._started is False
+
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
+
+    @pytest.mark.asyncio
+    async def test_multiple_start_calls_fail(self):
+        """Test that starting mesh twice raises error"""
+        config = {
+            'p2p_enabled': True,
+            'bind_port': 7956
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+        mesh.add(TestP2PAgent)
+
+        try:
+            await mesh.start()
+
+            # Second start should fail
+            with pytest.raises(RuntimeError, match="already started"):
+                await mesh.start()
+
+            await mesh.stop()
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            # Clean up even if test fails
+            try:
+                await mesh.stop()
+            except:
+                pass
+            raise
+
+
+class TestP2PIntegrationWithAgents:
+    """Test P2P integration with different agent types"""
+
+    @pytest.mark.asyncio
+    async def test_autoagent_with_p2p(self):
+        """Test AutoAgent with P2P enabled"""
+        config = {
+            'p2p_enabled': True,
+            'bind_port': 7957
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+
+        class TestAutoAgent(AutoAgent):
+            role = "auto"
+            capabilities = ["testing"]
+            system_prompt = "Test auto agent"
+
+            async def execute_task(self, task):
+                return {"status": "success", "output": "auto"}
+
+        mesh.add(TestAutoAgent)
+
+        try:
+            await mesh.start()
+
+            # Agent should work with P2P
+            assert len(mesh.agents) == 1
+            assert mesh._p2p_coordinator is not None
+
+            await mesh.stop()
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
+
+    @pytest.mark.asyncio
+    async def test_customagent_with_p2p(self):
+        """Test CustomAgent with P2P enabled"""
+        config = {
+            'p2p_enabled': True,
+            'bind_port': 7958
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+
+        class TestCustomAgent(CustomAgent):
+            role = "custom"
+            capabilities = ["testing"]
+
+            async def execute_task(self, task):
+                return {"status": "success", "output": "custom"}
+
+        mesh.add(TestCustomAgent)
+
+        try:
+            await mesh.start()
+
+            # Agent should work with P2P
+            assert len(mesh.agents) == 1
+            assert mesh._p2p_coordinator is not None
+
+            await mesh.stop()
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
+
+
+class TestP2PHealthChecks:
+    """Test P2P health monitoring"""
+
+    @pytest.mark.asyncio
+    async def test_swim_manager_health(self):
+        """Test SWIM manager health check"""
+        config = {
+            'p2p_enabled': True,
+            'bind_port': 7959
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+        mesh.add(TestP2PAgent)
+
+        try:
+            await mesh.start()
+
+            # Check SWIM manager health
+            swim_mgr = mesh._p2p_coordinator.swim_manager
+            assert swim_mgr is not None
+            assert swim_mgr.is_healthy() is True
+
+            status = swim_mgr.get_status()
+            assert status['healthy'] is True
+            assert status['started'] is True
+
+            await mesh.stop()
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
+
+    @pytest.mark.asyncio
+    async def test_keepalive_health(self):
+        """Test keepalive manager health"""
+        config = {
+            'p2p_enabled': True,
+            'bind_port': 7960,
+            'keepalive_enabled': True
+        }
+        mesh = Mesh(mode="autonomous", config=config)
+        mesh.add(TestP2PAgent)
+
+        try:
+            await mesh.start()
+
+            # Check keepalive health
+            keepalive = mesh._p2p_coordinator.keepalive_manager
+            assert keepalive is not None
+            assert keepalive._running is True
+
+            health = keepalive.get_health_status()
+            assert health['enabled'] is True
+            assert health['running'] is True
+
+            await mesh.stop()
+        except Exception as e:
+            if "swim" in str(e).lower():
+                pytest.skip("SWIM library not available")
+            raise
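
Every test above repeats the same try/await mesh.start()/except block just to skip when the SWIM dependency is missing. A sketch of how that boilerplate could be factored into a shared helper (hypothetical; not part of the package or its conftest.py) follows:

# Hypothetical helper factoring out the repeated SWIM-skip pattern from the tests above.
from contextlib import asynccontextmanager

import pytest


@asynccontextmanager
async def running_mesh(mesh):
    """Start a mesh, yield it, and always stop it; skip the test if SWIM is unavailable."""
    try:
        await mesh.start()
    except Exception as e:
        if "swim" in str(e).lower():
            pytest.skip("SWIM library not available")
        raise
    try:
        yield mesh
    finally:
        await mesh.stop()

With such a helper, each test body would reduce to an "async with running_mesh(mesh):" block plus its assertions.
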
tests/test_remote_sandbox.py
@@ -0,0 +1,116 @@
+"""
+Quick test for remote sandbox execution
+Tests connection to Azure Container Apps sandbox service
+"""
+import asyncio
+import sys
+from jarviscore.execution.sandbox import create_sandbox_executor
+
+
+async def test_remote_sandbox():
+    """Test remote sandbox with simple calculation."""
+    print("=" * 60)
+    print("Testing Remote Sandbox (Azure Container Apps)")
+    print("=" * 60)
+
+    # Create sandbox executor with remote mode
+    config = {
+        'sandbox_mode': 'remote',
+        'sandbox_service_url': 'https://browser-task-executor.bravesea-3f5f7e75.eastus.azurecontainerapps.io'
+    }
+
+    executor = create_sandbox_executor(timeout=30, config=config)
+
+    print(f"\nSandbox Mode: {executor.mode}")
+    print(f"Sandbox URL: {executor.sandbox_url}")
+    print("\n" + "-" * 60)
+
+    # Test 1: Simple calculation
+    print("\nTest 1: Simple Calculation (2 + 2)")
+    print("-" * 60)
+
+    code1 = "result = 2 + 2"
+    result1 = await executor.execute(code1, timeout=10)
+
+    print(f"Status: {result1['status']}")
+    print(f"Output: {result1.get('output')}")
+    print(f"Mode: {result1.get('mode')}")
+    print(f"Execution Time: {result1.get('execution_time', 0):.3f}s")
+
+    if result1.get('error'):
+        print(f"Error: {result1['error']}")
+        print(f"Error Type: {result1.get('error_type')}")
+
+    # Test 2: Math calculation
+    print("\n" + "-" * 60)
+    print("\nTest 2: Math Calculation (factorial of 10)")
+    print("-" * 60)
+
+    code2 = """
+import math
+result = math.factorial(10)
+"""
+
+    result2 = await executor.execute(code2, timeout=10)
+
+    print(f"Status: {result2['status']}")
+    print(f"Output: {result2.get('output')}")
+    print(f"Mode: {result2.get('mode')}")
+    print(f"Execution Time: {result2.get('execution_time', 0):.3f}s")
+
+    if result2.get('error'):
+        print(f"Error: {result2['error']}")
+        print(f"Error Type: {result2.get('error_type')}")
+
+    # Test 3: Data processing
+    print("\n" + "-" * 60)
+    print("\nTest 3: Data Processing (statistics)")
+    print("-" * 60)
+
+    code3 = """
+data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
+result = {
+    'mean': sum(data) / len(data),
+    'min': min(data),
+    'max': max(data),
+    'count': len(data)
+}
+"""
+
+    result3 = await executor.execute(code3, timeout=10)
+
+    print(f"Status: {result3['status']}")
+    print(f"Output: {result3.get('output')}")
+    print(f"Mode: {result3.get('mode')}")
+    print(f"Execution Time: {result3.get('execution_time', 0):.3f}s")
+
+    if result3.get('error'):
+        print(f"Error: {result3['error']}")
+        print(f"Error Type: {result3.get('error_type')}")
+
+    # Summary
+    print("\n" + "=" * 60)
+    print("Test Summary")
+    print("=" * 60)
+
+    tests_passed = sum([
+        result1['status'] == 'success',
+        result2['status'] == 'success',
+        result3['status'] == 'success'
+    ])
+
+    print(f"Tests Passed: {tests_passed}/3")
+
+    if tests_passed == 3:
+        print("\n✅ All remote sandbox tests passed!")
+        print("Remote execution working correctly.")
+        return 0
+    else:
+        print(f"\n❌ {3 - tests_passed} test(s) failed")
+        print("Check error messages above.")
+        return 1
+
+
+if __name__ == '__main__':
+    exit_code = asyncio.run(test_remote_sandbox())
+    sys.exit(exit_code)