devduck 0.1.0-py3-none-any.whl → 0.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of devduck might be problematic.
- devduck/__init__.py +546 -91
- devduck/__main__.py +7 -0
- devduck/_version.py +34 -0
- devduck/tools/__init__.py +7 -0
- devduck/tools/install_tools.py +308 -0
- devduck/tools/mcp_server.py +572 -0
- devduck/tools/tcp.py +263 -93
- devduck/tools/websocket.py +492 -0
- {devduck-0.1.0.dist-info → devduck-0.2.0.dist-info}/METADATA +48 -11
- devduck-0.2.0.dist-info/RECORD +16 -0
- devduck-0.1.0.dist-info/RECORD +0 -11
- {devduck-0.1.0.dist-info → devduck-0.2.0.dist-info}/WHEEL +0 -0
- {devduck-0.1.0.dist-info → devduck-0.2.0.dist-info}/entry_points.txt +0 -0
- {devduck-0.1.0.dist-info → devduck-0.2.0.dist-info}/licenses/LICENSE +0 -0
- {devduck-0.1.0.dist-info → devduck-0.2.0.dist-info}/top_level.txt +0 -0
devduck/tools/websocket.py ADDED
@@ -0,0 +1,492 @@
+"""WebSocket tool for DevDuck agents with real-time streaming support.
+
+This module provides WebSocket server functionality for DevDuck agents,
+allowing them to communicate over WebSocket protocol with real-time response streaming.
+The tool runs server operations in background threads, enabling concurrent
+communication without blocking the main agent.
+
+Key Features:
+1. WebSocket Server: Listen for incoming connections and process them with a DevDuck agent
+2. Real-time Streaming: Responses stream to clients as they're generated (non-blocking)
+3. Concurrent Processing: Handle multiple messages simultaneously
+4. Background Processing: Server runs in a background thread
+5. Per-Connection DevDuck: Creates a fresh DevDuck instance for each client connection
+6. Callback Handler: Uses Strands callback system for efficient streaming
+7. Browser Compatible: Works with browser WebSocket clients
+
+Message Format:
+```json
+{
+    "type": "turn_start" | "chunk" | "tool_start" | "tool_end" | "turn_end",
+    "turn_id": "uuid",
+    "data": "text content",
+    "timestamp": 1234567890.123
+}
+```
+
+Usage with DevDuck Agent:
+
+```python
+from devduck import devduck
+
+# Start a streaming WebSocket server
+result = devduck.agent.tool.websocket(
+    action="start_server",
+    host="127.0.0.1",
+    port=8080,
+    system_prompt="You are a helpful WebSocket server assistant.",
+)
+
+# Stop the WebSocket server
+result = devduck.agent.tool.websocket(action="stop_server", port=8080)
+```
+
+For testing with browser:
+```javascript
+const ws = new WebSocket('ws://localhost:8080');
+ws.onmessage = (event) => {
+    const msg = JSON.parse(event.data);
+    console.log(`[${msg.turn_id}] ${msg.type}: ${msg.data}`);
+};
+ws.send('Hello DevDuck!');
+```
+"""
+
+import logging
+import threading
+import time
+import os
+import asyncio
+import json
+import uuid
+from typing import Any
+from concurrent.futures import ThreadPoolExecutor
+
+from strands import Agent, tool
+
+logger = logging.getLogger(__name__)
+
+# Global registry to store server threads
+WS_SERVER_THREADS: dict[int, dict[str, Any]] = {}
+
+
+class WebSocketStreamingCallbackHandler:
+    """Callback handler that streams agent responses directly over WebSocket with turn tracking."""
+
+    def __init__(self, websocket, loop, turn_id: str):
+        """Initialize the streaming handler.
+
+        Args:
+            websocket: The WebSocket connection to stream data to
+            loop: The event loop to use for async operations
+            turn_id: Unique identifier for this conversation turn
+        """
+        self.websocket = websocket
+        self.loop = loop
+        self.turn_id = turn_id
+        self.tool_count = 0
+        self.previous_tool_use = None
+
+    async def _send_message(
+        self, msg_type: str, data: str = "", metadata: dict = None
+    ) -> None:
+        """Send a structured message over WebSocket.
+
+        Args:
+            msg_type: Message type (turn_start, chunk, tool_start, tool_end, turn_end)
+            data: Text content
+            metadata: Additional metadata
+        """
+        try:
+            message = {
+                "type": msg_type,
+                "turn_id": self.turn_id,
+                "data": data,
+                "timestamp": time.time(),
+            }
+            if metadata:
+                message.update(metadata)
+
+            await self.websocket.send(json.dumps(message))
+        except Exception as e:
+            logger.warning(f"Failed to send message over WebSocket: {e}")
+
+    def _schedule_message(
+        self, msg_type: str, data: str = "", metadata: dict = None
+    ) -> None:
+        """Schedule an async message send from sync context.
+
+        Args:
+            msg_type: Message type
+            data: Text content
+            metadata: Additional metadata
+        """
+        asyncio.run_coroutine_threadsafe(
+            self._send_message(msg_type, data, metadata), self.loop
+        )
+
+    def __call__(self, **kwargs: Any) -> None:
+        """Stream events to WebSocket in real-time with turn tracking."""
+        reasoningText = kwargs.get("reasoningText", False)
+        data = kwargs.get("data", "")
+        complete = kwargs.get("complete", False)
+        current_tool_use = kwargs.get("current_tool_use", {})
+        message = kwargs.get("message", {})
+
+        # Stream reasoning text
+        if reasoningText:
+            self._schedule_message("chunk", reasoningText, {"reasoning": True})
+
+        # Stream response text chunks
+        if data:
+            self._schedule_message("chunk", data)
+
+        # Stream tool invocation notifications
+        if current_tool_use and current_tool_use.get("name"):
+            tool_name = current_tool_use.get("name", "Unknown tool")
+            if self.previous_tool_use != current_tool_use:
+                self.previous_tool_use = current_tool_use
+                self.tool_count += 1
+                self._schedule_message(
+                    "tool_start", tool_name, {"tool_number": self.tool_count}
+                )
+
+        # Stream tool results
+        if isinstance(message, dict) and message.get("role") == "user":
+            for content in message.get("content", []):
+                if isinstance(content, dict):
+                    tool_result = content.get("toolResult")
+                    if tool_result:
+                        status = tool_result.get("status", "unknown")
+                        self._schedule_message(
+                            "tool_end", status, {"success": status == "success"}
+                        )
+
+
+async def process_message_async(connection_agent, message, websocket, loop, turn_id):
+    """Process a message in a concurrent task.
+
+    Args:
+        connection_agent: The agent instance to process the message
+        message: The message to process
+        websocket: WebSocket connection
+        loop: Event loop
+        turn_id: Unique turn ID
+    """
+    try:
+        # Send turn start notification
+        turn_start = {
+            "type": "turn_start",
+            "turn_id": turn_id,
+            "data": message,
+            "timestamp": time.time(),
+        }
+        await websocket.send(json.dumps(turn_start))
+
+        # Create callback handler for this turn
+        streaming_handler = WebSocketStreamingCallbackHandler(websocket, loop, turn_id)
+        connection_agent.callback_handler = streaming_handler
+
+        # Process message in a thread to avoid blocking the event loop
+        with ThreadPoolExecutor() as executor:
+            await loop.run_in_executor(executor, connection_agent, message)
+
+        # Send turn end notification
+        turn_end = {"type": "turn_end", "turn_id": turn_id, "timestamp": time.time()}
+        await websocket.send(json.dumps(turn_end))
+
+    except Exception as e:
+        logger.error(f"Error processing message in turn {turn_id}: {e}", exc_info=True)
+        error_msg = {
+            "type": "error",
+            "turn_id": turn_id,
+            "data": f"Error processing message: {e}",
+            "timestamp": time.time(),
+        }
+        await websocket.send(json.dumps(error_msg))
+
+
+async def handle_websocket_client(websocket, system_prompt: str):
+    """Handle a WebSocket client connection with streaming responses.
+
+    Args:
+        websocket: WebSocket connection object
+        system_prompt: System prompt for the DevDuck agent
+    """
+    client_address = websocket.remote_address
+    logger.info(f"WebSocket connection established with {client_address}")
+
+    # Get the current event loop
+    loop = asyncio.get_running_loop()
+
+    # Import DevDuck and create a new instance for this connection
+    try:
+        from devduck import DevDuck
+
+        # Create a new DevDuck instance with auto_start_servers=False to avoid recursion
+        connection_devduck = DevDuck(auto_start_servers=False)
+
+        # Override system prompt if provided
+        if connection_devduck.agent and system_prompt:
+            connection_devduck.agent.system_prompt += (
+                "\nCustom system prompt:" + system_prompt
+            )
+
+        connection_agent = connection_devduck.agent
+
+    except Exception as e:
+        logger.error(f"Failed to create DevDuck instance: {e}", exc_info=True)
+        # Fallback to basic Agent if DevDuck fails
+        from strands import Agent
+        from strands.models.ollama import OllamaModel
+
+        agent_model = OllamaModel(
+            host=os.getenv("OLLAMA_HOST", "http://localhost:11434"),
+            model_id=os.getenv("OLLAMA_MODEL", "qwen3:1.7b"),
+            temperature=1,
+            keep_alive="5m",
+        )
+
+        connection_agent = Agent(
+            model=agent_model,
+            tools=[],
+            system_prompt=system_prompt
+            or "You are a helpful WebSocket server assistant.",
+        )
+
+    # Track active tasks for concurrent processing
+    active_tasks = set()
+
+    try:
+        # Send welcome message
+        welcome = {
+            "type": "connected",
+            "data": "🦆 Welcome to DevDuck WebSocket Server! Real-time streaming enabled.",
+            "timestamp": time.time(),
+        }
+        await websocket.send(json.dumps(welcome))
+
+        async for message in websocket:
+            message = message.strip()
+            logger.info(f"Received from {client_address}: {message}")
+
+            if message.lower() == "exit":
+                bye = {
+                    "type": "disconnected",
+                    "data": "Connection closed by client request.",
+                    "timestamp": time.time(),
+                }
+                await websocket.send(json.dumps(bye))
+                logger.info(f"Client {client_address} requested to exit")
+                break
+
+            # Generate unique turn ID for this conversation turn
+            turn_id = str(uuid.uuid4())
+
+            # Launch message processing as concurrent task (don't await)
+            task = asyncio.create_task(
+                process_message_async(
+                    connection_agent, message, websocket, loop, turn_id
+                )
+            )
+            active_tasks.add(task)
+
+            # Clean up completed tasks
+            task.add_done_callback(active_tasks.discard)
+
+        # Wait for all active tasks to complete before closing
+        if active_tasks:
+            logger.info(f"Waiting for {len(active_tasks)} active tasks to complete...")
+            await asyncio.gather(*active_tasks, return_exceptions=True)
+
+    except Exception as e:
+        logger.error(
+            f"Error handling WebSocket client {client_address}: {e}", exc_info=True
+        )
+    finally:
+        logger.info(f"WebSocket connection with {client_address} closed")
+
+
+def run_websocket_server(
+    host: str,
+    port: int,
+    system_prompt: str,
+) -> None:
+    """Run a WebSocket server that processes client requests with DevDuck instances."""
+    import websockets
+
+    WS_SERVER_THREADS[port]["running"] = True
+    WS_SERVER_THREADS[port]["connections"] = 0
+    WS_SERVER_THREADS[port]["start_time"] = time.time()
+
+    async def server_handler(websocket):
+        """Handle incoming WebSocket connections.
+
+        Args:
+            websocket: WebSocket connection object
+        """
+        WS_SERVER_THREADS[port]["connections"] += 1
+        await handle_websocket_client(websocket, system_prompt)
+
+    async def start_server():
+        stop_future = asyncio.Future()
+        WS_SERVER_THREADS[port]["stop_future"] = stop_future
+
+        server = await websockets.serve(server_handler, host, port)
+        logger.info(f"WebSocket Server listening on {host}:{port}")
+
+        # Wait for stop signal
+        await stop_future
+
+        # Close the server
+        server.close()
+        await server.wait_closed()
+
+    try:
+        loop = asyncio.new_event_loop()
+        asyncio.set_event_loop(loop)
+        WS_SERVER_THREADS[port]["loop"] = loop
+        loop.run_until_complete(start_server())
+    except Exception as e:
+        logger.error(f"WebSocket server error on {host}:{port}: {e}", exc_info=True)
+    finally:
+        logger.info(f"WebSocket Server on {host}:{port} stopped")
+        WS_SERVER_THREADS[port]["running"] = False
+
+
+@tool
+def websocket(
+    action: str,
+    host: str = "127.0.0.1",
+    port: int = 8080,
+    system_prompt: str = "You are a helpful WebSocket server assistant.",
+) -> dict:
+    """Create and manage WebSocket servers with real-time streaming.
+
+    Args:
+        action: Action to perform (start_server, stop_server, get_status)
+        host: Host address for server
+        port: Port number for server
+        system_prompt: System prompt for the server DevDuck instances
+
+    Returns:
+        Dictionary containing status and response content
+    """
+    if action == "start_server":
+        if port in WS_SERVER_THREADS and WS_SERVER_THREADS[port].get("running", False):
+            return {
+                "status": "error",
+                "content": [
+                    {
+                        "text": f"❌ Error: WebSocket Server already running on port {port}"
+                    }
+                ],
+            }
+
+        WS_SERVER_THREADS[port] = {"running": False}
+        server_thread = threading.Thread(
+            target=run_websocket_server,
+            args=(host, port, system_prompt),
+        )
+        server_thread.daemon = True
+        server_thread.start()
+
+        time.sleep(0.5)
+
+        if not WS_SERVER_THREADS[port].get("running", False):
+            return {
+                "status": "error",
+                "content": [
+                    {
+                        "text": f"❌ Error: Failed to start WebSocket Server on {host}:{port}"
+                    }
+                ],
+            }
+
+        return {
+            "status": "success",
+            "content": [
+                {"text": f"✅ WebSocket Server started successfully on {host}:{port}"},
+                {"text": f"System prompt: {system_prompt}"},
+                {"text": "🌊 Real-time streaming with concurrent message processing"},
+                {"text": "📦 Structured JSON messages with turn_id"},
+                {
+                    "text": "🦆 Server creates a new DevDuck instance for each connection"
+                },
+                {"text": "⚡ Send multiple messages without waiting!"},
+                {"text": f"📝 Test with: ws://localhost:{port}"},
+            ],
+        }
+
+    elif action == "stop_server":
+        if port not in WS_SERVER_THREADS or not WS_SERVER_THREADS[port].get(
+            "running", False
+        ):
+            return {
+                "status": "error",
+                "content": [
+                    {"text": f"❌ Error: No WebSocket Server running on port {port}"}
+                ],
+            }
+
+        WS_SERVER_THREADS[port]["running"] = False
+
+        # Signal the server to stop
+        if "stop_future" in WS_SERVER_THREADS[port]:
+            loop = WS_SERVER_THREADS[port]["loop"]
+            loop.call_soon_threadsafe(
+                lambda: WS_SERVER_THREADS[port]["stop_future"].set_result(None)
+            )
+
+        time.sleep(1.0)
+
+        connections = WS_SERVER_THREADS[port].get("connections", 0)
+        uptime = time.time() - WS_SERVER_THREADS[port].get("start_time", time.time())
+
+        del WS_SERVER_THREADS[port]
+
+        return {
+            "status": "success",
+            "content": [
+                {"text": f"✅ WebSocket Server on port {port} stopped successfully"},
+                {
+                    "text": f"Statistics: {connections} connections handled, uptime {uptime:.2f} seconds"
+                },
+            ],
+        }
+
+    elif action == "get_status":
+        if not WS_SERVER_THREADS:
+            return {
+                "status": "success",
+                "content": [{"text": "No WebSocket Servers running"}],
+            }
+
+        status_info = []
+        for port, data in WS_SERVER_THREADS.items():
+            if data.get("running", False):
+                uptime = time.time() - data.get("start_time", time.time())
+                connections = data.get("connections", 0)
+                status_info.append(
+                    f"Port {port}: Running - {connections} connections, uptime {uptime:.2f}s"
+                )
+            else:
+                status_info.append(f"Port {port}: Stopped")
+
+        return {
+            "status": "success",
+            "content": [
+                {"text": "WebSocket Server Status:"},
+                {"text": "\n".join(status_info)},
+            ],
+        }
+
+    else:
+        return {
+            "status": "error",
+            "content": [
+                {
+                    "text": f"Error: Unknown action '{action}'. Supported: start_server, stop_server, get_status"
+                }
+            ],
+        }
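For reference, here is a minimal Python client sketch (not part of the package) that exercises the streaming protocol the new module implements. It assumes the server is already listening on ws://localhost:8080 and uses the third-party `websockets` package, which this release adds as a dependency; the message fields follow the format documented in the module docstring above.

```python
import asyncio
import json

import websockets  # client side of the same library the server uses


async def main() -> None:
    async with websockets.connect("ws://localhost:8080") as ws:
        await ws.send("what's the time?")
        async for raw in ws:
            msg = json.loads(raw)
            # Every message carries "type", "data", and "timestamp";
            # turn-scoped messages also carry "turn_id".
            print(f"[{msg.get('turn_id', '-')}] {msg['type']}: {msg.get('data', '')}")
            if msg["type"] == "turn_end":
                break


if __name__ == "__main__":
    asyncio.run(main())
```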
{devduck-0.1.0.dist-info → devduck-0.2.0.dist-info}/METADATA
@@ -1,9 +1,7 @@
 Metadata-Version: 2.4
 Name: devduck
-Version: 0.1.0
+Version: 0.2.0
 Summary: 🦆 Extreme minimalist self-adapting AI agent - one file, self-healing, runtime dependencies
-Home-page: https://github.com/cagataycali/devduck
-Author: maxs
 Author-email: duck <hey@devduck.dev>
 License-Expression: MIT
 Project-URL: Homepage, https://github.com/cagataycali/devduck
@@ -33,10 +31,12 @@ Requires-Dist: strands-agents[ollama]
 Requires-Dist: strands-agents[openai]
 Requires-Dist: strands-agents[anthropic]
 Requires-Dist: strands-agents-tools
-
-
+Requires-Dist: strands-fun-tools[audio]
+Requires-Dist: strands-fun-tools[vision]
+Requires-Dist: strands-fun-tools[all]
+Requires-Dist: websockets
+Requires-Dist: prompt_toolkit
 Dynamic: license-file
-Dynamic: requires-python
 
 # 🦆 DevDuck
 
@@ -55,18 +55,54 @@ Requires: Python 3.10+, Ollama running
 ## Use
 
 ```bash
-#
-devduck "what's the time?"
-
-# Interactive
+# Start DevDuck (auto-starts TCP, WebSocket, MCP servers)
 devduck
 
+# CLI mode
+devduck "what's the time?"
+
 # Python
 import devduck
-
 devduck("calculate 2+2")
 ```
 
+## Auto-Started Servers
+
+When you run `devduck`, three servers start automatically:
+
+- **🌐 Web UI**: [http://cagataycali.github.io/devduck](http://cagataycali.github.io/devduck) (auto-connects)
+- **🔌 TCP**: `nc localhost 9999` (raw socket)
+- **🌊 WebSocket**: `ws://localhost:8080` (structured JSON)
+- **🔗 MCP**: `http://localhost:8000/mcp` (Model Context Protocol)
+
+### Connect via MCP
+
+Add to your MCP client (e.g., Claude Desktop):
+
+```json
+{
+  "mcpServers": {
+    "devduck": {
+      "command": "uvx",
+      "args": [
+        "strands-mcp-server",
+        "--upstream-url",
+        "http://localhost:8000/mcp/"
+      ],
+      "disabled": false
+    }
+  }
+}
+```
+
+### Connect via Terminal
+
+```bash
+# Direct TCP connection
+nc localhost 9999
+> what's the time?
+```
+
 ## Features
 
 - **Self-healing** - Auto-fixes deps, models, errors
@@ -74,6 +110,7 @@ devduck("calculate 2+2")
 - **Adaptive** - Picks model based on OS (macOS: 1.7b, Linux: 30b)
 - **14 tools** - shell, editor, files, python, calculator, tcp, etc.
 - **History aware** - Remembers shell/conversation context
+- **Multi-protocol** - TCP, WebSocket, MCP, CLI, Python
 
 ## Create Tool
 
devduck-0.2.0.dist-info/RECORD ADDED
@@ -0,0 +1,16 @@
+devduck/__init__.py,sha256=w7H7wKWkeD0fKUhLMJ4dxxrWCkRH_R7w3maxjJI0WgU,51881
+devduck/__main__.py,sha256=aeF2RR4k7lzSR2X1QKV9XQPCKhtsH0JYUv2etBBqmL0,145
+devduck/_version.py,sha256=Dg8AmJomLVpjKL6prJylOONZAPRtB86LOce7dorQS_A,704
+devduck/install.sh,sha256=tYq2WWZFCBEMbxCneKAw3GSNAG1zNhpd-kzW1l5ZISw,990
+devduck/test_redduck.py,sha256=nqRchR7d54jWGx7JN5tji2ZV4Ek4L9s-P7hp0mKjA0Y,1773
+devduck/tools/__init__.py,sha256=mu3V4jL2ACN4f-pnUID_A2p6o3Yc_-V_y9071PduCR0,177
+devduck/tools/install_tools.py,sha256=wm_67b9IfY-2wRuWgxuEKhaSIV5vNfbGmZL3G9dGi2A,10348
+devduck/tools/mcp_server.py,sha256=oyF1gb7K-OlxyJLUO3L-vNo2ajKzIrcnT1crwKMOkhU,20118
+devduck/tools/tcp.py,sha256=4KtyRlgaOLKXT3PU0yFRM79FoOkn3-S10dVL4L5iG80,22063
+devduck/tools/websocket.py,sha256=ryKE1WbfaTFguwp-AzJlyCOifHE5uXJAVdHD8jecJgQ,16681
+devduck-0.2.0.dist-info/licenses/LICENSE,sha256=CVGEiNh6cW1mgAKW83Q0P4xrQEXvqc6W-rb789W_IHM,1060
+devduck-0.2.0.dist-info/METADATA,sha256=yl_DpWxBYGIWkbehwUFH-jyD7W1O945cRCJsE9Ufzew,3902
+devduck-0.2.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+devduck-0.2.0.dist-info/entry_points.txt,sha256=BAMQaIg_BLZQOTk12bT7hy1dE9oGPLt-_dTbI4cnBnQ,40
+devduck-0.2.0.dist-info/top_level.txt,sha256=ySXWlVronp8xHYfQ_Hdfr463e0EnbWuqyuxs94EU7yk,8
+devduck-0.2.0.dist-info/RECORD,,
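As an aside, each RECORD row has the form `path,sha256=<digest>,<size>`, where the digest is the urlsafe-base64 SHA-256 of the file with trailing `=` padding stripped. A small sketch (assuming the wheel has been extracted into the current directory) that recomputes one entry:

```python
import base64
import hashlib
from pathlib import Path


def record_hash(path: Path) -> str:
    """Return the RECORD-style hash field for a file."""
    digest = hashlib.sha256(path.read_bytes()).digest()
    # RECORD uses urlsafe base64 with the trailing '=' padding removed.
    return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode()


# Expected per the RECORD above:
# sha256=ryKE1WbfaTFguwp-AzJlyCOifHE5uXJAVdHD8jecJgQ
print(record_hash(Path("devduck/tools/websocket.py")))
```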
devduck-0.1.0.dist-info/RECORD DELETED
@@ -1,11 +0,0 @@
-devduck/__init__.py,sha256=AaMXDIqX4j7FUfIF5rvdkJcSlLLtclHpuIQ5HeiiDQ4,34903
-devduck/install.sh,sha256=tYq2WWZFCBEMbxCneKAw3GSNAG1zNhpd-kzW1l5ZISw,990
-devduck/test_redduck.py,sha256=nqRchR7d54jWGx7JN5tji2ZV4Ek4L9s-P7hp0mKjA0Y,1773
-devduck/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-devduck/tools/tcp.py,sha256=zK8mrWh9-v3acIJ_qWzr9hi-SaONI4u7ScUWEuxWhN8,15477
-devduck-0.1.0.dist-info/licenses/LICENSE,sha256=CVGEiNh6cW1mgAKW83Q0P4xrQEXvqc6W-rb789W_IHM,1060
-devduck-0.1.0.dist-info/METADATA,sha256=x5csUDki9FxOvSTopwOOatfCpVWH1Z9QYvi5mYHRzJI,2971
-devduck-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-devduck-0.1.0.dist-info/entry_points.txt,sha256=BAMQaIg_BLZQOTk12bT7hy1dE9oGPLt-_dTbI4cnBnQ,40
-devduck-0.1.0.dist-info/top_level.txt,sha256=ySXWlVronp8xHYfQ_Hdfr463e0EnbWuqyuxs94EU7yk,8
-devduck-0.1.0.dist-info/RECORD,,
The remaining four files ({devduck-0.1.0.dist-info → devduck-0.2.0.dist-info}/WHEEL, entry_points.txt, licenses/LICENSE, top_level.txt) are unchanged between 0.1.0 and 0.2.0.