devduck-0.1.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of devduck might be problematic.

devduck/install.sh ADDED
@@ -0,0 +1,42 @@
+ #!/bin/bash
+ # 🦆 DevDuck installer - Extreme minimalist agent
+
+ echo "🦆 Installing Devduck..."
+
+ # Check Python
+ if ! command -v python3 &> /dev/null; then
+     echo "❌ Python 3 not found. Please install Python 3.8+"
+     exit 1
+ fi
+
+ # Check Ollama
+ if ! command -v ollama &> /dev/null; then
+     echo "⚠️ Ollama not found. Installing..."
+     if [[ "$OSTYPE" == "darwin"* ]]; then
+         # macOS
+         if command -v brew &> /dev/null; then
+             brew install ollama
+         else
+             curl -fsSL https://ollama.ai/install.sh | sh
+         fi
+     else
+         # Linux
+         curl -fsSL https://ollama.ai/install.sh | sh
+     fi
+ fi
+
+ # Start ollama service
+ echo "🦆 Starting Ollama service..."
+ ollama serve &
+ sleep 2
+
+ # Pull a basic model
+ echo "🦆 Pulling basic model..."
+ ollama pull qwen3:1.7b
+
+ # Test devduck
+ echo "🦆 Testing Devduck..."
+ python3 __init__.py "what's 5*7?"
+
+ echo "✅ Devduck installed successfully!"
+ echo "Usage: python3 __init__.py 'your question'"
devduck/test_redduck.py ADDED
@@ -0,0 +1,79 @@
+ #!/usr/bin/env python3
+ """🦆 DevDuck test suite"""
+
+
+ def test_import():
+     """Test basic import and initialization"""
+     try:
+         import devduck
+
+         print("✅ Import successful")
+         return True
+     except Exception as e:
+         print(f"❌ Import failed: {e}")
+         return False
+
+
+ def test_status():
+     """Test status function"""
+     try:
+         import devduck
+
+         status = devduck.status()
+         print(f"✅ Status: {status}")
+         return True
+     except Exception as e:
+         print(f"❌ Status failed: {e}")
+         return False
+
+
+ def test_basic_query():
+     """Test basic agent query"""
+     try:
+         import devduck
+
+         result = devduck.ask("what's 2+2?")
+         print(f"✅ Query result: {result}")
+         return True
+     except Exception as e:
+         print(f"❌ Query failed: {e}")
+         return False
+
+
+ def test_time_query():
+     """Test current time tool"""
+     try:
+         import devduck
+
+         result = devduck.ask("what time is it?")
+         print(f"✅ Time query: {result}")
+         return True
+     except Exception as e:
+         print(f"❌ Time query failed: {e}")
+         return False
+
+
+ def run_tests():
+     """Run all tests"""
+     print("🦆 Testing Devduck...")
+
+     tests = [test_import, test_status, test_basic_query, test_time_query]
+
+     results = []
+     for test in tests:
+         print(f"\n🧪 Running {test.__name__}...")
+         results.append(test())
+
+     passed = sum(results)
+     total = len(results)
+
+     print(f"\n🦆 Results: {passed}/{total} tests passed")
+
+     if passed == total:
+         print("🎉 All tests passed! Devduck is ready to go!")
+     else:
+         print("⚠️ Some tests failed. Check ollama service and dependencies.")
+
+
+ if __name__ == "__main__":
+     run_tests()
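The suite can be run directly thanks to its `__main__` guard, or imported and driven programmatically. The module path below is inferred from the wheel's RECORD entry for `devduck/test_redduck.py` and is an assumption; the query tests need a reachable Ollama service.

```python
# Sketch: running the bundled tests programmatically.
# `devduck.test_redduck` is an assumed module path taken from the wheel RECORD.
from devduck.test_redduck import run_tests

run_tests()  # prints per-test results and a passed/total summary
```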
devduck/tools/tcp.py ADDED
@@ -0,0 +1,457 @@
+ """TCP tool for Strands Agents to function as both server and client.
+
+ This module provides TCP server and client functionality for Strands Agents,
+ allowing them to communicate over TCP/IP networks. The tool runs server operations
+ in background threads, enabling concurrent communication without blocking the main agent.
+
+ Key Features:
+ 1. TCP Server: Listen for incoming connections and process them with an agent
+ 2. TCP Client: Connect to remote TCP servers and exchange messages
+ 3. Background Processing: Server runs in a background thread
+ 4. Per-Connection Agents: Creates a fresh agent for each client connection
+
+ Usage with Strands Agent:
+
+ ```python
+ from strands import Agent
+ from strands_tools import tcp
+
+ agent = Agent(tools=[tcp])
+
+ # Start a TCP server
+ result = agent.tool.tcp(
+     action="start_server",
+     host="127.0.0.1",
+     port=8000,
+     system_prompt="You are a helpful TCP server assistant.",
+ )
+
+ # Connect to a TCP server as client
+ result = agent.tool.tcp(
+     action="client_send", host="127.0.0.1", port=8000, message="Hello, server!"
+ )
+
+ # Stop the TCP server
+ result = agent.tool.tcp(action="stop_server", port=8000)
+ ```
+
+ See the tcp function docstring for more details on configuration options and parameters.
+ """
+
+ import logging
+ import socket
+ import threading
+ import time
+ from typing import Any
+
+ from strands import Agent, tool
+
+ logger = logging.getLogger(__name__)
+
+ # Global registry to store server threads
+ SERVER_THREADS: dict[int, dict[str, Any]] = {}
+
+
+ def handle_client(
+     client_socket: socket.socket,
+     client_address: tuple,
+     system_prompt: str,
+     buffer_size: int,
+     model: Any,
+     parent_tools: list | None = None,
+     callback_handler: Any = None,
+     trace_attributes: dict | None = None,
+ ) -> None:
+     """Handle a client connection in the TCP server.
+
+     Args:
+         client_socket: The socket for the client connection
+         client_address: The address of the client
+         system_prompt: System prompt for creating a new agent for this connection
+         buffer_size: Size of the message buffer
+         model: Model instance from parent agent
+         parent_tools: Tools inherited from the parent agent
+         callback_handler: Callback handler from parent agent
+         trace_attributes: Trace attributes from the parent agent
+     """
+     logger.info(f"Connection established with {client_address}")
+
+     # Create a fresh agent instance for this client connection
+     connection_agent = Agent(
+         model=model,
+         messages=[],
+         tools=parent_tools or [],
+         callback_handler=callback_handler,
+         system_prompt=system_prompt,
+         trace_attributes=trace_attributes or {},
+     )
+
+     try:
+         # Send welcome message
+         welcome_msg = "Welcome to Strands TCP Server! Send a message or 'exit' to close the connection.\n"
+         client_socket.sendall(welcome_msg.encode())
+
+         while True:
+             # Receive data from the client
+             data = client_socket.recv(buffer_size)
+
+             if not data:
+                 logger.info(f"Client {client_address} disconnected")
+                 break
+
+             message = data.decode().strip()
+             logger.info(f"Received from {client_address}: {message}")
+
+             if message.lower() == "exit":
+                 client_socket.sendall(b"Connection closed by client request.\n")
+                 logger.info(f"Client {client_address} requested to exit")
+                 break
+
+             # Process the message with the connection-specific agent
+             response = connection_agent(message)
+             response_text = str(response)
+
+             # Send the response back to the client
+             client_socket.sendall((response_text + "\n").encode())
+
+     except Exception as e:
+         logger.error(f"Error handling client {client_address}: {e}")
+     finally:
+         client_socket.close()
+         logger.info(f"Connection with {client_address} closed")
+
+
+ def run_server(
+     host: str,
+     port: int,
+     system_prompt: str,
+     max_connections: int,
+     buffer_size: int,
+     parent_agent: Agent | None = None,
+ ) -> None:
+     """Run a TCP server that processes client requests with per-connection Strands agents.
+
+     Args:
+         host: Host address to bind the server
+         port: Port number to bind the server
+         system_prompt: System prompt for the server agents
+         max_connections: Maximum number of concurrent connections
+         buffer_size: Size of the message buffer
+         parent_agent: Parent agent to inherit tools from
+     """
+     # Store server state
+     SERVER_THREADS[port]["running"] = True
+     SERVER_THREADS[port]["connections"] = 0
+     SERVER_THREADS[port]["start_time"] = time.time()
+
+     # Get model, tools, callback_handler and trace attributes from parent agent
+     model = None
+     callback_handler = None
+     parent_tools = []
+     trace_attributes = {}
+     if parent_agent:
+         model = parent_agent.model
+         callback_handler = parent_agent.callback_handler
+         parent_tools = list(parent_agent.tool_registry.registry.values())
+         trace_attributes = parent_agent.trace_attributes
+
+     # Create server socket
+     server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+     server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+
+     try:
+         server_socket.bind((host, port))
+         server_socket.listen(max_connections)
+         logger.info(f"TCP Server listening on {host}:{port}")
+
+         SERVER_THREADS[port]["socket"] = server_socket
+
+         while SERVER_THREADS[port]["running"]:
+             # Set a timeout to check periodically if the server should stop
+             server_socket.settimeout(1.0)
+
+             try:
+                 # Accept client connection
+                 client_socket, client_address = server_socket.accept()
+                 SERVER_THREADS[port]["connections"] += 1
+
+                 # Handle client in a new thread with a fresh agent
+                 client_thread = threading.Thread(
+                     target=handle_client,
+                     args=(
+                         client_socket,
+                         client_address,
+                         system_prompt,
+                         buffer_size,
+                         model,
+                         parent_tools,
+                         callback_handler,
+                         trace_attributes,
+                     ),
+                 )
+                 client_thread.daemon = True
+                 client_thread.start()
+
+             except TimeoutError:
+                 # This is expected due to the timeout, allows checking if server should stop
+                 pass
+             except Exception as e:
+                 if SERVER_THREADS[port]["running"]:
+                     logger.error(f"Error accepting connection: {e}")
+
+     except Exception as e:
+         logger.error(f"Server error on {host}:{port}: {e}")
+     finally:
+         try:
+             server_socket.close()
+         except OSError:
+             # Socket already closed, safe to ignore
+             pass
+         logger.info(f"TCP Server on {host}:{port} stopped")
+         SERVER_THREADS[port]["running"] = False
+
+
+ @tool
+ def tcp(
+     action: str,
+     host: str = "127.0.0.1",
+     port: int = 8000,
+     system_prompt: str = "You are a helpful TCP server assistant.",
+     message: str = "",
+     timeout: int = 90,
+     buffer_size: int = 4096,
+     max_connections: int = 5,
+     agent: Any = None,
+ ) -> dict:
+     """Create and manage TCP servers and clients for network communication with connection handling.
+
+     This function provides TCP server and client functionality for Strands agents,
+     allowing them to communicate over TCP/IP networks. Servers run in background
+     threads with a new, fresh agent instance for each client connection.
+
+     How It Works:
+     ------------
+     1. Server Mode:
+        - Starts a TCP server in a background thread
+        - Creates a dedicated agent for EACH client connection
+        - Inherits tools from the parent agent
+        - Processes client messages and returns responses
+
+     2. Client Mode:
+        - Connects to a TCP server
+        - Sends messages and receives responses
+        - Maintains stateless connections (no persistent sessions)
+
+     3. Management:
+        - Track server status and statistics
+        - Stop servers gracefully
+        - Monitor connections and performance
+
+     Common Use Cases:
+     ---------------
+     - Network service automation
+     - Inter-agent communication
+     - Remote command and control
+     - API gateway implementation
+     - IoT device management
+
+     Args:
+         action: Action to perform (start_server, stop_server, get_status, client_send)
+         host: Host address for server or client connection
+         port: Port number for server or client connection
+         system_prompt: System prompt for the server agent (for start_server)
+         message: Message to send to the TCP server (for client_send action)
+         timeout: Connection timeout in seconds (default: 90)
+         buffer_size: Size of the message buffer in bytes (default: 4096)
+         max_connections: Maximum number of concurrent connections (default: 5)
+
+     Returns:
+         Dictionary containing status and response content
+
+     Notes:
+         - Server instances persist until explicitly stopped
+         - Each client connection gets its own agent instance
+         - Connection agents inherit tools from the parent agent
+         - Client connections are stateless
+     """
+     # Get parent agent from tool context if available
+     parent_agent = agent
+
+     if action == "start_server":
+         # Check if server already running on this port
+         if port in SERVER_THREADS and SERVER_THREADS[port].get("running", False):
+             return {
+                 "status": "error",
+                 "content": [{"text": f"❌ Error: TCP Server already running on port {port}"}],
+             }
+
+         # Create server thread
+         SERVER_THREADS[port] = {"running": False}
+         server_thread = threading.Thread(
+             target=run_server,
+             args=(
+                 host,
+                 port,
+                 system_prompt,
+                 max_connections,
+                 buffer_size,
+                 parent_agent,
+             ),
+         )
+         server_thread.daemon = True
+         server_thread.start()
+
+         # Wait briefly to ensure server starts
+         time.sleep(0.5)
+
+         if not SERVER_THREADS[port].get("running", False):
+             return {
+                 "status": "error",
+                 "content": [{"text": f"❌ Error: Failed to start TCP Server on {host}:{port}"}],
+             }
+
+         return {
+             "status": "success",
+             "content": [
+                 {"text": f"✅ TCP Server started successfully on {host}:{port}"},
+                 {"text": f"System prompt: {system_prompt}"},
+                 {"text": "Server creates a new agent instance for each connection"},
+             ],
+         }
+
+     elif action == "stop_server":
+         if port not in SERVER_THREADS or not SERVER_THREADS[port].get("running", False):
+             return {
+                 "status": "error",
+                 "content": [{"text": f"❌ Error: No TCP Server running on port {port}"}],
+             }
+
+         # Stop the server
+         SERVER_THREADS[port]["running"] = False
+
+         # Close socket if it exists
+         if "socket" in SERVER_THREADS[port]:
+             try:
+                 SERVER_THREADS[port]["socket"].close()
+             except OSError:
+                 # Socket already closed, safe to ignore
+                 pass
+
+         # Wait briefly to ensure server stops
+         time.sleep(1.0)
+
+         connections = SERVER_THREADS[port].get("connections", 0)
+         uptime = time.time() - SERVER_THREADS[port].get("start_time", time.time())
+
+         # Clean up server thread data
+         del SERVER_THREADS[port]
+
+         return {
+             "status": "success",
+             "content": [
+                 {"text": f"✅ TCP Server on port {port} stopped successfully"},
+                 {"text": f"Statistics: {connections} connections handled, uptime {uptime:.2f} seconds"},
+             ],
+         }
+
+     elif action == "get_status":
+         if not SERVER_THREADS:
+             return {
+                 "status": "success",
+                 "content": [{"text": "No TCP Servers running"}],
+             }
+
+         status_info = []
+         for port, data in SERVER_THREADS.items():
+             if data.get("running", False):
+                 uptime = time.time() - data.get("start_time", time.time())
+                 connections = data.get("connections", 0)
+                 status_info.append(f"Port {port}: Running - {connections} connections, uptime {uptime:.2f}s")
+             else:
+                 status_info.append(f"Port {port}: Stopped")
+
+         return {
+             "status": "success",
+             "content": [
+                 {"text": "TCP Server Status:"},
+                 {"text": "\n".join(status_info)},
+             ],
+         }
+
+     elif action == "client_send":
+         host = host
+         port = port
+         message = message
+         timeout = timeout
+         buffer_size = buffer_size
+
+         if not message:
+             return {
+                 "status": "error",
+                 "content": [{"text": "Error: No message provided for client_send action"}],
+             }
+
+         # Create client socket
+         client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+         client_socket.settimeout(timeout)
+
+         try:
+             # Connect to server
+             client_socket.connect((host, port))
+
+             # Receive welcome message
+             _welcome = client_socket.recv(buffer_size).decode()
+
+             # Send message to server
+             client_socket.sendall(message.encode())
+
+             # Receive response
+             response = client_socket.recv(buffer_size).decode()
+
+             # Send exit message and close connection
+             client_socket.sendall(b"exit")
+             client_socket.close()
+
+             return {
+                 "status": "success",
+                 "content": [
+                     {"text": f"Connected to {host}:{port} successfully"},
+                     {"text": f"Received welcome message: {_welcome}"},
+                     {"text": f"Sent message: {message}"},
+                     {"text": "Response received:"},
+                     {"text": response},
+                 ],
+             }
+
+         except TimeoutError:
+             return {
+                 "status": "error",
+                 "content": [{"text": f"Error: Connection to {host}:{port} timed out after {timeout} seconds"}],
+             }
+         except ConnectionRefusedError:
+             return {
+                 "status": "error",
+                 "content": [{"text": f"Error: Connection to {host}:{port} refused - no server running on that port"}],
+             }
+         except Exception as e:
+             return {
+                 "status": "error",
+                 "content": [{"text": f"Error connecting to {host}:{port}: {e!s}"}],
+             }
+         finally:
+             try:
+                 client_socket.close()
+             except OSError:
+                 # Socket already closed, safe to ignore
+                 pass
+
+     else:
+         return {
+             "status": "error",
+             "content": [
+                 {
+                     "text": f"Error: Unknown action '{action}'. Supported actions are: "
+                     f"start_server, stop_server, get_status, client_send"
+                 }
+             ],
+         }
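The wire protocol implied by `handle_client` is plain text: the server sends a welcome banner, answers each received message with the agent's response, and closes when it receives `exit`. A minimal sketch with Python's standard `socket` module, assuming a server was already started on 127.0.0.1:8000 via `action="start_server"`:

```python
# Raw TCP exchange with a server started by the tcp tool (sketch; assumes the
# start_server action is already listening on 127.0.0.1:8000).
import socket

with socket.create_connection(("127.0.0.1", 8000), timeout=90) as sock:
    banner = sock.recv(4096).decode()   # welcome message from handle_client
    sock.sendall(b"what's 2+2?")        # one request per server-side recv()
    reply = sock.recv(4096).decode()    # newline-terminated agent response
    sock.sendall(b"exit")               # ask the server to close this connection
    print(banner.strip())
    print(reply.strip())
```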
devduck-0.1.0.dist-info/METADATA ADDED
@@ -0,0 +1,106 @@
+ Metadata-Version: 2.4
+ Name: devduck
+ Version: 0.1.0
+ Summary: 🦆 Extreme minimalist self-adapting AI agent - one file, self-healing, runtime dependencies
+ Home-page: https://github.com/cagataycali/devduck
+ Author: maxs
+ Author-email: duck <hey@devduck.dev>
+ License-Expression: MIT
+ Project-URL: Homepage, https://github.com/cagataycali/devduck
+ Project-URL: Repository, https://github.com/cagataycali/devduck.git
+ Project-URL: Documentation, https://github.com/cagataycali/devduck#readme
+ Project-URL: Bug Tracker, https://github.com/cagataycali/devduck/issues
+ Keywords: ai,agent,minimalist,self-healing,ollama,strands-agents
+ Classifier: Development Status :: 4 - Beta
+ Classifier: Environment :: Console
+ Classifier: Intended Audience :: Developers
+ Classifier: Intended Audience :: System Administrators
+ Classifier: Operating System :: OS Independent
+ Classifier: Programming Language :: Python :: 3
+ Classifier: Programming Language :: Python :: 3.10
+ Classifier: Programming Language :: Python :: 3.11
+ Classifier: Programming Language :: Python :: 3.12
+ Classifier: Programming Language :: Python :: 3.13
+ Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
+ Classifier: Topic :: System :: Systems Administration
+ Classifier: Topic :: Utilities
+ Requires-Python: >=3.10
+ Description-Content-Type: text/markdown
+ License-File: LICENSE
+ Requires-Dist: strands-agents
+ Requires-Dist: strands-agents[ollama]
+ Requires-Dist: strands-agents[openai]
+ Requires-Dist: strands-agents[anthropic]
+ Requires-Dist: strands-agents-tools
+ Dynamic: author
+ Dynamic: home-page
+ Dynamic: license-file
+ Dynamic: requires-python
+
+ # 🦆 DevDuck
+
+ **One file. Self-healing. Adaptive.**
+
+ Minimalist AI agent that fixes itself when things break.
+
+ ## Install
+
+ ```bash
+ pipx install devduck
+ ```
+
+ Requires: Python 3.10+, Ollama running
+
+ ## Use
+
+ ```bash
+ # CLI
+ devduck "what's the time?"
+
+ # Interactive
+ devduck
+
+ # Python
+ import devduck
+
+ devduck("calculate 2+2")
+ ```
+
+ ## Features
+
+ - **Self-healing** - Auto-fixes deps, models, errors
+ - **Hot-reload** - Create tools in `./tools/*.py`, use instantly
+ - **Adaptive** - Picks model based on OS (macOS: 1.7b, Linux: 30b)
+ - **14 tools** - shell, editor, files, python, calculator, tcp, etc.
+ - **History aware** - Remembers shell/conversation context
+
+ ## Create Tool
+
+ ```python
+ # ./tools/greet.py
+ from strands import tool
+
+ @tool
+ def greet(name: str) -> str:
+     return f"Hello {name}!"
+ ```
+
+ Save. Done. Use immediately.
+
+ ## Multi-Model
+
+ ```bash
+ export MODEL_PROVIDER="bedrock"
+ export STRANDS_MODEL_ID="us.anthropic.claude-sonnet-4-5-20250929-v1:0"
+ export STRANDS_ADDITIONAL_REQUEST_FIELDS='{"anthropic_beta": ["interleaved-thinking-2025-05-14", "context-1m-2025-08-07"], "thinking": {"type": "enabled", "budget_tokens": 2048}}'
+ export STRANDS_MAX_TOKENS="64000"
+
+ devduck "analyze data"
+ ```
+
+ ---
+
+ **Quack.** 🦆
+
+ *Built with [Strands Agents SDK](https://github.com/strands-agents/sdk-python)*
devduck-0.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,11 @@
+ devduck/__init__.py,sha256=AaMXDIqX4j7FUfIF5rvdkJcSlLLtclHpuIQ5HeiiDQ4,34903
+ devduck/install.sh,sha256=tYq2WWZFCBEMbxCneKAw3GSNAG1zNhpd-kzW1l5ZISw,990
+ devduck/test_redduck.py,sha256=nqRchR7d54jWGx7JN5tji2ZV4Ek4L9s-P7hp0mKjA0Y,1773
+ devduck/tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+ devduck/tools/tcp.py,sha256=zK8mrWh9-v3acIJ_qWzr9hi-SaONI4u7ScUWEuxWhN8,15477
+ devduck-0.1.0.dist-info/licenses/LICENSE,sha256=CVGEiNh6cW1mgAKW83Q0P4xrQEXvqc6W-rb789W_IHM,1060
+ devduck-0.1.0.dist-info/METADATA,sha256=x5csUDki9FxOvSTopwOOatfCpVWH1Z9QYvi5mYHRzJI,2971
+ devduck-0.1.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ devduck-0.1.0.dist-info/entry_points.txt,sha256=BAMQaIg_BLZQOTk12bT7hy1dE9oGPLt-_dTbI4cnBnQ,40
+ devduck-0.1.0.dist-info/top_level.txt,sha256=ySXWlVronp8xHYfQ_Hdfr463e0EnbWuqyuxs94EU7yk,8
+ devduck-0.1.0.dist-info/RECORD,,
devduck-0.1.0.dist-info/WHEEL ADDED
@@ -0,0 +1,5 @@
+ Wheel-Version: 1.0
+ Generator: setuptools (80.9.0)
+ Root-Is-Purelib: true
+ Tag: py3-none-any
+