daie 1.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- daie-1.0.1/LICENSE +21 -0
- daie-1.0.1/PKG-INFO +304 -0
- daie-1.0.1/README.md +266 -0
- daie-1.0.1/pyproject.toml +86 -0
- daie-1.0.1/setup.cfg +4 -0
- daie-1.0.1/src/daie/__init__.py +64 -0
- daie-1.0.1/src/daie/agents/__init__.py +14 -0
- daie-1.0.1/src/daie/agents/agent.py +424 -0
- daie-1.0.1/src/daie/agents/config.py +218 -0
- daie-1.0.1/src/daie/agents/message.py +46 -0
- daie-1.0.1/src/daie/cli/__init__.py +9 -0
- daie-1.0.1/src/daie/cli/agent.py +162 -0
- daie-1.0.1/src/daie/cli/core.py +271 -0
- daie-1.0.1/src/daie/cli/main.py +110 -0
- daie-1.0.1/src/daie/communication/__init__.py +9 -0
- daie-1.0.1/src/daie/communication/manager.py +430 -0
- daie-1.0.1/src/daie/config/__init__.py +9 -0
- daie-1.0.1/src/daie/config/system.py +392 -0
- daie-1.0.1/src/daie/core/__init__.py +27 -0
- daie-1.0.1/src/daie/core/llm_manager.py +509 -0
- daie-1.0.1/src/daie/core/node.py +229 -0
- daie-1.0.1/src/daie/core/system.py +295 -0
- daie-1.0.1/src/daie/memory/__init__.py +9 -0
- daie-1.0.1/src/daie/memory/manager.py +369 -0
- daie-1.0.1/src/daie/tools/__init__.py +15 -0
- daie-1.0.1/src/daie/tools/registry.py +430 -0
- daie-1.0.1/src/daie/tools/tool.py +428 -0
- daie-1.0.1/src/daie/utils/__init__.py +18 -0
- daie-1.0.1/src/daie/utils/common.py +322 -0
- daie-1.0.1/src/daie/utils/encryption.py +281 -0
- daie-1.0.1/src/daie/utils/logger.py +324 -0
- daie-1.0.1/src/daie/utils/serialization.py +397 -0
- daie-1.0.1/src/daie.egg-info/PKG-INFO +304 -0
- daie-1.0.1/src/daie.egg-info/SOURCES.txt +44 -0
- daie-1.0.1/src/daie.egg-info/dependency_links.txt +1 -0
- daie-1.0.1/src/daie.egg-info/entry_points.txt +4 -0
- daie-1.0.1/src/daie.egg-info/requires.txt +21 -0
- daie-1.0.1/src/daie.egg-info/top_level.txt +1 -0
- daie-1.0.1/tests/test_agents.py +197 -0
- daie-1.0.1/tests/test_cli.py +173 -0
- daie-1.0.1/tests/test_communication.py +125 -0
- daie-1.0.1/tests/test_core.py +185 -0
- daie-1.0.1/tests/test_integration.py +250 -0
- daie-1.0.1/tests/test_memory.py +112 -0
- daie-1.0.1/tests/test_tools.py +256 -0
- daie-1.0.1/tests/test_utils.py +247 -0
daie-1.0.1/LICENSE
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Kanishk Kumar Singh
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
daie-1.0.1/PKG-INFO
ADDED
|
@@ -0,0 +1,304 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: daie
|
|
3
|
+
Version: 1.0.1
|
|
4
|
+
Summary: A Python library for creating and deploying decentralized AI agents with tools
|
|
5
|
+
Author-email: Kanishk Kumar Singh <kanishkkumar2004@gmail.com>
|
|
6
|
+
Classifier: Development Status :: 3 - Alpha
|
|
7
|
+
Classifier: Intended Audience :: Developers
|
|
8
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
9
|
+
Classifier: Operating System :: OS Independent
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
12
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
13
|
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
|
14
|
+
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
|
|
15
|
+
Requires-Python: >=3.10
|
|
16
|
+
Description-Content-Type: text/markdown
|
|
17
|
+
License-File: LICENSE
|
|
18
|
+
Requires-Dist: nats-py>=2.6.0
|
|
19
|
+
Requires-Dist: cryptography>=46.0.0
|
|
20
|
+
Requires-Dist: python-dotenv>=1.2.0
|
|
21
|
+
Requires-Dist: pydantic>=2.12.0
|
|
22
|
+
Requires-Dist: pydantic-settings>=2.12.0
|
|
23
|
+
Requires-Dist: requests>=2.31.0
|
|
24
|
+
Requires-Dist: rich>=13.0.0
|
|
25
|
+
Requires-Dist: typer>=0.12.0
|
|
26
|
+
Provides-Extra: dev
|
|
27
|
+
Requires-Dist: pytest>=9.0.0; extra == "dev"
|
|
28
|
+
Requires-Dist: pytest-asyncio>=1.3.0; extra == "dev"
|
|
29
|
+
Requires-Dist: pytest-cov>=7.0.0; extra == "dev"
|
|
30
|
+
Requires-Dist: black>=24.0.0; extra == "dev"
|
|
31
|
+
Requires-Dist: flake8>=7.0.0; extra == "dev"
|
|
32
|
+
Requires-Dist: mypy>=1.8.0; extra == "dev"
|
|
33
|
+
Provides-Extra: docs
|
|
34
|
+
Requires-Dist: sphinx>=7.0.0; extra == "docs"
|
|
35
|
+
Requires-Dist: sphinx-rtd-theme>=2.0.0; extra == "docs"
|
|
36
|
+
Requires-Dist: nbsphinx>=0.9.0; extra == "docs"
|
|
37
|
+
Dynamic: license-file
|
|
38
|
+
|
|
39
|
+
# DAIE - Decentralized AI Ecosystem
|
|
40
|
+
|
|
41
|
+
A lightweight Python library for creating and managing AI agents with tools, featuring decentralized communication and memory management.
|
|
42
|
+
|
|
43
|
+
## Features
|
|
44
|
+
|
|
45
|
+
### 🚀 **Core Features**
|
|
46
|
+
- **Lightweight Design**: Minimal dependencies, optimized for speed and resource efficiency
|
|
47
|
+
- **Agent Management**: Create, configure, and manage AI agents with unique identities
|
|
48
|
+
- **Tool System**: Define and register reusable tools for agents to execute
|
|
49
|
+
- **Decentralized Communication**: Agents communicate via NATS JetStream
|
|
50
|
+
- **Memory Management**: Agent-specific memories with persistence support
|
|
51
|
+
- **LLM Integration**: Centralized LLM management with Ollama integration (default: llama3)
|
|
52
|
+
- **CLI Interface**: Command-line tools for system management
|
|
53
|
+
|
|
54
|
+
### 🤖 **Agent Features**
|
|
55
|
+
Each agent has:
|
|
56
|
+
- **Unique Identity**: ID, name, role, goal, backstory, and system prompt
|
|
57
|
+
- **Local Tool Execution**: Agents execute tools locally within their own context
|
|
58
|
+
- **Chat History**: Individual memory stores with working, semantic, and episodic memory
|
|
59
|
+
- **Vector Database**: Each agent has its own vector database for semantic search (in development)
|
|
60
|
+
- **LangGraph Workflow**: Each agent has its own LangGraph workflow (in development)
|
|
61
|
+
- **LLM from Core**: Agents fetch LLM instances from the centralized LLM manager
|
|
62
|
+
|
|
63
|
+
## Installation
|
|
64
|
+
|
|
65
|
+
### Prerequisites
|
|
66
|
+
- Python 3.10+
|
|
67
|
+
- Ollama (for LLM functionality)
|
|
68
|
+
- NATS JetStream (for communication)
|
|
69
|
+
|
|
70
|
+
### Install the Library
|
|
71
|
+
```bash
|
|
72
|
+
pip install daie
|
|
73
|
+
```
|
|
74
|
+
|
|
75
|
+
### Install Ollama
|
|
76
|
+
1. Download and install Ollama from [ollama.com](https://ollama.com/download)
|
|
77
|
+
2. Pull the default model:
|
|
78
|
+
```bash
|
|
79
|
+
ollama pull llama3
|
|
80
|
+
```
|
|
81
|
+
|
|
82
|
+
## Quick Start
|
|
83
|
+
|
|
84
|
+
### Example: Creating a Simple Agent
|
|
85
|
+
```python
|
|
86
|
+
#!/usr/bin/env python3
|
|
87
|
+
import asyncio
|
|
88
|
+
import logging
|
|
89
|
+
from daie import Agent, AgentConfig, Tool, ToolRegistry
|
|
90
|
+
from daie.agents import AgentRole
|
|
91
|
+
from daie.tools import tool
|
|
92
|
+
|
|
93
|
+
# Configure logging
|
|
94
|
+
logging.basicConfig(
|
|
95
|
+
level=logging.INFO,
|
|
96
|
+
format='%(asctime)s - %(levelname)s - %(message)s'
|
|
97
|
+
)
|
|
98
|
+
logger = logging.getLogger(__name__)
|
|
99
|
+
|
|
100
|
+
|
|
101
|
+
async def main():
|
|
102
|
+
logger.info("=== DAIE - Decentralized AI Ecosystem Example ===")
|
|
103
|
+
|
|
104
|
+
# Create a tool
|
|
105
|
+
@tool(
|
|
106
|
+
name="greeting",
|
|
107
|
+
description="Generate a greeting message",
|
|
108
|
+
category="general",
|
|
109
|
+
version="1.0.0"
|
|
110
|
+
)
|
|
111
|
+
async def greeting_tool(name: str, language: str = "en") -> str:
|
|
112
|
+
greetings = {
|
|
113
|
+
"en": f"Hello, {name}! Welcome to DAIE!",
|
|
114
|
+
"es": f"Hola, {name}! ¡Bienvenido a DAIE!",
|
|
115
|
+
"fr": f"Bonjour, {name}! Bienvenue dans DAIE!",
|
|
116
|
+
"de": f"Hallo, {name}! Willkommen bei DAIE!"
|
|
117
|
+
}
|
|
118
|
+
return greetings.get(language.lower(), greetings["en"])
|
|
119
|
+
|
|
120
|
+
# Create agent configuration with new features
|
|
121
|
+
config = AgentConfig(
|
|
122
|
+
name="ResearchAgent",
|
|
123
|
+
role=AgentRole.SPECIALIZED,
|
|
124
|
+
goal="Research information on given topics",
|
|
125
|
+
backstory="Created to assist with research and information gathering",
|
|
126
|
+
system_prompt="You are a research assistant that helps users find and analyze information.",
|
|
127
|
+
capabilities=["greeting"]
|
|
128
|
+
)
|
|
129
|
+
|
|
130
|
+
# Create agent
|
|
131
|
+
agent = Agent(config=config)
|
|
132
|
+
agent.add_tool(greeting_tool)
|
|
133
|
+
|
|
134
|
+
# Test tool execution
|
|
135
|
+
result = await greeting_tool.execute({"name": "Alice", "language": "es"})
|
|
136
|
+
logger.info(f"✅ Tool executed successfully: {result}")
|
|
137
|
+
|
|
138
|
+
logger.info("\n🎉 Example completed successfully!")
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
if __name__ == "__main__":
|
|
142
|
+
try:
|
|
143
|
+
asyncio.run(main())
|
|
144
|
+
except Exception as e:
|
|
145
|
+
logger.error(f"❌ Error: {e}")
|
|
146
|
+
import sys
|
|
147
|
+
sys.exit(1)
|
|
148
|
+
```
|
|
149
|
+
|
|
150
|
+
## CLI Usage
|
|
151
|
+
|
|
152
|
+
### Agent Management
|
|
153
|
+
```bash
|
|
154
|
+
# List all agents
|
|
155
|
+
daie agent list
|
|
156
|
+
|
|
157
|
+
# Create a new agent
|
|
158
|
+
daie agent create --name "MyAgent" --role "general-purpose" --goal "Help users with questions"
|
|
159
|
+
|
|
160
|
+
# Start an agent
|
|
161
|
+
daie agent start <agent-id>
|
|
162
|
+
|
|
163
|
+
# Stop an agent
|
|
164
|
+
daie agent stop <agent-id>
|
|
165
|
+
|
|
166
|
+
# Get agent status
|
|
167
|
+
daie agent status <agent-id>
|
|
168
|
+
|
|
169
|
+
# Delete an agent
|
|
170
|
+
daie agent delete <agent-id>
|
|
171
|
+
```
|
|
172
|
+
|
|
173
|
+
### Core System Management
|
|
174
|
+
```bash
|
|
175
|
+
# Initialize the system
|
|
176
|
+
daie core init
|
|
177
|
+
|
|
178
|
+
# Start the central core system
|
|
179
|
+
daie core start
|
|
180
|
+
|
|
181
|
+
# Stop the central core system
|
|
182
|
+
daie core stop
|
|
183
|
+
|
|
184
|
+
# Restart the central core system
|
|
185
|
+
daie core restart
|
|
186
|
+
|
|
187
|
+
# Get system status
|
|
188
|
+
daie core status
|
|
189
|
+
|
|
190
|
+
# View system logs
|
|
191
|
+
daie core logs
|
|
192
|
+
|
|
193
|
+
# Check system health
|
|
194
|
+
daie core health
|
|
195
|
+
```
|
|
196
|
+
|
|
197
|
+
## LLM Configuration
|
|
198
|
+
|
|
199
|
+
### Setting LLM Parameters
|
|
200
|
+
```python
|
|
201
|
+
from daie import set_llm, get_llm_config, LLMType
|
|
202
|
+
|
|
203
|
+
# Using Ollama (default)
|
|
204
|
+
set_llm(ollama_llm="llama3")
|
|
205
|
+
set_llm(ollama_llm="mistral", temperature=0.3, max_tokens=1500)
|
|
206
|
+
|
|
207
|
+
# Using OpenAI
|
|
208
|
+
set_llm(
|
|
209
|
+
llm_type=LLMType.OPENAI,
|
|
210
|
+
model_name="gpt-3.5-turbo",
|
|
211
|
+
api_key="your-api-key",
|
|
212
|
+
temperature=0.5,
|
|
213
|
+
max_tokens=2000
|
|
214
|
+
)
|
|
215
|
+
|
|
216
|
+
# Get current configuration
|
|
217
|
+
config = get_llm_config()
|
|
218
|
+
print(f"Current LLM: {config.llm_type.value}/{config.model_name}")
|
|
219
|
+
print(f"Temperature: {config.temperature}")
|
|
220
|
+
print(f"Max tokens: {config.max_tokens}")
|
|
221
|
+
```
|
|
222
|
+
|
|
223
|
+
### Available LLM Models
|
|
224
|
+
|
|
225
|
+
#### Ollama Models:
|
|
226
|
+
- llama3 (default)
|
|
227
|
+
- llama3.2:latest
|
|
228
|
+
- mistral
|
|
229
|
+
- llama2
|
|
230
|
+
- gemma
|
|
231
|
+
|
|
232
|
+
#### OpenAI Models:
|
|
233
|
+
- gpt-4o
|
|
234
|
+
- gpt-4o-mini
|
|
235
|
+
- gpt-4-turbo
|
|
236
|
+
- gpt-3.5-turbo
|
|
237
|
+
|
|
238
|
+
## Configuration
|
|
239
|
+
|
|
240
|
+
### Environment Variables
|
|
241
|
+
```bash
|
|
242
|
+
# System configuration
|
|
243
|
+
DAIE_LOG_LEVEL=INFO
|
|
244
|
+
DAIE_NATS_URL=nats://localhost:4222
|
|
245
|
+
DAIE_CENTRAL_CORE_URL=http://localhost:8000
|
|
246
|
+
|
|
247
|
+
# LLM configuration
|
|
248
|
+
DAIE_DEFAULT_LLM_MODEL=llama3
|
|
249
|
+
DAIE_LLM_TEMPERATURE=0.7
|
|
250
|
+
DAIE_LLM_MAX_TOKENS=1000
|
|
251
|
+
|
|
252
|
+
# Database configuration
|
|
253
|
+
DAIE_DATABASE_URL=sqlite:///:memory:
|
|
254
|
+
DAIE_REDIS_URL=redis://localhost:6379/0
|
|
255
|
+
```
|
|
256
|
+
|
|
257
|
+
## Architecture
|
|
258
|
+
|
|
259
|
+
### System Components
|
|
260
|
+
1. **Agent**: Individual AI entity with specific capabilities
|
|
261
|
+
2. **Tool**: Reusable functionality that agents can execute
|
|
262
|
+
3. **LLM Manager**: Handles LLM integration with various providers
|
|
263
|
+
4. **Communication Manager**: Facilitates agent communication via NATS
|
|
264
|
+
5. **Memory Manager**: Manages agent memory storage and retrieval
|
|
265
|
+
6. **Tool Registry**: Central repository for available tools
|
|
266
|
+
7. **Central Core System**: Orchestrator for the entire ecosystem
|
|
267
|
+
|
|
268
|
+
### Communication Protocol
|
|
269
|
+
Agents communicate using NATS JetStream with the following message types:
|
|
270
|
+
- **Text Messages**: Direct communication between agents
|
|
271
|
+
- **Tasks**: Requests for tool execution
|
|
272
|
+
- **Responses**: Results from task execution
|
|
273
|
+
- **Events**: System and agent events
|
|
274
|
+
|
|
275
|
+
## Development
|
|
276
|
+
|
|
277
|
+
### Prerequisites
|
|
278
|
+
- Python 3.10+
|
|
279
|
+
- Docker (for running dependencies)
|
|
280
|
+
- Poetry (for package management)
|
|
281
|
+
|
|
282
|
+
### Setup
|
|
283
|
+
```bash
|
|
284
|
+
# Clone the repository
|
|
285
|
+
git clone https://github.com/decentralized-ai/decentralized-ai-ecosystem.git
|
|
286
|
+
cd decentralized-ai-ecosystem
|
|
287
|
+
|
|
288
|
+
# Install dependencies
|
|
289
|
+
poetry install
|
|
290
|
+
|
|
291
|
+
# Run tests
|
|
292
|
+
poetry run pytest tests/
|
|
293
|
+
|
|
294
|
+
# Run the CLI
|
|
295
|
+
poetry run daie --help
|
|
296
|
+
```
|
|
297
|
+
|
|
298
|
+
## License
|
|
299
|
+
|
|
300
|
+
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|
|
301
|
+
|
|
302
|
+
## Support
|
|
303
|
+
|
|
304
|
+
For questions or support, please contact **KANISHK KUMAR SINGH** at kanishkkumar2004@gmail.com.
|
daie-1.0.1/README.md
ADDED
|
@@ -0,0 +1,266 @@
|
|
|
1
|
+
# DAIE - Decentralized AI Ecosystem
|
|
2
|
+
|
|
3
|
+
A lightweight Python library for creating and managing AI agents with tools, featuring decentralized communication and memory management.
|
|
4
|
+
|
|
5
|
+
## Features
|
|
6
|
+
|
|
7
|
+
### 🚀 **Core Features**
|
|
8
|
+
- **Lightweight Design**: Minimal dependencies, optimized for speed and resource efficiency
|
|
9
|
+
- **Agent Management**: Create, configure, and manage AI agents with unique identities
|
|
10
|
+
- **Tool System**: Define and register reusable tools for agents to execute
|
|
11
|
+
- **Decentralized Communication**: Agents communicate via NATS JetStream
|
|
12
|
+
- **Memory Management**: Agent-specific memories with persistence support
|
|
13
|
+
- **LLM Integration**: Centralized LLM management with Ollama integration (default: llama3)
|
|
14
|
+
- **CLI Interface**: Command-line tools for system management
|
|
15
|
+
|
|
16
|
+
### 🤖 **Agent Features**
|
|
17
|
+
Each agent has:
|
|
18
|
+
- **Unique Identity**: ID, name, role, goal, backstory, and system prompt
|
|
19
|
+
- **Local Tool Execution**: Agents execute tools locally within their own context
|
|
20
|
+
- **Chat History**: Individual memory stores with working, semantic, and episodic memory
|
|
21
|
+
- **Vector Database**: Each agent has its own vector database for semantic search (in development)
|
|
22
|
+
- **LangGraph Workflow**: Each agent has its own LangGraph workflow (in development)
|
|
23
|
+
- **LLM from Core**: Agents fetch LLM instances from the centralized LLM manager
|
|
24
|
+
|
|
25
|
+
## Installation
|
|
26
|
+
|
|
27
|
+
### Prerequisites
|
|
28
|
+
- Python 3.10+
|
|
29
|
+
- Ollama (for LLM functionality)
|
|
30
|
+
- NATS JetStream (for communication)
|
|
31
|
+
|
|
32
|
+
### Install the Library
|
|
33
|
+
```bash
|
|
34
|
+
pip install daie
|
|
35
|
+
```
|
|
36
|
+
|
|
37
|
+
### Install Ollama
|
|
38
|
+
1. Download and install Ollama from [ollama.com](https://ollama.com/download)
|
|
39
|
+
2. Pull the default model:
|
|
40
|
+
```bash
|
|
41
|
+
ollama pull llama3
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
## Quick Start
|
|
45
|
+
|
|
46
|
+
### Example: Creating a Simple Agent
|
|
47
|
+
```python
|
|
48
|
+
#!/usr/bin/env python3
|
|
49
|
+
import asyncio
|
|
50
|
+
import logging
|
|
51
|
+
from daie import Agent, AgentConfig, Tool, ToolRegistry
|
|
52
|
+
from daie.agents import AgentRole
|
|
53
|
+
from daie.tools import tool
|
|
54
|
+
|
|
55
|
+
# Configure logging
|
|
56
|
+
logging.basicConfig(
|
|
57
|
+
level=logging.INFO,
|
|
58
|
+
format='%(asctime)s - %(levelname)s - %(message)s'
|
|
59
|
+
)
|
|
60
|
+
logger = logging.getLogger(__name__)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
async def main():
|
|
64
|
+
logger.info("=== DAIE - Decentralized AI Ecosystem Example ===")
|
|
65
|
+
|
|
66
|
+
# Create a tool
|
|
67
|
+
@tool(
|
|
68
|
+
name="greeting",
|
|
69
|
+
description="Generate a greeting message",
|
|
70
|
+
category="general",
|
|
71
|
+
version="1.0.0"
|
|
72
|
+
)
|
|
73
|
+
async def greeting_tool(name: str, language: str = "en") -> str:
|
|
74
|
+
greetings = {
|
|
75
|
+
"en": f"Hello, {name}! Welcome to DAIE!",
|
|
76
|
+
"es": f"Hola, {name}! ¡Bienvenido a DAIE!",
|
|
77
|
+
"fr": f"Bonjour, {name}! Bienvenue dans DAIE!",
|
|
78
|
+
"de": f"Hallo, {name}! Willkommen bei DAIE!"
|
|
79
|
+
}
|
|
80
|
+
return greetings.get(language.lower(), greetings["en"])
|
|
81
|
+
|
|
82
|
+
# Create agent configuration with new features
|
|
83
|
+
config = AgentConfig(
|
|
84
|
+
name="ResearchAgent",
|
|
85
|
+
role=AgentRole.SPECIALIZED,
|
|
86
|
+
goal="Research information on given topics",
|
|
87
|
+
backstory="Created to assist with research and information gathering",
|
|
88
|
+
system_prompt="You are a research assistant that helps users find and analyze information.",
|
|
89
|
+
capabilities=["greeting"]
|
|
90
|
+
)
|
|
91
|
+
|
|
92
|
+
# Create agent
|
|
93
|
+
agent = Agent(config=config)
|
|
94
|
+
agent.add_tool(greeting_tool)
|
|
95
|
+
|
|
96
|
+
# Test tool execution
|
|
97
|
+
result = await greeting_tool.execute({"name": "Alice", "language": "es"})
|
|
98
|
+
logger.info(f"✅ Tool executed successfully: {result}")
|
|
99
|
+
|
|
100
|
+
logger.info("\n🎉 Example completed successfully!")
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
if __name__ == "__main__":
|
|
104
|
+
try:
|
|
105
|
+
asyncio.run(main())
|
|
106
|
+
except Exception as e:
|
|
107
|
+
logger.error(f"❌ Error: {e}")
|
|
108
|
+
import sys
|
|
109
|
+
sys.exit(1)
|
|
110
|
+
```
|
|
111
|
+
|
|
112
|
+
## CLI Usage
|
|
113
|
+
|
|
114
|
+
### Agent Management
|
|
115
|
+
```bash
|
|
116
|
+
# List all agents
|
|
117
|
+
daie agent list
|
|
118
|
+
|
|
119
|
+
# Create a new agent
|
|
120
|
+
daie agent create --name "MyAgent" --role "general-purpose" --goal "Help users with questions"
|
|
121
|
+
|
|
122
|
+
# Start an agent
|
|
123
|
+
daie agent start <agent-id>
|
|
124
|
+
|
|
125
|
+
# Stop an agent
|
|
126
|
+
daie agent stop <agent-id>
|
|
127
|
+
|
|
128
|
+
# Get agent status
|
|
129
|
+
daie agent status <agent-id>
|
|
130
|
+
|
|
131
|
+
# Delete an agent
|
|
132
|
+
daie agent delete <agent-id>
|
|
133
|
+
```
|
|
134
|
+
|
|
135
|
+
### Core System Management
|
|
136
|
+
```bash
|
|
137
|
+
# Initialize the system
|
|
138
|
+
daie core init
|
|
139
|
+
|
|
140
|
+
# Start the central core system
|
|
141
|
+
daie core start
|
|
142
|
+
|
|
143
|
+
# Stop the central core system
|
|
144
|
+
daie core stop
|
|
145
|
+
|
|
146
|
+
# Restart the central core system
|
|
147
|
+
daie core restart
|
|
148
|
+
|
|
149
|
+
# Get system status
|
|
150
|
+
daie core status
|
|
151
|
+
|
|
152
|
+
# View system logs
|
|
153
|
+
daie core logs
|
|
154
|
+
|
|
155
|
+
# Check system health
|
|
156
|
+
daie core health
|
|
157
|
+
```
|
|
158
|
+
|
|
159
|
+
## LLM Configuration
|
|
160
|
+
|
|
161
|
+
### Setting LLM Parameters
|
|
162
|
+
```python
|
|
163
|
+
from daie import set_llm, get_llm_config, LLMType
|
|
164
|
+
|
|
165
|
+
# Using Ollama (default)
|
|
166
|
+
set_llm(ollama_llm="llama3")
|
|
167
|
+
set_llm(ollama_llm="mistral", temperature=0.3, max_tokens=1500)
|
|
168
|
+
|
|
169
|
+
# Using OpenAI
|
|
170
|
+
set_llm(
|
|
171
|
+
llm_type=LLMType.OPENAI,
|
|
172
|
+
model_name="gpt-3.5-turbo",
|
|
173
|
+
api_key="your-api-key",
|
|
174
|
+
temperature=0.5,
|
|
175
|
+
max_tokens=2000
|
|
176
|
+
)
|
|
177
|
+
|
|
178
|
+
# Get current configuration
|
|
179
|
+
config = get_llm_config()
|
|
180
|
+
print(f"Current LLM: {config.llm_type.value}/{config.model_name}")
|
|
181
|
+
print(f"Temperature: {config.temperature}")
|
|
182
|
+
print(f"Max tokens: {config.max_tokens}")
|
|
183
|
+
```
|
|
184
|
+
|
|
185
|
+
### Available LLM Models
|
|
186
|
+
|
|
187
|
+
#### Ollama Models:
|
|
188
|
+
- llama3 (default)
|
|
189
|
+
- llama3.2:latest
|
|
190
|
+
- mistral
|
|
191
|
+
- llama2
|
|
192
|
+
- gemma
|
|
193
|
+
|
|
194
|
+
#### OpenAI Models:
|
|
195
|
+
- gpt-4o
|
|
196
|
+
- gpt-4o-mini
|
|
197
|
+
- gpt-4-turbo
|
|
198
|
+
- gpt-3.5-turbo
|
|
199
|
+
|
|
200
|
+
## Configuration
|
|
201
|
+
|
|
202
|
+
### Environment Variables
|
|
203
|
+
```bash
|
|
204
|
+
# System configuration
|
|
205
|
+
DAIE_LOG_LEVEL=INFO
|
|
206
|
+
DAIE_NATS_URL=nats://localhost:4222
|
|
207
|
+
DAIE_CENTRAL_CORE_URL=http://localhost:8000
|
|
208
|
+
|
|
209
|
+
# LLM configuration
|
|
210
|
+
DAIE_DEFAULT_LLM_MODEL=llama3
|
|
211
|
+
DAIE_LLM_TEMPERATURE=0.7
|
|
212
|
+
DAIE_LLM_MAX_TOKENS=1000
|
|
213
|
+
|
|
214
|
+
# Database configuration
|
|
215
|
+
DAIE_DATABASE_URL=sqlite:///:memory:
|
|
216
|
+
DAIE_REDIS_URL=redis://localhost:6379/0
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
## Architecture
|
|
220
|
+
|
|
221
|
+
### System Components
|
|
222
|
+
1. **Agent**: Individual AI entity with specific capabilities
|
|
223
|
+
2. **Tool**: Reusable functionality that agents can execute
|
|
224
|
+
3. **LLM Manager**: Handles LLM integration with various providers
|
|
225
|
+
4. **Communication Manager**: Facilitates agent communication via NATS
|
|
226
|
+
5. **Memory Manager**: Manages agent memory storage and retrieval
|
|
227
|
+
6. **Tool Registry**: Central repository for available tools
|
|
228
|
+
7. **Central Core System**: Orchestrator for the entire ecosystem
|
|
229
|
+
|
|
230
|
+
### Communication Protocol
|
|
231
|
+
Agents communicate using NATS JetStream with the following message types:
|
|
232
|
+
- **Text Messages**: Direct communication between agents
|
|
233
|
+
- **Tasks**: Requests for tool execution
|
|
234
|
+
- **Responses**: Results from task execution
|
|
235
|
+
- **Events**: System and agent events
|
|
236
|
+
|
|
237
|
+
## Development
|
|
238
|
+
|
|
239
|
+
### Prerequisites
|
|
240
|
+
- Python 3.10+
|
|
241
|
+
- Docker (for running dependencies)
|
|
242
|
+
- Poetry (for package management)
|
|
243
|
+
|
|
244
|
+
### Setup
|
|
245
|
+
```bash
|
|
246
|
+
# Clone the repository
|
|
247
|
+
git clone https://github.com/decentralized-ai/decentralized-ai-ecosystem.git
|
|
248
|
+
cd decentralized-ai-ecosystem
|
|
249
|
+
|
|
250
|
+
# Install dependencies
|
|
251
|
+
poetry install
|
|
252
|
+
|
|
253
|
+
# Run tests
|
|
254
|
+
poetry run pytest tests/
|
|
255
|
+
|
|
256
|
+
# Run the CLI
|
|
257
|
+
poetry run daie --help
|
|
258
|
+
```
|
|
259
|
+
|
|
260
|
+
## License
|
|
261
|
+
|
|
262
|
+
This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
|
|
263
|
+
|
|
264
|
+
## Support
|
|
265
|
+
|
|
266
|
+
For questions or support, please contact **KANISHK KUMAR SINGH** at kanishkkumar2004@gmail.com.
|
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
[build-system]
|
|
2
|
+
requires = ["setuptools>=61.0"]
|
|
3
|
+
build-backend = "setuptools.build_meta"
|
|
4
|
+
|
|
5
|
+
[project]
|
|
6
|
+
name = "daie"
|
|
7
|
+
version = "1.0.1"
|
|
8
|
+
authors = [
|
|
9
|
+
{ name="Kanishk Kumar Singh", email="kanishkkumar2004@gmail.com" }
|
|
10
|
+
]
|
|
11
|
+
description = "A Python library for creating and deploying decentralized AI agents with tools"
|
|
12
|
+
readme = "README.md"
|
|
13
|
+
requires-python = ">=3.10"
|
|
14
|
+
classifiers = [
|
|
15
|
+
"Development Status :: 3 - Alpha",
|
|
16
|
+
"Intended Audience :: Developers",
|
|
17
|
+
"License :: OSI Approved :: MIT License",
|
|
18
|
+
"Operating System :: OS Independent",
|
|
19
|
+
"Programming Language :: Python :: 3",
|
|
20
|
+
"Programming Language :: Python :: 3.11",
|
|
21
|
+
"Programming Language :: Python :: 3.12",
|
|
22
|
+
"Topic :: Software Development :: Libraries :: Python Modules",
|
|
23
|
+
"Topic :: Scientific/Engineering :: Artificial Intelligence",
|
|
24
|
+
]
|
|
25
|
+
|
|
26
|
+
dependencies = [
|
|
27
|
+
"nats-py>=2.6.0",
|
|
28
|
+
"cryptography>=46.0.0",
|
|
29
|
+
"python-dotenv>=1.2.0",
|
|
30
|
+
"pydantic>=2.12.0",
|
|
31
|
+
"pydantic-settings>=2.12.0",
|
|
32
|
+
"requests>=2.31.0",
|
|
33
|
+
"rich>=13.0.0",
|
|
34
|
+
"typer>=0.12.0"
|
|
35
|
+
]
|
|
36
|
+
|
|
37
|
+
[project.optional-dependencies]
|
|
38
|
+
dev = [
|
|
39
|
+
"pytest>=9.0.0",
|
|
40
|
+
"pytest-asyncio>=1.3.0",
|
|
41
|
+
"pytest-cov>=7.0.0",
|
|
42
|
+
"black>=24.0.0",
|
|
43
|
+
"flake8>=7.0.0",
|
|
44
|
+
"mypy>=1.8.0",
|
|
45
|
+
]
|
|
46
|
+
docs = [
|
|
47
|
+
"sphinx>=7.0.0",
|
|
48
|
+
"sphinx-rtd-theme>=2.0.0",
|
|
49
|
+
"nbsphinx>=0.9.0",
|
|
50
|
+
]
|
|
51
|
+
|
|
52
|
+
[project.scripts]
|
|
53
|
+
daie = "daie.cli.main:cli"
|
|
54
|
+
daie-agent = "daie.cli.agent:cli"
|
|
55
|
+
daie-core = "daie.cli.core:cli"
|
|
56
|
+
|
|
57
|
+
[tool.pytest.ini_options]
|
|
58
|
+
testpaths = ["tests"]
|
|
59
|
+
python_files = ["test_*.py"]
|
|
60
|
+
python_classes = ["Test*"]
|
|
61
|
+
python_functions = ["test_*"]
|
|
62
|
+
asyncio_mode = "auto"
|
|
63
|
+
|
|
64
|
+
[tool.black]
|
|
65
|
+
line-length = 88
|
|
66
|
+
target-version = ['py311']
|
|
67
|
+
include = '\.pyi?$'
|
|
68
|
+
exclude = '''
|
|
69
|
+
/(
|
|
70
|
+
\.eggs
|
|
71
|
+
| \.git
|
|
72
|
+
| \.hg
|
|
73
|
+
| \.mypy_cache
|
|
74
|
+
| \.tox
|
|
75
|
+
| \.venv
|
|
76
|
+
| _build
|
|
77
|
+
| buck-out
|
|
78
|
+
| build
|
|
79
|
+
| dist
|
|
80
|
+
)/
|
|
81
|
+
'''
|
|
82
|
+
|
|
83
|
+
[tool.flake8]
|
|
84
|
+
max-line-length = 88
|
|
85
|
+
extend-ignore = ["E203", "W503"]
|
|
86
|
+
|
daie-1.0.1/setup.cfg
ADDED