dao-ai 0.1.1__py3-none-any.whl → 0.1.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dao_ai/agent_as_code.py +2 -5
- dao_ai/cli.py +65 -15
- dao_ai/config.py +672 -218
- dao_ai/genie/cache/core.py +6 -2
- dao_ai/genie/cache/lru.py +29 -11
- dao_ai/genie/cache/semantic.py +95 -44
- dao_ai/hooks/core.py +5 -5
- dao_ai/logging.py +56 -0
- dao_ai/memory/core.py +61 -44
- dao_ai/memory/databricks.py +54 -41
- dao_ai/memory/postgres.py +77 -36
- dao_ai/middleware/assertions.py +45 -17
- dao_ai/middleware/core.py +13 -7
- dao_ai/middleware/guardrails.py +30 -25
- dao_ai/middleware/human_in_the_loop.py +9 -5
- dao_ai/middleware/message_validation.py +61 -29
- dao_ai/middleware/summarization.py +16 -11
- dao_ai/models.py +172 -69
- dao_ai/nodes.py +148 -19
- dao_ai/optimization.py +26 -16
- dao_ai/orchestration/core.py +15 -8
- dao_ai/orchestration/supervisor.py +22 -8
- dao_ai/orchestration/swarm.py +57 -12
- dao_ai/prompts.py +17 -17
- dao_ai/providers/databricks.py +365 -155
- dao_ai/state.py +24 -6
- dao_ai/tools/__init__.py +2 -0
- dao_ai/tools/agent.py +1 -3
- dao_ai/tools/core.py +7 -7
- dao_ai/tools/email.py +29 -77
- dao_ai/tools/genie.py +18 -13
- dao_ai/tools/mcp.py +223 -156
- dao_ai/tools/python.py +5 -2
- dao_ai/tools/search.py +1 -1
- dao_ai/tools/slack.py +21 -9
- dao_ai/tools/sql.py +202 -0
- dao_ai/tools/time.py +30 -7
- dao_ai/tools/unity_catalog.py +129 -86
- dao_ai/tools/vector_search.py +318 -244
- dao_ai/utils.py +15 -10
- dao_ai-0.1.3.dist-info/METADATA +455 -0
- dao_ai-0.1.3.dist-info/RECORD +64 -0
- dao_ai-0.1.1.dist-info/METADATA +0 -1878
- dao_ai-0.1.1.dist-info/RECORD +0 -62
- {dao_ai-0.1.1.dist-info → dao_ai-0.1.3.dist-info}/WHEEL +0 -0
- {dao_ai-0.1.1.dist-info → dao_ai-0.1.3.dist-info}/entry_points.txt +0 -0
- {dao_ai-0.1.1.dist-info → dao_ai-0.1.3.dist-info}/licenses/LICENSE +0 -0
dao_ai/utils.py
CHANGED
@@ -27,8 +27,8 @@ def is_installed() -> bool:
     site_packages.append(os.path.abspath(site.getusersitepackages()))
 
     found: bool = any(current_file.startswith(pkg_path) for pkg_path in site_packages)
-    logger.
-
+    logger.trace(
+        "Checking if dao_ai is installed", is_installed=found, current_file=current_file
     )
     return found
 
@@ -85,7 +85,7 @@ def get_default_databricks_host() -> str | None:
         w: WorkspaceClient = WorkspaceClient()
         return normalize_host(w.config.host)
     except Exception:
-        logger.
+        logger.trace("Could not get default Databricks host from WorkspaceClient")
         return None
 
 
@@ -105,7 +105,7 @@ def dao_ai_version() -> str:
         return version("dao-ai")
     except PackageNotFoundError:
         # Package not installed, try reading from pyproject.toml
-        logger.
+        logger.trace(
             "dao-ai package not installed, attempting to read version from pyproject.toml"
         )
         try:
@@ -126,19 +126,24 @@ def dao_ai_version() -> str:
 
             if not pyproject_path.exists():
                 logger.warning(
-
+                    "Cannot determine dao-ai version: pyproject.toml not found",
+                    path=str(pyproject_path),
                 )
                 return "dev"
 
             with open(pyproject_path, "rb") as f:
                 pyproject_data = tomllib.load(f)
                 pkg_version = pyproject_data.get("project", {}).get("version", "dev")
-                logger.
-
+                logger.trace(
+                    "Read version from pyproject.toml",
+                    version=pkg_version,
+                    path=str(pyproject_path),
                 )
                 return pkg_version
     except Exception as e:
-        logger.warning(
+        logger.warning(
+            "Cannot determine dao-ai version from pyproject.toml", error=str(e)
+        )
         return "dev"
 
 
@@ -200,7 +205,7 @@ def load_function(function_name: str) -> Callable[..., Any]:
     >>> func = callable_from_fqn("dao_ai.models.get_latest_model_version")
     >>> version = func("my_model")
     """
-    logger.
+    logger.trace("Loading function", function_name=function_name)
 
     try:
         # Split the FQN into module path and function name
@@ -251,7 +256,7 @@ def type_from_fqn(type_name: str) -> type:
    >>> ProductModel = type_from_fqn("my_models.ProductInfo")
    >>> instance = ProductModel(name="Widget", price=9.99)
    """
-    logger.
+    logger.trace("Loading type", type_name=type_name)

    try:
        # Split the FQN into module path and class name
dao_ai-0.1.3.dist-info/METADATA
ADDED
@@ -0,0 +1,455 @@
Metadata-Version: 2.4
Name: dao-ai
Version: 0.1.3
Summary: DAO AI: A modular, multi-agent orchestration framework for complex AI workflows. Supports agent handoff, tool integration, and dynamic configuration via YAML.
Project-URL: Homepage, https://github.com/natefleming/dao-ai
Project-URL: Documentation, https://natefleming.github.io/dao-ai
Project-URL: Repository, https://github.com/natefleming/dao-ai
Project-URL: Issues, https://github.com/natefleming/dao-ai/issues
Project-URL: Changelog, https://github.com/natefleming/dao-ai/blob/main/CHANGELOG.md
Author-email: Nate Fleming <nate.fleming@databricks.com>, Nate Fleming <nate.fleming@gmail.com>
Maintainer-email: Nate Fleming <nate.fleming@databricks.com>
License: MIT
License-File: LICENSE
Keywords: agents,ai,databricks,langchain,langgraph,llm,multi-agent,orchestration,vector-search,workflow
Classifier: Development Status :: 3 - Alpha
Classifier: Intended Audience :: Developers
Classifier: Intended Audience :: Science/Research
Classifier: License :: OSI Approved :: MIT License
Classifier: Operating System :: OS Independent
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.11
Classifier: Programming Language :: Python :: 3.12
Classifier: Programming Language :: Python :: 3.13
Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Classifier: Topic :: System :: Distributed Computing
Requires-Python: >=3.11
Requires-Dist: databricks-agents>=1.9.0
Requires-Dist: databricks-langchain[memory]>=0.12.1
Requires-Dist: databricks-mcp>=0.5.0
Requires-Dist: databricks-sdk[openai]>=0.76.0
Requires-Dist: ddgs>=9.10.0
Requires-Dist: dspy>=2.6.27
Requires-Dist: flashrank>=0.2.10
Requires-Dist: gepa>=0.0.22
Requires-Dist: grandalf>=0.8
Requires-Dist: langchain-community>=0.3.26
Requires-Dist: langchain-mcp-adapters>=0.2.1
Requires-Dist: langchain-tavily>=0.2.15
Requires-Dist: langchain>=1.2.0
Requires-Dist: langgraph-checkpoint-postgres>=3.0.2
Requires-Dist: langgraph>=1.0.5
Requires-Dist: langmem>=0.0.30
Requires-Dist: loguru>=0.7.3
Requires-Dist: mcp>=1.24.0
Requires-Dist: mlflow>=3.8.1
Requires-Dist: nest-asyncio>=1.6.0
Requires-Dist: openevals>=0.1.3
Requires-Dist: openpyxl>=3.1.5
Requires-Dist: psycopg[binary,pool]>=3.3.2
Requires-Dist: pydantic>=2.12.5
Requires-Dist: python-dotenv>=1.2.1
Requires-Dist: pyyaml>=6.0.2
Requires-Dist: rich>=14.2.0
Requires-Dist: scipy>=1.14.0
Requires-Dist: sqlparse>=0.5.4
Requires-Dist: tomli>=2.3.0
Requires-Dist: unitycatalog-ai[databricks]>=0.3.2
Provides-Extra: databricks
Requires-Dist: databricks-connect>=16.0.0; extra == 'databricks'
Requires-Dist: databricks-vectorsearch>=0.63; extra == 'databricks'
Requires-Dist: pyspark>=3.5.0; extra == 'databricks'
Provides-Extra: dev
Requires-Dist: mypy>=1.19.1; extra == 'dev'
Requires-Dist: pre-commit>=4.5.1; extra == 'dev'
Requires-Dist: pytest>=9.0.2; extra == 'dev'
Requires-Dist: ruff>=0.14.9; extra == 'dev'
Provides-Extra: docs
Requires-Dist: mkdocs-material>=9.7.1; extra == 'docs'
Requires-Dist: mkdocs>=1.6.1; extra == 'docs'
Requires-Dist: mkdocstrings[python]>=1.0.0; extra == 'docs'
Provides-Extra: test
Requires-Dist: pytest-cov>=7.0.0; extra == 'test'
Requires-Dist: pytest-mock>=3.15.1; extra == 'test'
Requires-Dist: pytest-rerunfailures>=15.0; extra == 'test'
Requires-Dist: pytest-xdist>=3.6.1; extra == 'test'
Requires-Dist: pytest>=9.0.2; extra == 'test'
Description-Content-Type: text/markdown

# DAO: Declarative Agent Orchestration

[](CHANGELOG.md)
[](https://www.python.org/)
[](LICENSE)

**Production-grade AI agents defined in YAML, powered by LangGraph, deployed on Databricks.**

DAO is an **infrastructure-as-code framework** for building, deploying, and managing multi-agent AI systems. Instead of writing boilerplate Python code to wire up agents, tools, and orchestration, you define everything declaratively in YAML configuration files.

```yaml
# Define an agent in 10 lines of YAML
agents:
  product_expert:
    name: product_expert
    model: *claude_sonnet
    tools:
      - *vector_search_tool
      - *genie_tool
    prompt: |
      You are a product expert. Answer questions about inventory and pricing.
```

### 🎨 Visual Configuration Studio

Prefer a visual interface? Check out **[DAO AI Builder](https://github.com/natefleming/dao-ai-builder)** — a React-based web application that provides a graphical interface for creating and editing DAO configurations. Perfect for:

- **Exploring** DAO's capabilities through an intuitive UI
- **Learning** the configuration structure with guided forms
- **Building** agents visually without writing YAML manually
- **Importing** and editing existing configurations

DAO AI Builder generates valid YAML configurations that work seamlessly with this framework. Use whichever workflow suits you best — visual builder or direct YAML editing.

<p align="center">
<img src="https://raw.githubusercontent.com/natefleming/dao-ai-builder/6ca07d2b977d9509b75edfb2e0f45681c840a931/docs/images/dao-ai-builder-screenshot.png" width="700" alt="DAO AI Builder Screenshot">
</p>

---

## 📚 Documentation

### Getting Started
- **[Why DAO?](docs/why-dao.md)** - Learn what DAO is and how it compares to other platforms
- **[Quick Start](#quick-start)** - Build and deploy your first agent in minutes
- **[Architecture](docs/architecture.md)** - Understand how DAO works under the hood

### Core Concepts
- **[Key Capabilities](docs/key-capabilities.md)** - Explore 14 powerful features for production agents
- **[Configuration Reference](docs/configuration-reference.md)** - Complete YAML configuration guide
- **[Examples](docs/examples.md)** - Ready-to-use example configurations

### Reference
- **[CLI Reference](docs/cli-reference.md)** - Command-line interface documentation
- **[Python API](docs/python-api.md)** - Programmatic usage and customization
- **[FAQ](docs/faq.md)** - Frequently asked questions

### Contributing
- **[Contributing Guide](docs/contributing.md)** - How to contribute to DAO

---

## Quick Start

### Prerequisites

Before you begin, you'll need:

- **Python 3.11 or newer** installed on your computer ([download here](https://www.python.org/downloads/))
- **A Databricks workspace** (ask your IT team or see [Databricks docs](https://docs.databricks.com/))
- Access to **Unity Catalog** (your organization's data catalog)
- **Model Serving** enabled (for deploying AI agents)
- *Optional*: Vector Search, Genie (for advanced features)

**Not sure if you have access?** Your Databricks administrator can grant you permissions.

### Installation

**Option 1: Install from PyPI (Recommended)**

The simplest way to get started:

```bash
# Install directly from PyPI
pip install dao-ai
```

**Option 2: For developers familiar with Git**

```bash
# Clone this repository
git clone <repo-url>
cd dao-ai

# Create an isolated Python environment
uv venv
source .venv/bin/activate  # On Windows: .venv\Scripts\activate

# Install DAO and its dependencies
make install
```

**Option 3: For those new to development**

1. Download this project as a ZIP file (click the green "Code" button on GitHub → Download ZIP)
2. Extract the ZIP file to a folder on your computer
3. Open a terminal/command prompt and navigate to that folder
4. Run these commands:

```bash
# On Mac/Linux:
python3 -m venv .venv
source .venv/bin/activate
pip install -e .

# On Windows:
python -m venv .venv
.venv\Scripts\activate
pip install -e .
```

**Verification:** Run `dao-ai --version` to confirm the installation succeeded.
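A quick smoke test from the activated virtual environment looks like the sketch below. Only `dao-ai --version` is documented above; the `--help` flag is an assumption based on the usual CLI convention, and the exact output depends on the release you installed.

```bash
# Confirm the CLI is on your PATH and report the installed version
dao-ai --version

# Assumed to list the available subcommands (validate, schema, graph, bundle, chat)
dao-ai --help
```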

### Your First Agent

Let's build a simple AI assistant in five steps. This agent will use a language model from Databricks to answer questions.

**Step 1: Create a configuration file**

Create a new file called `config/my_agent.yaml` and paste this content:

```yaml
schemas:
  my_schema: &my_schema
    catalog_name: my_catalog  # Replace with your Unity Catalog name
    schema_name: my_schema    # Replace with your schema name

resources:
  llms:
    default_llm: &default_llm
      name: databricks-meta-llama-3-3-70b-instruct  # The AI model to use

agents:
  assistant: &assistant
    name: assistant
    model: *default_llm
    prompt: |
      You are a helpful assistant.

app:
  name: my_first_agent
  registered_model:
    schema: *my_schema
    name: my_first_agent
  agents:
    - *assistant
  orchestration:
    swarm:
      model: *default_llm
```

**💡 What's happening here?**
- `schemas`: Points to your Unity Catalog location (where the agent will be registered)
- `resources`: Defines the AI model (Llama 3.3 70B in this case)
- `agents`: Describes your assistant agent and its behavior
- `app`: Configures how the agent is deployed and orchestrated
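The `&name` and `*name` markers used throughout the file are plain YAML anchors and aliases, not DAO-specific syntax: `&name` labels a node where it is first written, and `*name` reuses that node elsewhere without repeating it. A minimal, framework-agnostic sketch (the `shared_llm` and `chat_bot` names are made up for illustration):

```yaml
# Standard YAML: "&shared_llm" defines an anchor, "*shared_llm" reuses it.
llms:
  shared_llm: &shared_llm
    name: some-model-endpoint   # hypothetical endpoint name

agents:
  chat_bot:
    model: *shared_llm          # expands to the same mapping defined above
```

This is why the config above can declare `default_llm` once and reference it from both the assistant and the swarm orchestrator.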

**Step 2: Validate your configuration**

This checks for errors in your YAML file:

```bash
dao-ai validate -c config/my_agent.yaml
```

You should see: ✅ `Configuration is valid!`

**Step 3: Visualize the agent workflow** (optional)

Generate a diagram showing how your agent works:

```bash
dao-ai graph -c config/my_agent.yaml -o my_agent.png
```

This creates `my_agent.png` — open it to see a visual representation of your agent.

**Step 4: Deploy to Databricks**

**Option A: Using Python** (programmatic deployment)

```python
from dao_ai.config import AppConfig

# Load your configuration
config = AppConfig.from_file("config/my_agent.yaml")

# Package the agent as an MLflow model
config.create_agent()

# Deploy to Databricks Model Serving
config.deploy_agent()
```

**Option B: Using the CLI** (one command)

```bash
dao-ai bundle --deploy --run -c config/my_agent.yaml
```

This single command:
1. Validates your configuration
2. Packages the agent
3. Deploys it to Databricks
4. Creates a serving endpoint

**Step 5: Interact with your agent**

Once deployed, you can chat with your agent using Python:

```python
from mlflow.deployments import get_deploy_client

# Connect to your Databricks workspace
client = get_deploy_client("databricks")

# Send a message to your agent
response = client.predict(
    endpoint="my_first_agent",
    inputs={
        "messages": [{"role": "user", "content": "Hello! What can you help me with?"}],
        "configurable": {
            "thread_id": "1",  # Conversation ID
            "user_id": "demo_user"  # User identifier
        }
    }
)

# Print the agent's response
print(response["message"]["content"])
```
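Because the request carries a `thread_id`, a follow-up call that reuses the same value should continue the same conversation (how much context is retained depends on the memory configuration). A minimal sketch of a second turn, assuming the endpoint name, request shape, and response shape shown in the example above:

```python
# Follow-up turn on the same conversation thread ("1" from the previous call).
follow_up = client.predict(
    endpoint="my_first_agent",
    inputs={
        "messages": [{"role": "user", "content": "Can you summarize what you just told me?"}],
        "configurable": {
            "thread_id": "1",        # same thread as before, so prior context is available
            "user_id": "demo_user",
        },
    },
)
print(follow_up["message"]["content"])
```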

**🎉 Congratulations!** You've built and deployed your first AI agent with DAO.

**Next steps:**
- Explore the [`config/examples/`](config/examples/) folder for more advanced configurations
- Try the [DAO AI Builder](https://github.com/natefleming/dao-ai-builder) visual interface
- Learn about [Key Capabilities](docs/key-capabilities.md) to add advanced features
- Read the [Architecture](docs/architecture.md) documentation to understand how it works

---

## Key Features at a Glance

DAO provides powerful capabilities for building production-ready AI agents:

| Feature | Description |
|---------|-------------|
| **Multi-Tool Support** | Python functions, Unity Catalog, MCP, Agent Endpoints |
| **On-Behalf-Of User** | Per-user permissions and governance |
| **Advanced Caching** | Two-tier (LRU + Semantic) caching for cost optimization |
| **Vector Search Reranking** | Improve RAG quality with FlashRank |
| **Human-in-the-Loop** | Approval workflows for sensitive operations |
| **Memory & Persistence** | PostgreSQL, Lakebase, or in-memory state |
| **Prompt Registry** | Version and manage prompts in MLflow |
| **Prompt Optimization** | Automated tuning with GEPA (Generative Evolution of Prompts and Agents) |
| **Guardrails** | Content filters, safety checks, validation |
| **Middleware** | Input validation, logging, performance monitoring, audit trails |
| **Conversation Summarization** | Handle long conversations automatically |
| **Structured Output** | JSON schema for predictable responses |
| **Custom I/O** | Flexible input/output with runtime state |
| **Hook System** | Lifecycle hooks for initialization and cleanup |

👉 **Learn more:** [Key Capabilities Documentation](docs/key-capabilities.md)

---

## Architecture Overview

```mermaid
graph TB
    subgraph yaml["YAML Configuration"]
        direction LR
        schemas[Schemas] ~~~ resources[Resources] ~~~ tools[Tools] ~~~ agents[Agents] ~~~ orchestration[Orchestration]
    end

    subgraph dao["DAO Framework (Python)"]
        direction LR
        config[Config<br/>Loader] ~~~ graph_builder[Graph<br/>Builder] ~~~ nodes[Nodes<br/>Factory] ~~~ tool_factory[Tool<br/>Factory]
    end

    subgraph langgraph["LangGraph Runtime"]
        direction LR
        msg_hook[Message<br/>Hook] --> supervisor[Supervisor/<br/>Swarm] --> specialized[Specialized<br/>Agents]
    end

    subgraph databricks["Databricks Platform"]
        direction LR
        model_serving[Model<br/>Serving] ~~~ unity_catalog[Unity<br/>Catalog] ~~~ vector_search[Vector<br/>Search] ~~~ genie_spaces[Genie<br/>Spaces] ~~~ mlflow[MLflow]
    end

    yaml ==> dao
    dao ==> langgraph
    langgraph ==> databricks

    style yaml fill:#1B5162,stroke:#618794,stroke-width:3px,color:#fff
    style dao fill:#FFAB00,stroke:#7D5319,stroke-width:3px,color:#1B3139
    style langgraph fill:#618794,stroke:#143D4A,stroke-width:3px,color:#fff
    style databricks fill:#00875C,stroke:#095A35,stroke-width:3px,color:#fff
```

👉 **Learn more:** [Architecture Documentation](docs/architecture.md)

---

## Example Configurations

The `config/examples/` directory contains ready-to-use configurations organized in a **progressive learning path**:

- `01_getting_started/minimal.yaml` - Simplest possible agent
- `02_tools/vector_search_with_reranking.yaml` - RAG with improved accuracy
- `04_genie/genie_semantic_cache.yaml` - NL-to-SQL with two-tier caching
- `05_memory/conversation_summarization.yaml` - Long conversation handling
- `06_on_behalf_of_user/obo_basic.yaml` - User-level access control
- `07_human_in_the_loop/human_in_the_loop.yaml` - Approval workflows

And many more! Follow the numbered path or jump to what you need. See the full guide in [Examples Documentation](docs/examples.md).
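The examples plug straight into the CLI commands from the quick start. For instance, assuming you are working from a checkout of the repository so the `config/examples/` paths resolve, a sketch of trying the first example:

```bash
# Validate one of the bundled examples, then render its workflow diagram
dao-ai validate -c config/examples/01_getting_started/minimal.yaml
dao-ai graph -c config/examples/01_getting_started/minimal.yaml -o minimal.png
```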

---

## CLI Quick Reference

```bash
# Validate configuration
dao-ai validate -c config/my_config.yaml

# Generate JSON schema for IDE support
dao-ai schema > schemas/model_config_schema.json

# Visualize agent workflow
dao-ai graph -c config/my_config.yaml -o workflow.png

# Deploy with Databricks Asset Bundles
dao-ai bundle --deploy --run -c config/my_config.yaml

# Interactive chat with agent
dao-ai chat -c config/my_config.yaml
```
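To get completion and validation from the generated schema in an editor, one common approach (an assumption here, not something the package documents) is the `yaml-language-server` modeline understood by the VS Code YAML extension. Point it at wherever you wrote the schema file:

```yaml
# yaml-language-server: $schema=./schemas/model_config_schema.json
agents:
  assistant:
    name: assistant
```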

👉 **Learn more:** [CLI Reference Documentation](docs/cli-reference.md)

---

## Community & Support

- **Documentation**: [docs/](docs/)
- **Examples**: [config/examples/](config/examples/)
- **Issues**: [GitHub Issues](https://github.com/natefleming/dao-ai/issues)
- **Discussions**: [GitHub Discussions](https://github.com/natefleming/dao-ai/discussions)

---

## Contributing

We welcome contributions! See the [Contributing Guide](docs/contributing.md) for details on:

- Setting up your development environment
- Code style and testing guidelines
- How to submit pull requests
- Project structure overview

---

## License

This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
dao_ai-0.1.3.dist-info/RECORD
ADDED
@@ -0,0 +1,64 @@
dao_ai/__init__.py,sha256=18P98ExEgUaJ1Byw440Ct1ty59v6nxyWtc5S6Uq2m9Q,1062
dao_ai/agent_as_code.py,sha256=xIlLDpPVfmDVzLvbdY_V_CrC4Jvj2ItCWJ-NzdrszTo,538
dao_ai/catalog.py,sha256=sPZpHTD3lPx4EZUtIWeQV7VQM89WJ6YH__wluk1v2lE,4947
dao_ai/cli.py,sha256=1TS4I6LQq6ixxd0JQA1bt3DHRizcSiynZkSBcZA3-XE,33149
dao_ai/config.py,sha256=FmXXwFMWqSxl2ofxPIL2ZLRbx7kx-u6VjFx4wdQnFVk,119755
dao_ai/graph.py,sha256=1-uQlo7iXZQTT3uU8aYu0N5rnhw5_g_2YLwVsAs6M-U,1119
dao_ai/logging.py,sha256=lYy4BmucCHvwW7aI3YQkQXKJtMvtTnPDu9Hnd7_O4oc,1556
dao_ai/messages.py,sha256=4ZBzO4iFdktGSLrmhHzFjzMIt2tpaL-aQLHOQJysGnY,6959
dao_ai/models.py,sha256=AwzwTRTNZF-UOh59HsuXEgFk_YH6q6M-mERNDe64Z8k,81783
dao_ai/nodes.py,sha256=uI2L_icOAB1xYWSOx7W5QBbTxnvrt16hpQJKuSRAqos,10996
dao_ai/optimization.py,sha256=phK6t4wYmWPObCjGUBHdZzsaFXGhQOjhAek2bAEfwXo,22971
dao_ai/prompts.py,sha256=G0ng5f2PkzfgdKrSl03Rnd6riZn5APedof0GAzsWQI8,4792
dao_ai/state.py,sha256=0wbbzfQmldkCu26gdTE5j0Rl-_pfilza-YIHPbSWlvI,6394
dao_ai/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
dao_ai/utils.py,sha256=_Urd7Nj2VzrgPKf3NS4E6vt0lWRhEUddBqWN9BksqeE,11543
dao_ai/vector_search.py,sha256=jlaFS_iizJ55wblgzZmswMM3UOL-qOp2BGJc0JqXYSg,2839
dao_ai/genie/__init__.py,sha256=vdEyGhrt6L8GlK75SyYvTnl8QpHKDCJC5hJKLg4DesQ,1063
dao_ai/genie/core.py,sha256=HPKbocvhnnw_PkQwfoq5bpgQmL9lZyyS6_goTJL8yiY,1073
dao_ai/genie/cache/__init__.py,sha256=JfgCJl1NYQ1aZvZ4kly4T6uQK6ZCJ6PX_htuq7nJF50,1203
dao_ai/genie/cache/base.py,sha256=_MhHqYrHejVGrJjSLX26TdHwvQZb-HgiantRYSB8fJY,1961
dao_ai/genie/cache/core.py,sha256=1hvV72kMJ5md78Am4cdAi6wpTC6vV7mn6Srpb8Pti_M,2544
dao_ai/genie/cache/lru.py,sha256=E7zBqdLvuFj_hzZhjcoMyfacbtfTgkVRNuFqVYi2Ic4,11835
dao_ai/genie/cache/semantic.py,sha256=P1TrlFLMmSgLPFeZYbQTSbsjNpgXmovgqxBO92zINFM,38075
dao_ai/hooks/__init__.py,sha256=uA4DQdP9gDf4SyNjNx9mWPoI8UZOcTyFsCXV0NraFvQ,463
dao_ai/hooks/core.py,sha256=yZAfRfB0MyMo--uwGr4STtVxxen5s4ZUrNTnR3a3qkA,1721
dao_ai/memory/__init__.py,sha256=Us3wFehvug_h83m-UJ7OXdq2qZ0e9nHBQE7m5RwoAd8,559
dao_ai/memory/base.py,sha256=99nfr2UZJ4jmfTL_KrqUlRSCoRxzkZyWyx5WqeUoMdQ,338
dao_ai/memory/core.py,sha256=38H-JLIyUrRDIECLvpXK3iJlWG35X97E-DTo_4c3Jzc,6317
dao_ai/memory/databricks.py,sha256=SM6nwLjhSRJO4hLc3GUuht5YydYtTi3BAOae6jPwTm4,14377
dao_ai/memory/postgres.py,sha256=q9IIAGs0wuaV-3rUIn4dtzOxbkCCoB-yv1Rtod7ohjI,16467
dao_ai/middleware/__init__.py,sha256=epSCtCtttIogl21nVK768Ln35L0mOShVczyURtR6Ln8,3609
dao_ai/middleware/assertions.py,sha256=VmTBhHKulcDwicEP9IB3_HyI4hDhmq3ZP_85InGgvCc,27360
dao_ai/middleware/base.py,sha256=uG2tpdnjL5xY5jCKvb_m3UTBtl4ZC6fJQUkDsQvV8S4,1279
dao_ai/middleware/core.py,sha256=rRPwNCBAj40lbZK08hdzP9utrqaHl3l520jgBOwI_Z4,2124
dao_ai/middleware/guardrails.py,sha256=qM_V0fb1fa2iuKGy_nmOvVrOP72UiPtKgrfSqxngQUc,13975
dao_ai/middleware/human_in_the_loop.py,sha256=Ylmiqo17dOKsUDhbD103tnknGcidVX0Joy7WPlodo78,7491
dao_ai/middleware/message_validation.py,sha256=evdfozSd2rsEARTrKaYAqNWhZCtgrnFAi6YudvT54qQ,19754
dao_ai/middleware/summarization.py,sha256=moqcR27vEeMc7uN_OJoStOFSEFQu-GqvjH933b3DTPE,7127
dao_ai/orchestration/__init__.py,sha256=i85CLfRR335NcCFhaXABcMkn6WZfXnJ8cHH4YZsZN0s,1622
dao_ai/orchestration/core.py,sha256=qoU7uMXBJCth-sqfu0jRE1L0GOn5H4LoZdRUY1Ib3DI,9585
dao_ai/orchestration/supervisor.py,sha256=0aQLL1a3oBjG9LYTuV1y73rB0mqkIkax7MwtZ7xx_pE,9833
dao_ai/orchestration/swarm.py,sha256=8tp1eGmsQqqWpaDcjPoJckddPWohZdmmN0RGRJ_xzOA,9198
dao_ai/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
dao_ai/providers/base.py,sha256=-fjKypCOk28h6vioPfMj9YZSw_3Kcbi2nMuAyY7vX9k,1383
dao_ai/providers/databricks.py,sha256=63jUkvFvdQoJGUlSEJgCsq6hPtj6tviE-AW_TPJvuvc,61850
dao_ai/tools/__init__.py,sha256=SRd7W2DOCXKbWWy8lclRtJiCskz7SDAm94qaFF47urQ,1664
dao_ai/tools/agent.py,sha256=plIWALywRjaDSnot13nYehBsrHRpBUpsVZakoGeajOE,1858
dao_ai/tools/core.py,sha256=bRIN3BZhRQX8-Kpu3HPomliodyskCqjxynQmYbk6Vjs,3783
dao_ai/tools/email.py,sha256=A3TsCoQgJR7UUWR0g45OPRGDpVoYwctFs1MOZMTt_d4,7389
dao_ai/tools/genie.py,sha256=4e_5MeAe7kDzHbYeXuNPFbY5z8ci3ouj8l5254CZ2lA,8874
dao_ai/tools/mcp.py,sha256=EFcKo_f-kPMnyR5w6oh0g4Hy4jyuVJEcUGzSiI9dXlg,8505
dao_ai/tools/memory.py,sha256=lwObKimAand22Nq3Y63tsv-AXQ5SXUigN9PqRjoWKes,1836
dao_ai/tools/python.py,sha256=jWFnZPni2sCdtd8D1CqXnZIPHnWkdK27bCJnBXpzhvo,1879
dao_ai/tools/search.py,sha256=cJ3D9FKr1GAR6xz55dLtRkjtQsI0WRueGt9TPDFpOxc,433
dao_ai/tools/slack.py,sha256=QpLMXDApjPKyRpEanLp0tOhCp9WXaEBa615p4t0pucs,5040
dao_ai/tools/sql.py,sha256=tKd1gjpLuKdQDyfmyYYtMiNRHDW6MGRbdEVaeqyB8Ok,7632
dao_ai/tools/time.py,sha256=tufJniwivq29y0LIffbgeBTIDE6VgrLpmVf8Qr90qjw,9224
dao_ai/tools/unity_catalog.py,sha256=AjQfW7bvV8NurqDLIyntYRv2eJuTwNdbvex1L5CRjOk,15534
dao_ai/tools/vector_search.py,sha256=oe2uBwl2TfeJIXPpwiS6Rmz7wcHczSxNyqS9P3hE6co,14542
dao_ai-0.1.3.dist-info/METADATA,sha256=0WeeUk6J2uQRdk9Hm1FkCk75RG8psnh3WrSt4A8MfNY,15615
dao_ai-0.1.3.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
dao_ai-0.1.3.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
dao_ai-0.1.3.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
dao_ai-0.1.3.dist-info/RECORD,,