dao-ai 0.1.11__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dao_ai/apps/handlers.py +1 -1
- dao_ai/apps/resources.py +60 -10
- dao_ai/apps/server.py +2 -2
- dao_ai/config.py +29 -28
- dao_ai/providers/databricks.py +74 -20
- {dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/METADATA +4 -3
- {dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/RECORD +10 -10
- {dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/WHEEL +0 -0
- {dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/entry_points.txt +0 -0
- {dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/licenses/LICENSE +0 -0
dao_ai/apps/handlers.py
CHANGED

@@ -34,7 +34,7 @@ mlflow.set_tracking_uri("databricks")
 mlflow.langchain.autolog()
 
 # Get config path from environment or use default
-config_path: str = os.environ.get("DAO_AI_CONFIG_PATH", "
+config_path: str = os.environ.get("DAO_AI_CONFIG_PATH", "dao_ai.yaml")
 
 # Load configuration using AppConfig.from_file (consistent with CLI, notebook, builder)
 config: AppConfig = AppConfig.from_file(config_path)
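The only functional change here is the default config filename. A minimal sketch of the resolution order, assuming nothing beyond what the hunk shows:

```python
import os

# Resolution mirrors the handler: an explicit DAO_AI_CONFIG_PATH wins,
# otherwise the new default "dao_ai.yaml" in the working directory is used.
config_path = os.environ.get("DAO_AI_CONFIG_PATH", "dao_ai.yaml")
print(config_path)
```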
dao_ai/apps/resources.py
CHANGED

@@ -310,6 +310,25 @@ def _extract_secrets_from_config(config: AppConfig) -> list[dict[str, Any]]:
         A list of secret resource dictionaries with unique scope/key pairs
     """
     secrets: dict[tuple[str, str], dict[str, Any]] = {}
+    used_names: set[str] = set()
+
+    def get_unique_resource_name(base_name: str) -> str:
+        """Generate a unique resource name, adding suffix if needed."""
+        sanitized = _sanitize_resource_name(base_name)
+        if sanitized not in used_names:
+            used_names.add(sanitized)
+            return sanitized
+        # Name collision - add numeric suffix
+        counter = 1
+        while True:
+            # Leave room for suffix (e.g., "_1", "_2", etc.)
+            suffix = f"_{counter}"
+            max_base_len = 30 - len(suffix)
+            candidate = sanitized[:max_base_len] + suffix
+            if candidate not in used_names:
+                used_names.add(candidate)
+                return candidate
+            counter += 1
 
     def extract_from_value(value: Any, path: str = "") -> None:
         """Recursively extract secrets from any value."""
@@ -317,9 +336,10 @@ def _extract_secrets_from_config(config: AppConfig) -> list[dict[str, Any]]:
             secret_key = (value.scope, value.secret)
             if secret_key not in secrets:
                 # Create a unique name for the secret resource
-                …
-                "
-                )
+                base_name = f"{value.scope}_{value.secret}".replace("-", "_").replace(
+                    "/", "_"
+                )
+                resource_name = get_unique_resource_name(base_name)
                 secrets[secret_key] = {
                     "name": resource_name,
                     "type": "secret",
@@ -327,7 +347,9 @@ def _extract_secrets_from_config(config: AppConfig) -> list[dict[str, Any]]:
                     "key": value.secret,
                     "permissions": [{"level": "READ"}],
                 }
-                logger.debug(
+                logger.debug(
+                    f"Found secret: {value.scope}/{value.secret} at {path} -> resource: {resource_name}"
+                )
         elif isinstance(value, dict):
             for k, v in value.items():
                 extract_from_value(v, f"{path}.{k}" if path else k)
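The new get_unique_resource_name helper guards against two different secrets sanitizing to the same resource name. A standalone sketch of the collision behavior; the _sanitize_resource_name stand-in below is hypothetical, the real one lives elsewhere in resources.py:

```python
import re

used_names: set[str] = set()

def _sanitize_resource_name(name: str) -> str:
    # Hypothetical stand-in: lowercase, replace disallowed characters, cap at 30 chars.
    return re.sub(r"[^a-z0-9_]", "_", name.lower())[:30]

def get_unique_resource_name(base_name: str) -> str:
    sanitized = _sanitize_resource_name(base_name)
    if sanitized not in used_names:
        used_names.add(sanitized)
        return sanitized
    # Collision: append a numeric suffix while keeping the 30-char budget.
    counter = 1
    while True:
        suffix = f"_{counter}"
        candidate = sanitized[: 30 - len(suffix)] + suffix
        if candidate not in used_names:
            used_names.add(candidate)
            return candidate
        counter += 1

# Two secrets whose scope/key differ only in characters the sanitizer rewrites
# now get distinct resource names instead of silently colliding.
print(get_unique_resource_name("my-scope_api-key"))   # my_scope_api_key
print(get_unique_resource_name("my-scope_api.key"))   # my_scope_api_key_1
```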
@@ -742,6 +764,25 @@ def _extract_sdk_secrets_from_config(config: AppConfig) -> list[AppResource]:
         A list of AppResource objects for secrets
     """
     secrets: dict[tuple[str, str], AppResource] = {}
+    used_names: set[str] = set()
+
+    def get_unique_resource_name(base_name: str) -> str:
+        """Generate a unique resource name, adding suffix if needed."""
+        sanitized = _sanitize_resource_name(base_name)
+        if sanitized not in used_names:
+            used_names.add(sanitized)
+            return sanitized
+        # Name collision - add numeric suffix
+        counter = 1
+        while True:
+            # Leave room for suffix (e.g., "_1", "_2", etc.)
+            suffix = f"_{counter}"
+            max_base_len = 30 - len(suffix)
+            candidate = sanitized[:max_base_len] + suffix
+            if candidate not in used_names:
+                used_names.add(candidate)
+                return candidate
+            counter += 1
 
     def extract_from_value(value: Any) -> None:
         """Recursively extract secrets from any value."""
@@ -749,10 +790,10 @@ def _extract_sdk_secrets_from_config(config: AppConfig) -> list[AppResource]:
             secret_key = (value.scope, value.secret)
             if secret_key not in secrets:
                 # Create a unique name for the secret resource
-                …
-                "
-                )
-                resource_name =
+                base_name = f"{value.scope}_{value.secret}".replace("-", "_").replace(
+                    "/", "_"
+                )
+                resource_name = get_unique_resource_name(base_name)
 
                 resource = AppResource(
                     name=resource_name,
@@ -764,7 +805,7 @@ def _extract_sdk_secrets_from_config(config: AppConfig) -> list[AppResource]:
                 )
                 secrets[secret_key] = resource
                 logger.debug(
-                    f"Found secret for SDK resource: {value.scope}/{value.secret}"
+                    f"Found secret for SDK resource: {value.scope}/{value.secret} -> resource: {resource_name}"
                 )
         elif isinstance(value, dict):
             for v in value.values():
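Both extractors walk the parsed config recursively and deduplicate on the (scope, key) pair. A simplified sketch of that traversal over a plain nested dict; the Secret type and field layout below are stand-ins, not the package's actual config model:

```python
from dataclasses import dataclass
from typing import Any

@dataclass(frozen=True)
class Secret:  # stand-in for the config's secret value type
    scope: str
    secret: str

def collect_secrets(value: Any, found: dict[tuple[str, str], Secret]) -> None:
    """Recursively collect unique (scope, key) pairs from a nested config."""
    if isinstance(value, Secret):
        found.setdefault((value.scope, value.secret), value)
    elif isinstance(value, dict):
        for v in value.values():
            collect_secrets(v, found)
    elif isinstance(value, (list, tuple)):
        for v in value:
            collect_secrets(v, found)

config = {
    "llm": {"api_key": Secret("ml-scope", "openai-key")},
    "tools": [{"token": Secret("ml-scope", "openai-key")}],  # duplicate, kept once
}
found: dict[tuple[str, str], Secret] = {}
collect_secrets(config, found)
print(list(found))  # [('ml-scope', 'openai-key')]
```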
@@ -969,12 +1010,21 @@ def generate_app_yaml(
         {"name": "MLFLOW_TRACKING_URI", "value": "databricks"},
         {"name": "MLFLOW_REGISTRY_URI", "value": "databricks-uc"},
         {"name": "MLFLOW_EXPERIMENT_ID", "valueFrom": "experiment"},
-        {"name": "DAO_AI_CONFIG_PATH", "value": "
+        {"name": "DAO_AI_CONFIG_PATH", "value": "dao_ai.yaml"},
     ]
 
     # Extract environment variables from config.app.environment_vars
     config_env_vars = _extract_env_vars_from_config(config)
 
+    # Environment variables that are automatically provided by Databricks Apps
+    # and should not be included in app.yaml
+    platform_provided_env_vars = {"DATABRICKS_HOST"}
+
+    # Filter out platform-provided env vars from config
+    config_env_vars = [
+        e for e in config_env_vars if e["name"] not in platform_provided_env_vars
+    ]
+
     # Merge config env vars, avoiding duplicates (config takes precedence)
     base_env_names = {e["name"] for e in env_vars}
     for config_env in config_env_vars:
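The filter plus merge step matters because Databricks Apps injects DATABRICKS_HOST itself, so redeclaring it in app.yaml can conflict. A small sketch of the filtering and precedence behavior, assuming env vars are dicts with a "name" key as in this hunk (the values are placeholders, and the real merge uses a loop rather than a dict):

```python
base_env_vars = [
    {"name": "MLFLOW_TRACKING_URI", "value": "databricks"},
    {"name": "DAO_AI_CONFIG_PATH", "value": "dao_ai.yaml"},
]
config_env_vars = [
    {"name": "DATABRICKS_HOST", "value": "https://example.cloud.databricks.com"},  # dropped
    {"name": "DAO_AI_CONFIG_PATH", "value": "configs/retail.yaml"},                # overrides base
    {"name": "MY_FEATURE_FLAG", "value": "true"},                                  # appended
]

platform_provided = {"DATABRICKS_HOST"}
config_env_vars = [e for e in config_env_vars if e["name"] not in platform_provided]

merged = {e["name"]: e for e in base_env_vars}
merged.update({e["name"]: e for e in config_env_vars})  # config takes precedence
env_vars = list(merged.values())
print([e["name"] for e in env_vars])
# ['MLFLOW_TRACKING_URI', 'DAO_AI_CONFIG_PATH', 'MY_FEATURE_FLAG']
```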
dao_ai/apps/server.py
CHANGED

@@ -7,13 +7,13 @@ uses the AgentServer for the Databricks Apps runtime.
 
 Configuration Loading:
     The config path is specified via the DAO_AI_CONFIG_PATH environment variable,
-    or defaults to
+    or defaults to dao_ai.yaml in the current directory.
 
 Usage:
     # With environment variable
     DAO_AI_CONFIG_PATH=/path/to/config.yaml python -m dao_ai.apps.server
 
-    # With default
+    # With default dao_ai.yaml in current directory
    python -m dao_ai.apps.server
 """
 
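An equivalent launch from Python, for completeness; runpy mirrors `python -m`, and the config path below is a placeholder:

```python
import os
import runpy

# Point the server at a specific config before launch; otherwise it looks for
# dao_ai.yaml in the current working directory.
os.environ["DAO_AI_CONFIG_PATH"] = "/path/to/config.yaml"  # placeholder path
runpy.run_module("dao_ai.apps.server", run_name="__main__")
```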
dao_ai/config.py
CHANGED

@@ -294,36 +294,37 @@ class IsDatabricksResource(ABC, BaseModel):
 
         # Check for OBO first (highest priority)
         if self.on_behalf_of_user:
-            #
-            …
+            # In Databricks Apps, use forwarded headers for per-user auth
+            from mlflow.genai.agent_server import get_request_headers
+
+            headers = get_request_headers()
+            logger.debug(f"Headers received: {list(headers.keys())}")
+            # Try both lowercase and title-case header names (HTTP headers are case-insensitive)
+            forwarded_token = headers.get("x-forwarded-access-token") or headers.get(
+                "X-Forwarded-Access-Token"
+            )
 
-            …
-            pass
+            if forwarded_token:
+                forwarded_user = headers.get("x-forwarded-user") or headers.get(
+                    "X-Forwarded-User", "unknown"
+                )
+                logger.debug(
+                    f"Creating WorkspaceClient for {self.__class__.__name__} "
+                    f"with OBO using forwarded token from Databricks Apps",
+                    forwarded_user=forwarded_user,
+                )
+                # Use workspace_host if configured, otherwise SDK will auto-detect
+                workspace_host_value: str | None = (
+                    normalize_host(value_of(self.workspace_host))
+                    if self.workspace_host
+                    else None
+                )
+                return WorkspaceClient(
+                    host=workspace_host_value,
+                    token=forwarded_token,
+                    auth_type="pat",
+                )
 
-        # Fall back to Model Serving OBO (existing behavior)
         credentials_strategy: CredentialsStrategy = ModelServingUserCredentials()
         logger.debug(
             f"Creating WorkspaceClient for {self.__class__.__name__} "
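The new branch prefers the per-user token that Databricks Apps forwards on each request over the Model Serving credential strategy. A trimmed sketch of the same header-based flow outside the config class; the header names come from the diff, while the plain-dict signature and the None fallback are assumptions for illustration:

```python
from databricks.sdk import WorkspaceClient

def client_from_forwarded_headers(headers: dict[str, str]) -> WorkspaceClient | None:
    """Build a per-user WorkspaceClient from Databricks Apps forwarded headers."""
    # HTTP headers are case-insensitive; check both spellings.
    token = headers.get("x-forwarded-access-token") or headers.get("X-Forwarded-Access-Token")
    if not token:
        return None  # caller falls back to ModelServingUserCredentials / ambient auth
    # host=None lets the SDK auto-detect the workspace (e.g. from DATABRICKS_HOST).
    return WorkspaceClient(host=None, token=token, auth_type="pat")
```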
dao_ai/providers/databricks.py
CHANGED

@@ -152,25 +152,77 @@ class DatabricksProvider(ServiceProvider):
         client_secret: str | None = None,
         workspace_host: str | None = None,
     ) -> None:
-        …
+        # Store credentials for lazy initialization
+        self._pat = pat
+        self._client_id = client_id
+        self._client_secret = client_secret
+        self._workspace_host = workspace_host
+
+        # Lazy initialization for WorkspaceClient
+        self._w: WorkspaceClient | None = w
+        self._w_initialized = w is not None
+
+        # Lazy initialization for VectorSearchClient - only create when needed
+        # This avoids authentication errors in Databricks Apps where VSC
+        # requires explicit credentials but the platform uses ambient auth
+        self._vsc: VectorSearchClient | None = vsc
+        self._vsc_initialized = vsc is not None
+
+        # Lazy initialization for DatabricksFunctionClient
+        self._dfs: DatabricksFunctionClient | None = dfs
+        self._dfs_initialized = dfs is not None
+
+    @property
+    def w(self) -> WorkspaceClient:
+        """Lazy initialization of WorkspaceClient."""
+        if not self._w_initialized:
+            self._w = _workspace_client(
+                pat=self._pat,
+                client_id=self._client_id,
+                client_secret=self._client_secret,
+                workspace_host=self._workspace_host,
             )
-        …
+            self._w_initialized = True
+        return self._w  # type: ignore[return-value]
+
+    @w.setter
+    def w(self, value: WorkspaceClient) -> None:
+        """Set WorkspaceClient and mark as initialized."""
+        self._w = value
+        self._w_initialized = True
+
+    @property
+    def vsc(self) -> VectorSearchClient:
+        """Lazy initialization of VectorSearchClient."""
+        if not self._vsc_initialized:
+            self._vsc = _vector_search_client(
+                pat=self._pat,
+                client_id=self._client_id,
+                client_secret=self._client_secret,
+                workspace_host=self._workspace_host,
             )
-        …
+            self._vsc_initialized = True
+        return self._vsc  # type: ignore[return-value]
+
+    @vsc.setter
+    def vsc(self, value: VectorSearchClient) -> None:
+        """Set VectorSearchClient and mark as initialized."""
+        self._vsc = value
+        self._vsc_initialized = True
+
+    @property
+    def dfs(self) -> DatabricksFunctionClient:
+        """Lazy initialization of DatabricksFunctionClient."""
+        if not self._dfs_initialized:
+            self._dfs = _function_client(w=self.w)
+            self._dfs_initialized = True
+        return self._dfs  # type: ignore[return-value]
+
+    @dfs.setter
+    def dfs(self, value: DatabricksFunctionClient) -> None:
+        """Set DatabricksFunctionClient and mark as initialized."""
+        self._dfs = value
+        self._dfs_initialized = True
 
     def experiment_name(self, config: AppConfig) -> str:
         current_user: User = self.w.current_user.me()
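The provider now defers client construction until first use, which avoids VectorSearchClient authentication errors under Databricks Apps' ambient auth. A generic sketch of the same lazy-property-with-setter pattern, detached from the Databricks classes:

```python
from __future__ import annotations

class ExpensiveClient:
    """Stand-in for WorkspaceClient / VectorSearchClient."""
    def __init__(self) -> None:
        print("connecting...")  # only happens when the client is actually needed

class Provider:
    def __init__(self, client: ExpensiveClient | None = None) -> None:
        self._client = client
        self._client_initialized = client is not None

    @property
    def client(self) -> ExpensiveClient:
        # Construct on first access; cached afterwards.
        if not self._client_initialized:
            self._client = ExpensiveClient()
            self._client_initialized = True
        return self._client  # type: ignore[return-value]

    @client.setter
    def client(self, value: ExpensiveClient) -> None:
        # Injecting a pre-built client (e.g. in tests) skips lazy construction.
        self._client = value
        self._client_initialized = True

p = Provider()   # no connection yet
_ = p.client     # prints "connecting..." exactly once
_ = p.client     # reuses the cached instance
```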
@@ -572,7 +624,7 @@ class DatabricksProvider(ServiceProvider):
         source_config_path: str | None = config.source_config_path
         if source_config_path:
             # Read the config file and upload to workspace
-            config_file_name: str = "
+            config_file_name: str = "dao_ai.yaml"
             workspace_config_path: str = f"{source_path}/{config_file_name}"
 
             logger.info(
@@ -603,7 +655,7 @@ class DatabricksProvider(ServiceProvider):
             logger.warning(
                 "No source config path available. "
                 "Ensure DAO_AI_CONFIG_PATH is set in the app environment or "
-                "
+                "dao_ai.yaml exists in the app source directory."
             )
 
         # Generate and upload app.yaml with dynamically discovered resources
@@ -634,7 +686,9 @@ class DatabricksProvider(ServiceProvider):
             generate_user_api_scopes,
         )
 
-        sdk_resources = generate_sdk_resources(
+        sdk_resources = generate_sdk_resources(
+            config, experiment_id=experiment.experiment_id
+        )
         if sdk_resources:
             logger.info(
                 "Discovered app resources from config",
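These hunks only rename the uploaded config file and thread the experiment id into generate_sdk_resources; the upload call itself is not shown. A hedged sketch of what uploading the renamed config to the app source path might look like with the Databricks SDK (paths are placeholders, and the provider's actual upload code may differ):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.workspace import ImportFormat

w = WorkspaceClient()
source_path = "/Workspace/Users/someone@example.com/my_app"  # placeholder
config_file_name = "dao_ai.yaml"
workspace_config_path = f"{source_path}/{config_file_name}"

with open("dao_ai.yaml", "rb") as f:
    # AUTO format uploads the file as-is alongside the app source.
    w.workspace.upload(workspace_config_path, f, format=ImportFormat.AUTO, overwrite=True)
```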
{dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: dao-ai
-Version: 0.1.11
+Version: 0.1.13
 Summary: DAO AI: A modular, multi-agent orchestration framework for complex AI workflows. Supports agent handoff, tool integration, and dynamic configuration via YAML.
 Project-URL: Homepage, https://github.com/natefleming/dao-ai
 Project-URL: Documentation, https://natefleming.github.io/dao-ai
@@ -125,7 +125,7 @@ DAO AI Builder generates valid YAML configurations that work seamlessly with this
 - **[Architecture](docs/architecture.md)** - Understand how DAO works under the hood
 
 ### Core Concepts
-- **[Key Capabilities](docs/key-capabilities.md)** - Explore
+- **[Key Capabilities](docs/key-capabilities.md)** - Explore 15 powerful features for production agents
 - **[Configuration Reference](docs/configuration-reference.md)** - Complete YAML configuration guide
 - **[Examples](docs/examples.md)** - Ready-to-use example configurations
 
@@ -148,7 +148,7 @@ Before you begin, you'll need:
 - **Python 3.11 or newer** installed on your computer ([download here](https://www.python.org/downloads/))
 - **A Databricks workspace** (ask your IT team or see [Databricks docs](https://docs.databricks.com/))
 - Access to **Unity Catalog** (your organization's data catalog)
-- **Model Serving** enabled (for deploying AI agents)
+- **Model Serving** or **Databricks Apps** enabled (for deploying AI agents)
 - *Optional*: Vector Search, Genie (for advanced features)
 
 **Not sure if you have access?** Your Databricks administrator can grant you permissions.
@@ -345,6 +345,7 @@ DAO provides powerful capabilities for building production-ready AI agents:
 
 | Feature | Description |
 |---------|-------------|
+| **Dual Deployment Targets** | Deploy to Databricks Model Serving or Databricks Apps with a single config |
 | **Multi-Tool Support** | Python functions, Unity Catalog, MCP, Agent Endpoints |
 | **On-Behalf-Of User** | Per-user permissions and governance |
 | **Advanced Caching** | Two-tier (LRU + Semantic) caching for cost optimization |
{dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/RECORD
CHANGED

@@ -1,7 +1,7 @@
 dao_ai/__init__.py,sha256=18P98ExEgUaJ1Byw440Ct1ty59v6nxyWtc5S6Uq2m9Q,1062
 dao_ai/catalog.py,sha256=sPZpHTD3lPx4EZUtIWeQV7VQM89WJ6YH__wluk1v2lE,4947
 dao_ai/cli.py,sha256=1Ox8qjLKRlrKu2YXozm0lWoeZnDCouECeZSGVPkQgIQ,50923
-dao_ai/config.py,sha256=
+dao_ai/config.py,sha256=E2lwWro3A6c3cKLYyHZeqNz2X5vkXgLS8TfDlGL5o9M,129307
 dao_ai/graph.py,sha256=1-uQlo7iXZQTT3uU8aYu0N5rnhw5_g_2YLwVsAs6M-U,1119
 dao_ai/logging.py,sha256=lYy4BmucCHvwW7aI3YQkQXKJtMvtTnPDu9Hnd7_O4oc,1556
 dao_ai/messages.py,sha256=4ZBzO4iFdktGSLrmhHzFjzMIt2tpaL-aQLHOQJysGnY,6959
@@ -14,10 +14,10 @@ dao_ai/types.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/utils.py,sha256=_Urd7Nj2VzrgPKf3NS4E6vt0lWRhEUddBqWN9BksqeE,11543
 dao_ai/vector_search.py,sha256=8d3xROg9zSIYNXjRRl6rSexsJTlufjRl5Fy1ZA8daKA,4019
 dao_ai/apps/__init__.py,sha256=RLuhZf4gQ4pemwKDz1183aXib8UfaRhwfKvRx68GRlM,661
-dao_ai/apps/handlers.py,sha256=
+dao_ai/apps/handlers.py,sha256=nbJZOgmnHG5icR4Pb56jxIWsm_AGnsURgViMJX2_LTU,2608
 dao_ai/apps/model_serving.py,sha256=XLt3_0pGSRceMK6YtOrND9Jnh7mKLPCtwjVDLIaptQU,847
-dao_ai/apps/resources.py,sha256=
-dao_ai/apps/server.py,sha256=
+dao_ai/apps/resources.py,sha256=5l6UxfMq6uspOql-HNDyUikfqRAa9eH_TiJHrGgMb6s,40029
+dao_ai/apps/server.py,sha256=neWbVnC2z9f-tJZBnho70FytNDEVOdOM1YngoGc5KHI,1264
 dao_ai/genie/__init__.py,sha256=vdEyGhrt6L8GlK75SyYvTnl8QpHKDCJC5hJKLg4DesQ,1063
 dao_ai/genie/core.py,sha256=HPKbocvhnnw_PkQwfoq5bpgQmL9lZyyS6_goTJL8yiY,1073
 dao_ai/genie/cache/__init__.py,sha256=JfgCJl1NYQ1aZvZ4kly4T6uQK6ZCJ6PX_htuq7nJF50,1203
@@ -53,7 +53,7 @@ dao_ai/orchestration/supervisor.py,sha256=alKMEEo9G5LhdpMvTVdAMel234cZj5_MguWl4w
 dao_ai/orchestration/swarm.py,sha256=8tp1eGmsQqqWpaDcjPoJckddPWohZdmmN0RGRJ_xzOA,9198
 dao_ai/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 dao_ai/providers/base.py,sha256=cJGo3UjUTPgS91dv38ePOHwQQtYhIa84ebb167CBXjk,2111
-dao_ai/providers/databricks.py,sha256=
+dao_ai/providers/databricks.py,sha256=bI8lWZ2DkNac9aJWCIJzTG3lCE8MJ8n2BPurEHM1SeE,72791
 dao_ai/tools/__init__.py,sha256=NfRpAKds_taHbx6gzLPWgtPXve-YpwzkoOAUflwxceM,1734
 dao_ai/tools/agent.py,sha256=plIWALywRjaDSnot13nYehBsrHRpBUpsVZakoGeajOE,1858
 dao_ai/tools/core.py,sha256=bRIN3BZhRQX8-Kpu3HPomliodyskCqjxynQmYbk6Vjs,3783
@@ -68,8 +68,8 @@ dao_ai/tools/sql.py,sha256=tKd1gjpLuKdQDyfmyYYtMiNRHDW6MGRbdEVaeqyB8Ok,7632
 dao_ai/tools/time.py,sha256=tufJniwivq29y0LIffbgeBTIDE6VgrLpmVf8Qr90qjw,9224
 dao_ai/tools/unity_catalog.py,sha256=AjQfW7bvV8NurqDLIyntYRv2eJuTwNdbvex1L5CRjOk,15534
 dao_ai/tools/vector_search.py,sha256=oe2uBwl2TfeJIXPpwiS6Rmz7wcHczSxNyqS9P3hE6co,14542
-dao_ai-0.1.
-dao_ai-0.1.
-dao_ai-0.1.
-dao_ai-0.1.
-dao_ai-0.1.
+dao_ai-0.1.13.dist-info/METADATA,sha256=xQ1apcAp24Co2FBzFL6Hw5mCqQzsskmMw-br41NSJqk,16830
+dao_ai-0.1.13.dist-info/WHEEL,sha256=WLgqFyCfm_KASv4WHyYy0P3pM_m7J5L9k2skdKLirC8,87
+dao_ai-0.1.13.dist-info/entry_points.txt,sha256=Xa-UFyc6gWGwMqMJOt06ZOog2vAfygV_DSwg1AiP46g,43
+dao_ai-0.1.13.dist-info/licenses/LICENSE,sha256=YZt3W32LtPYruuvHE9lGk2bw6ZPMMJD8yLrjgHybyz4,1069
+dao_ai-0.1.13.dist-info/RECORD,,

{dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/WHEEL
File without changes

{dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/entry_points.txt
File without changes

{dao_ai-0.1.11.dist-info → dao_ai-0.1.13.dist-info}/licenses/LICENSE
File without changes