davidkhala.ai 0.1.7__tar.gz → 0.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/PKG-INFO +5 -2
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/api/knowledge.py +2 -1
- davidkhala_ai-0.2.0/davidkhala/ai/agent/dify/const.py +10 -0
- davidkhala_ai-0.2.0/davidkhala/ai/agent/dify/interface.py +3 -0
- davidkhala_ai-0.2.0/davidkhala/ai/agent/dify/model.py +31 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/console/knowledge.py +3 -1
- davidkhala_ai-0.2.0/davidkhala/ai/agent/dify/ops/console/plugin.py +68 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/console/session.py +3 -1
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/db/app.py +1 -4
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/db/knowledge.py +8 -2
- davidkhala_ai-0.2.0/davidkhala/ai/agent/dify/plugins/__init__.py +7 -0
- davidkhala_ai-0.2.0/davidkhala/ai/agent/dify/plugins/popular.py +36 -0
- davidkhala_ai-0.2.0/davidkhala/ai/ali/agentbay.py +39 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/openai/azure.py +2 -3
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/pyproject.toml +4 -4
- davidkhala_ai-0.1.7/davidkhala/ai/agent/dify/common.py +0 -36
- davidkhala_ai-0.1.7/davidkhala/ai/agent/dify/plugins/__init__.py +0 -14
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/.gitignore +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/README.md +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/README.md +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/api/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/api/app.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/console/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/db/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/db/orm.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/db/sys.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/plugins/file.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/plugins/firecrawl.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/plugins/jina.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/langgraph.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/ragflow.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/ali/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/ali/dashscope.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/api/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/api/openrouter.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/api/siliconflow.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/google/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/google/adk.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/google/gemini.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/huggingface/BAAI.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/huggingface/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/huggingface/inference.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/model.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/openai/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/openai/native.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/openrouter/__init__.py +0 -0
- {davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/opik.py +0 -0
{davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/PKG-INFO

@@ -1,13 +1,16 @@
 Metadata-Version: 2.4
 Name: davidkhala.ai
-Version: 0.1.7
+Version: 0.2.0
 Summary: misc AI modules
-Requires-Python: >=3.
+Requires-Python: >=3.12
 Provides-Extra: ali
 Requires-Dist: dashscope; extra == 'ali'
+Requires-Dist: davidkhala-utils; extra == 'ali'
+Requires-Dist: wuying-agentbay-sdk; extra == 'ali'
 Provides-Extra: api
 Requires-Dist: davidkhala-utils[http-request]; extra == 'api'
 Provides-Extra: azure
+Requires-Dist: davidkhala-utils; extra == 'azure'
 Requires-Dist: openai; extra == 'azure'
 Provides-Extra: dify
 Requires-Dist: davidkhala-databases[pg]; extra == 'dify'

{davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/api/knowledge.py

@@ -8,7 +8,8 @@ from urllib.parse import urlparse
 import requests
 
 from davidkhala.ai.agent.dify.api import API, Iterator
-from davidkhala.ai.agent.dify.
+from davidkhala.ai.agent.dify.model import Document as DocumentBase
+
 
 class DatasetDict(TypedDict):
     id: str

davidkhala_ai-0.2.0/davidkhala/ai/agent/dify/model.py

@@ -0,0 +1,31 @@
+from pydantic import BaseModel, Field
+
+from davidkhala.ai.agent.dify.const import IndexingStatus
+
+
+class Document(BaseModel):
+    id: str
+    position: int
+    data_source_type: str
+    data_source_info: dict[str, str]
+    name: str
+    indexing_status: IndexingStatus
+    error: str | None
+    enabled: bool
+
+
+class Dataset(BaseModel):
+    id: str
+    name: str
+    description: str
+
+
+class JsonData(BaseModel):
+    data: list
+
+
+class NodeOutput(BaseModel):
+    """Schema for Output of a Dify node"""
+    text: str
+    files: list
+    json_: list[JsonData] = Field(alias="json")  # avoid conflict with .json()

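NodeOutput aliases the `json` field to `json_` to avoid clashing with pydantic's own methods. A minimal validation sketch, assuming pydantic v2 (whose `model_validate` this relies on); the payload below is invented, not taken from the diff:

from davidkhala.ai.agent.dify.model import NodeOutput

payload = {"text": "done", "files": [], "json": [{"data": [1, 2, 3]}]}  # sample node output
output = NodeOutput.model_validate(payload)  # populated through the "json" alias
print(output.json_[0].data)  # -> [1, 2, 3]
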
{davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/console/knowledge.py RENAMED

@@ -3,7 +3,9 @@ from time import sleep
 from davidkhala.utils.http_request.stream import as_sse, Request as StreamRequest
 from pydantic import BaseModel
 
-from davidkhala.ai.agent.dify.
+from davidkhala.ai.agent.dify.interface import IndexingError
+from davidkhala.ai.agent.dify.model import Document, Dataset
+from davidkhala.ai.agent.dify.const import IndexingStatus
 from davidkhala.ai.agent.dify.ops.console import API
 from davidkhala.ai.agent.dify.ops.console.session import ConsoleUser
 from davidkhala.ai.agent.dify.ops.db.orm import Node

davidkhala_ai-0.2.0/davidkhala/ai/agent/dify/ops/console/plugin.py

@@ -0,0 +1,68 @@
+from time import sleep
+
+from davidkhala.ai.agent.dify.ops.console import API
+from davidkhala.ai.agent.dify.ops.console.session import ConsoleUser
+
+
+class ConsolePlugin(API):
+    def __init__(self, context: ConsoleUser):
+        super().__init__()
+        self.base_url = f"{context.base_url}/workspaces/current/plugin"
+        self.session.cookies = context.session.cookies
+        self.options = context.options
+
+    def upgrade(self, *plugin_names: str) -> list[dict]:
+        versions = self.latest_version(*plugin_names)
+        self.async_install(*versions)
+
+        current = []
+        while len(current) < len(versions):
+            current = self.get(*plugin_names)
+            sleep(1)
+        return current
+
+    def async_install(self, *plugin_versioned_names: str) -> str | None:
+        url = f"{self.base_url}/install/marketplace"
+        r = self.request(url, method="POST", json={
+            'plugin_unique_identifiers': plugin_versioned_names,
+        })
+        if r['all_installed']:
+            # plugins exist, no need to install
+            return None
+
+        return r['task_id']
+
+    def plugins(self, *, page=1, size=100):
+        url = f"{self.base_url}/list?page={page}&page_size={size}"
+        r = self.request(url, method="GET")
+        _ = r['plugins']
+        assert r['total'] == len(_)
+        return _
+
+    def get(self, *plugin_names: str) -> list[dict]:
+        "inspect installed plugins"
+        url = f"{self.base_url}/list/installations/ids"
+        r = self.request(url, method="POST", json={
+            'plugin_ids': plugin_names,
+        })
+        return r['plugins']
+
+    def latest_version(self, *plugin_names: str) -> dict:
+        url = f"{self.base_url}/list/latest-versions"
+        r = self.request(url, method="POST", json={
+            'plugin_ids': plugin_names,
+        })
+        return [r['versions'][name]['unique_identifier'] for name in plugin_names]
+
+    def uninstall(self, id: str):
+        url = f"{self.base_url}/uninstall"
+
+        r = self.request(url, method="POST", json={
+            'plugin_installation_id': id
+        })
+        assert r['success'] is True
+
+    def uninstall_by(self, *plugin_names: str):
+        for name in plugin_names:
+            r = self.get(name)
+            self.uninstall(r[0]['id'])

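A hedged usage sketch for the new console plugin API. It assumes `user` is an already-authenticated ConsoleUser; its construction and login call are not shown in this hunk, so the wiring below is illustrative only:

from davidkhala.ai.agent.dify.ops.console.plugin import ConsolePlugin

plugin = ConsolePlugin(user)  # `user`: authenticated ConsoleUser (assumption)
print(plugin.latest_version("langgenius/firecrawl_datasource"))   # versioned unique identifiers
installed = plugin.upgrade("langgenius/firecrawl_datasource")     # installs, then polls get() once per second
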
{davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/console/session.py

@@ -1,3 +1,5 @@
+from base64 import b64encode
+
 from davidkhala.ai.agent.dify.ops.console import API
 
 

@@ -11,7 +13,7 @@ class ConsoleUser(API):
 
         r = self.request(url, "POST", json={
             'email': email,
-            'password': password,
+            'password': b64encode(password.encode()).decode(),  # use base64 from dify 1.11
             'remember_me': remember_me,
             'language': language,
         })

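The login change only wraps the plaintext password in base64 before posting it, which is what the Dify console expects from 1.11 onward per the inline comment. For illustration:

from base64 import b64encode

# what the console now receives in the 'password' field (illustrative value)
assert b64encode("secret".encode()).decode() == "c2VjcmV0"
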
{davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/db/app.py

@@ -4,16 +4,13 @@ from sqlalchemy.orm import Session
 from sqlalchemy import desc
 
 class Studio(DB):
-
-    def user_feedbacks(self):
-        sql = """SELECT mf.conversation_id,
+    user_feedbacks_sql = """SELECT mf.conversation_id,
        mf.content,
        m.query,
        m.answer
 FROM message_feedbacks mf
        LEFT JOIN messages m ON mf.message_id = m.id
 WHERE mf.from_source = 'user'"""
-        return self.get_dict(sql)
 
     @property
     def apps(self): return self.get_dict("select id, name, mode from apps where status = 'normal'")

{davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/agent/dify/ops/db/knowledge.py

@@ -26,16 +26,22 @@ class Dataset(DB):
         template = "select id from datasource_providers where name = :name and provider = :provider"
         return self.query(template, {'name': name, 'provider': provider}).scalars().all()
 
+    def documents(self, dataset_id: str):
+        template = "select id, name,created_from, created_at from documents where dataset_id = :dataset_id"
+        return self.query(template, {'dataset_id': dataset_id})
+
 
 class Document(DB):
     def hit_documents(self, top_k: int = 3):
         template = "SELECT dataset_id, document_id, content FROM document_segments ORDER BY hit_count DESC LIMIT :top_k"
         return self.get_dict(template, {'top_k': top_k})
 
-    def id_by(self, name) -> list[str]:
+    def id_by(self, name: str, dataset_id: str = None) -> list[str]:
         """multiple ids can be found"""
         template = "select id from documents where name = :name"
-
+        if dataset_id:
+            template = "select id from documents where name = :name and dataset_id = :dataset_id"
+        return [str(uuid) for uuid in self.query(template, {'name': name, 'dataset_id': dataset_id}).scalars().all()]
 
 
 class Pipeline(DB):

davidkhala_ai-0.2.0/davidkhala/ai/agent/dify/plugins/popular.py

@@ -0,0 +1,36 @@
+model_provider = [
+    'langgenius/siliconflow',
+    'langgenius/azure_openai',
+    'langgenius/tongyi',
+    'langgenius/jina',
+    'langgenius/openrouter',
+    'langgenius/deepseek',
+]
+
+class Knowledge:
+    data_source = [
+        'langgenius/firecrawl_datasource',
+    ]
+    chunk = [
+        'langgenius/parentchild_chunker',
+        'langgenius/general_chunker',
+    ]
+    api = [
+        'abesticode/knowledge_pro',
+    ]
+
+class Node:
+    format = [
+        'langgenius/json_process',
+        'langgenius/dify_extractor',
+    ]
+    agent = [
+        'langgenius/agent',
+    ]
+    data = [
+        'langgenius/chart',
+        'junjiem/db_query',
+        'junjiem/db_query_pre_auth',
+    ]
+
+

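These curated marketplace ids pair naturally with the new ConsolePlugin API. A sketch, again assuming an authenticated ConsoleUser named `user` (not shown in this diff):

from davidkhala.ai.agent.dify.ops.console.plugin import ConsolePlugin
from davidkhala.ai.agent.dify.plugins import popular

plugin = ConsolePlugin(user)             # `user`: authenticated ConsoleUser (assumption)
plugin.upgrade(*popular.model_provider)  # install/upgrade the listed model providers
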
davidkhala_ai-0.2.0/davidkhala/ai/ali/agentbay.py

@@ -0,0 +1,39 @@
+from agentbay import AgentBay, Session, Config, AgentBayLogger, BrowserOption
+from davidkhala.utils.syntax.interface import ContextAware
+
+AgentBayLogger.setup(level='WARNING')  # Default to INFO
+
+
+class Client(ContextAware):
+    def __init__(self, api_key, *, timeout_ms=10000):
+        self.agent = AgentBay(
+            api_key=api_key,
+            cfg=Config(endpoint="wuyingai.ap-southeast-1.aliyuncs.com", timeout_ms=timeout_ms)
+        )
+        self.session: Session | None = None
+
+    def open(self):
+        r = self.agent.create()
+        if not r.success:
+            return False
+        self.session = r.session
+        return True
+
+    def close(self):
+        self.agent.delete(self.session)
+        del self.session
+
+
+class Browser(ContextAware):
+    def __init__(self, session: Session):
+        self.session = session
+        self.option = BrowserOption()
+        self.endpoint_url: str | None = None
+
+    def open(self) -> bool:
+        success = self.session.browser.initialize(self.option)
+        self.endpoint_url = self.session.browser.get_endpoint_url()
+        return success
+
+    def close(self):
+        self.session.browser.destroy()

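A possible way to drive the new AgentBay wrapper. The API key is a placeholder, and ContextAware is assumed to map the context-manager protocol onto open()/close(), which this diff does not show:

from davidkhala.ai.ali.agentbay import Client, Browser

with Client("<AGENTBAY_API_KEY>") as client:   # creates a cloud session on enter (assumption)
    with Browser(client.session) as browser:   # initializes the cloud browser
        print(browser.endpoint_url)            # CDP endpoint, e.g. for Playwright over CDP
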
{davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/davidkhala/ai/openai/azure.py

@@ -1,5 +1,4 @@
-import
-
+from davidkhala.utils.syntax.compat import deprecated
 from openai import AzureOpenAI, OpenAI
 
 from davidkhala.ai.openai import Client

@@ -20,7 +19,7 @@ class ModelDeploymentClient(AzureHosted):
     )
 
 
-@
+@deprecated("Azure Open AI is deprecated. Please migrate to Microsoft Foundry")
 class OpenAIClient(AzureHosted):
 
     def __init__(self, api_key, project):

{davidkhala_ai-0.1.7 → davidkhala_ai-0.2.0}/pyproject.toml

@@ -1,9 +1,9 @@
 [project]
 name = "davidkhala.ai"
-version = "0.1.7"
+version = "0.2.0"
 description = "misc AI modules"
 readme = "README.md"
-requires-python = ">=3.
+requires-python = ">=3.12"
 
 [project.optional-dependencies]
 langchain = [

@@ -18,8 +18,8 @@ hf = [
     'onnxruntime', 'onnx' # for test only
 ]
 openrouter = ["openrouter"]
-ali = ["dashscope"]
-azure = ["openai"]
+ali = ["dashscope", "wuying-agentbay-sdk", "davidkhala.utils"]
+azure = ["openai", "davidkhala.utils"]
 telemetry = [
     "opik ; python_version < '3.14'" # limit py version: PyO3 v0.22.6 currently supports up to Python 3.13.
 ]

davidkhala_ai-0.1.7/davidkhala/ai/agent/dify/common.py

@@ -1,36 +0,0 @@
-from enum import Enum
-
-from pydantic import BaseModel
-
-from davidkhala.ai.agent.dify.plugins.firecrawl import DataSourceInfo
-
-
-class IndexingStatus(str, Enum):
-    WAITING = "waiting"
-    PARSING = "parsing"
-    SPLITTING = 'splitting'
-    INDEXING = "indexing"
-    COMPLETED = "completed"
-    FAILED = "error"
-
-
-class Document(BaseModel):
-    id: str
-    position: int
-    data_source_type: str
-    data_source_info: dict[str, str]
-    name: str
-    indexing_status: IndexingStatus
-    error: str | None
-    enabled: bool
-
-
-class Dataset(BaseModel):
-    id: str
-    name: str
-    description: str
-
-
-class IndexingError(Exception):
-    """Raised when document indexing fails (indexing_status = 'error')"""
-    pass

davidkhala_ai-0.1.7/davidkhala/ai/agent/dify/plugins/__init__.py

@@ -1,14 +0,0 @@
-from typing import Literal
-
-from pydantic import BaseModel
-
-class JsonEntry(BaseModel):
-    data: list
-
-class Output(BaseModel):
-    """Class for result of a Dify node"""
-    text: str
-    files: list
-    json: list[JsonEntry]
-class DataSourceTypeAware(BaseModel):
-    datasource_type: Literal["local_file", "online_document", "website_crawl"]