loom_agent-0.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of loom-agent might be problematic.
- loom/__init__.py +77 -0
- loom/agent.py +217 -0
- loom/agents/__init__.py +10 -0
- loom/agents/refs.py +28 -0
- loom/agents/registry.py +50 -0
- loom/builtin/compression/__init__.py +4 -0
- loom/builtin/compression/structured.py +79 -0
- loom/builtin/embeddings/__init__.py +9 -0
- loom/builtin/embeddings/openai_embedding.py +135 -0
- loom/builtin/embeddings/sentence_transformers_embedding.py +145 -0
- loom/builtin/llms/__init__.py +8 -0
- loom/builtin/llms/mock.py +34 -0
- loom/builtin/llms/openai.py +168 -0
- loom/builtin/llms/rule.py +102 -0
- loom/builtin/memory/__init__.py +5 -0
- loom/builtin/memory/in_memory.py +21 -0
- loom/builtin/memory/persistent_memory.py +278 -0
- loom/builtin/retriever/__init__.py +9 -0
- loom/builtin/retriever/chroma_store.py +265 -0
- loom/builtin/retriever/in_memory.py +106 -0
- loom/builtin/retriever/milvus_store.py +307 -0
- loom/builtin/retriever/pinecone_store.py +237 -0
- loom/builtin/retriever/qdrant_store.py +274 -0
- loom/builtin/retriever/vector_store.py +128 -0
- loom/builtin/retriever/vector_store_config.py +217 -0
- loom/builtin/tools/__init__.py +32 -0
- loom/builtin/tools/calculator.py +49 -0
- loom/builtin/tools/document_search.py +111 -0
- loom/builtin/tools/glob.py +27 -0
- loom/builtin/tools/grep.py +56 -0
- loom/builtin/tools/http_request.py +86 -0
- loom/builtin/tools/python_repl.py +73 -0
- loom/builtin/tools/read_file.py +32 -0
- loom/builtin/tools/task.py +158 -0
- loom/builtin/tools/web_search.py +64 -0
- loom/builtin/tools/write_file.py +31 -0
- loom/callbacks/base.py +9 -0
- loom/callbacks/logging.py +12 -0
- loom/callbacks/metrics.py +27 -0
- loom/callbacks/observability.py +248 -0
- loom/components/agent.py +107 -0
- loom/core/agent_executor.py +450 -0
- loom/core/circuit_breaker.py +178 -0
- loom/core/compression_manager.py +329 -0
- loom/core/context_retriever.py +185 -0
- loom/core/error_classifier.py +193 -0
- loom/core/errors.py +66 -0
- loom/core/message_queue.py +167 -0
- loom/core/permission_store.py +62 -0
- loom/core/permissions.py +69 -0
- loom/core/scheduler.py +125 -0
- loom/core/steering_control.py +47 -0
- loom/core/structured_logger.py +279 -0
- loom/core/subagent_pool.py +232 -0
- loom/core/system_prompt.py +141 -0
- loom/core/system_reminders.py +283 -0
- loom/core/tool_pipeline.py +113 -0
- loom/core/types.py +269 -0
- loom/interfaces/compressor.py +59 -0
- loom/interfaces/embedding.py +51 -0
- loom/interfaces/llm.py +33 -0
- loom/interfaces/memory.py +29 -0
- loom/interfaces/retriever.py +179 -0
- loom/interfaces/tool.py +27 -0
- loom/interfaces/vector_store.py +80 -0
- loom/llm/__init__.py +14 -0
- loom/llm/config.py +228 -0
- loom/llm/factory.py +111 -0
- loom/llm/model_health.py +235 -0
- loom/llm/model_pool_advanced.py +305 -0
- loom/llm/pool.py +170 -0
- loom/llm/registry.py +201 -0
- loom/mcp/__init__.py +4 -0
- loom/mcp/client.py +86 -0
- loom/mcp/registry.py +58 -0
- loom/mcp/tool_adapter.py +48 -0
- loom/observability/__init__.py +5 -0
- loom/patterns/__init__.py +5 -0
- loom/patterns/multi_agent.py +123 -0
- loom/patterns/rag.py +262 -0
- loom/plugins/registry.py +55 -0
- loom/resilience/__init__.py +5 -0
- loom/tooling.py +72 -0
- loom/utils/agent_loader.py +218 -0
- loom/utils/token_counter.py +19 -0
- loom_agent-0.0.1.dist-info/METADATA +457 -0
- loom_agent-0.0.1.dist-info/RECORD +89 -0
- loom_agent-0.0.1.dist-info/WHEEL +4 -0
- loom_agent-0.0.1.dist-info/licenses/LICENSE +21 -0

loom/builtin/retriever/qdrant_store.py
@@ -0,0 +1,274 @@
"""Qdrant vector store adapter"""

from __future__ import annotations

from typing import Any, Dict, List, Optional, Tuple
import uuid

from loom.interfaces.retriever import Document
from loom.interfaces.vector_store import BaseVectorStore

try:
    from qdrant_client import QdrantClient
    from qdrant_client.models import Distance, VectorParams, PointStruct, Filter, FieldCondition, MatchValue
    QDRANT_AVAILABLE = True
except ImportError:
    QDRANT_AVAILABLE = False


class QdrantVectorStore(BaseVectorStore):
    """
    Qdrant vector store adapter

    Features:
    - ✅ Open-source vector database
    - ✅ Supports local deployment and cloud service
    - ✅ High-performance Rust implementation
    - ✅ Rich filtering capabilities
    - ✅ Supports gRPC and HTTP

    Example:
        from loom.builtin.retriever.qdrant_store import QdrantVectorStore
        from loom.builtin.retriever.vector_store_config import QdrantConfig

        # Local Qdrant
        config = QdrantConfig.create(
            host="localhost",
            port=6333,
            collection_name="loom_docs"
        )

        # Qdrant Cloud
        config = QdrantConfig.create(
            host="your-cluster.qdrant.io",
            api_key="your-api-key",
            https=True
        )

        vector_store = QdrantVectorStore(config)
        await vector_store.initialize()
    """

    def __init__(self, config: Dict[str, Any] | Any):
        """
        Parameters:
            config: QdrantConfig object or configuration dict
        """
        if not QDRANT_AVAILABLE:
            raise ImportError(
                "Qdrant client is not installed. "
                "Install with: pip install qdrant-client"
            )

        # Accept either a dict or a Pydantic model
        if hasattr(config, "model_dump"):
            self.config = config.model_dump()
        else:
            self.config = config

        self.host = self.config.get("host", "localhost")
        self.port = self.config.get("port", 6333)
        self.collection_name = self.config.get("collection_name", "loom_documents")
        self.dimension = self.config.get("dimension", 1536)
        self.metric = self.config.get("metric", "cosine")
        self.api_key = self.config.get("api_key")
        self.https = self.config.get("https", False)
        self.prefer_grpc = self.config.get("prefer_grpc", False)

        self.client: Optional[QdrantClient] = None
        self._initialized = False

    async def initialize(self) -> None:
        """Initialize the Qdrant connection and collection"""
        if self._initialized:
            return

        # Initialize the client
        self.client = QdrantClient(
            host=self.host,
            port=self.port,
            api_key=self.api_key,
            https=self.https,
            prefer_grpc=self.prefer_grpc,
        )

        # Check whether the collection already exists
        collections = self.client.get_collections().collections
        collection_names = [c.name for c in collections]

        if self.collection_name not in collection_names:
            # Create the collection
            distance_map = {
                "cosine": Distance.COSINE,
                "euclidean": Distance.EUCLID,
                "dot_product": Distance.DOT,
            }

            self.client.create_collection(
                collection_name=self.collection_name,
                vectors_config=VectorParams(
                    size=self.dimension,
                    distance=distance_map.get(self.metric, Distance.COSINE)
                )
            )

        self._initialized = True

    async def add_vectors(
        self,
        vectors: List[List[float]],
        documents: List[Document]
    ) -> None:
        """
        Add vectors to Qdrant

        Parameters:
            vectors: list of vectors
            documents: the corresponding documents
        """
        if not self._initialized:
            await self.initialize()

        # Build Qdrant points
        points = []
        for vector, doc in zip(vectors, documents):
            # Use the document ID, or generate one
            point_id = doc.doc_id or str(uuid.uuid4())

            # Build the payload (metadata + content)
            payload = {
                "content": doc.content,
                **(doc.metadata or {})
            }

            # Include the score if present
            if doc.score is not None:
                payload["score"] = doc.score

            points.append(
                PointStruct(
                    id=point_id,
                    vector=vector,
                    payload=payload
                )
            )

        # Bulk upsert
        self.client.upsert(
            collection_name=self.collection_name,
            points=points
        )

    async def search(
        self,
        query_vector: List[float],
        top_k: int = 5,
        filters: Optional[Dict[str, Any]] = None
    ) -> List[Tuple[Document, float]]:
        """
        Search for similar vectors

        Parameters:
            query_vector: the query vector
            top_k: number of results to return
            filters: metadata filter conditions

        Returns:
            A list of (Document, score) tuples
        """
        if not self._initialized:
            await self.initialize()

        # Build the filter
        qdrant_filter = None
        if filters:
            qdrant_filter = self._build_qdrant_filter(filters)

        # Run the query
        results = self.client.search(
            collection_name=self.collection_name,
            query_vector=query_vector,
            limit=top_k,
            query_filter=qdrant_filter
        )

        # Convert the results
        documents_with_scores = []
        for hit in results:
            # Extract content and metadata; hit.score is authoritative,
            # so any stored "score" is dropped from the payload
            payload = hit.payload
            content = payload.pop("content", "")
            payload.pop("score", None)

            doc = Document(
                content=content,
                metadata=payload,
                score=hit.score,
                doc_id=str(hit.id)
            )
            documents_with_scores.append((doc, hit.score))

        return documents_with_scores

    async def delete(self, doc_ids: List[str]) -> None:
        """
        Delete documents

        Parameters:
            doc_ids: list of document IDs
        """
        if not self._initialized:
            await self.initialize()

        self.client.delete(
            collection_name=self.collection_name,
            points_selector=doc_ids
        )

    async def clear(self) -> None:
        """Clear the collection"""
        if not self._initialized:
            await self.initialize()

        # Drop the collection, then reset the flag so initialize() recreates it
        self.client.delete_collection(collection_name=self.collection_name)
        self._initialized = False
        await self.initialize()

    def _build_qdrant_filter(self, filters: Dict[str, Any]) -> Optional[Filter]:
        """
        Build a Qdrant filter

        Example:
            {"category": "python", "price": {"$gte": 100}}
            →
            Filter(
                must=[
                    FieldCondition(key="category", match=MatchValue(value="python")),
                    FieldCondition(key="price", range=Range(gte=100))
                ]
            )
        """
        conditions = []

        for key, value in filters.items():
            if isinstance(value, dict):
                # Complex query (range/comparison).
                # Simplified implementation: only exact-match is supported;
                # production use could extend this with $gte, $lte, $in, etc.
                pass
            else:
                # Simple equality match
                conditions.append(
                    FieldCondition(
                        key=key,
                        match=MatchValue(value=value)
                    )
                )

        return Filter(must=conditions) if conditions else None

    async def close(self) -> None:
        """Close the connection"""
        if self.client:
            self.client.close()
        self._initialized = False
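
A minimal end-to-end sketch of the adapter above, not part of the released files. It assumes a Qdrant instance on localhost:6333 and that Document's doc_id and score fields default to None (the adapter's own None checks suggest they do); the vectors are tiny hand-written placeholders rather than real embeddings.

import asyncio

from loom.builtin.retriever.qdrant_store import QdrantVectorStore
from loom.builtin.retriever.vector_store_config import QdrantConfig
from loom.interfaces.retriever import Document


async def main() -> None:
    config = QdrantConfig.create(collection_name="loom_docs", dimension=3)
    store = QdrantVectorStore(config)
    await store.initialize()

    # Toy 3-dimensional vectors; a real deployment would use an embedding model.
    await store.add_vectors(
        vectors=[[0.1, 0.2, 0.3], [0.9, 0.1, 0.0]],
        documents=[
            Document(content="Loom is an AI agent framework", metadata={"category": "docs"}),
            Document(content="Qdrant stores vectors", metadata={"category": "infra"}),
        ],
    )

    # Equality filters are translated by _build_qdrant_filter into FieldCondition/MatchValue.
    hits = await store.search([0.1, 0.2, 0.25], top_k=1, filters={"category": "docs"})
    for doc, score in hits:
        print(score, doc.content)

    await store.close()


asyncio.run(main())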

loom/builtin/retriever/vector_store.py
@@ -0,0 +1,128 @@
"""Vector-store-backed retriever implementation"""

from __future__ import annotations

from typing import Any, Dict, List, Optional

from loom.interfaces.retriever import BaseRetriever, Document
from loom.interfaces.vector_store import BaseVectorStore
from loom.interfaces.embedding import BaseEmbedding


class VectorStoreRetriever(BaseRetriever):
    """
    Vector store retriever - retrieval by semantic similarity

    Combines a vector store with an embedding model to provide semantic retrieval.

    Features:
    - ✅ Semantic similarity retrieval
    - ✅ Supports multiple vector databases
    - ✅ Supports multiple embedding models
    - ✅ Automatic vectorization

    Example:
        from loom.builtin.retriever.vector_store import VectorStoreRetriever
        from loom.builtin.retriever.chroma_store import ChromaVectorStore
        from loom.builtin.embeddings import OpenAIEmbedding

        vector_store = ChromaVectorStore(config)
        await vector_store.initialize()

        embedding = OpenAIEmbedding(api_key="...")

        retriever = VectorStoreRetriever(
            vector_store=vector_store,
            embedding=embedding
        )

        # Add documents (vectorized automatically)
        await retriever.add_documents([
            Document(content="Loom is an AI agent framework"),
            Document(content="Loom supports RAG capabilities"),
        ])

        # Retrieve (the query is vectorized automatically)
        results = await retriever.retrieve("What is Loom?", top_k=2)
    """

    def __init__(
        self,
        vector_store: BaseVectorStore,
        embedding: BaseEmbedding,
    ):
        """
        Parameters:
            vector_store: vector store instance (Pinecone, Qdrant, Milvus, ChromaDB)
            embedding: embedding model instance (OpenAI, Sentence Transformers)
        """
        self.vector_store = vector_store
        self.embedding = embedding

    async def retrieve(
        self,
        query: str,
        top_k: int = 5,
        filters: Optional[Dict[str, Any]] = None,
    ) -> List[Document]:
        """
        Retrieve documents by semantic similarity

        Parameters:
            query: query text
            top_k: number of documents to return
            filters: metadata filter conditions (optional)

        Returns:
            A list of Documents, sorted by similarity score in descending order
        """
        # Step 1: vectorize the query
        query_vector = await self.embedding.embed_query(query)

        # Step 2: search the vector store
        results = await self.vector_store.search(
            query_vector=query_vector,
            top_k=top_k,
            filters=filters
        )

        # Step 3: extract the documents (scores are already attached)
        documents = [doc for doc, _score in results]

        return documents

    async def add_documents(self, documents: List[Document]) -> None:
        """
        Add documents to the vector store (vectorized automatically)

        Parameters:
            documents: list of documents
        """
        if not documents:
            return

        # Step 1: extract the document contents
        texts = [doc.content for doc in documents]

        # Step 2: embed in batch
        vectors = await self.embedding.embed_documents(texts)

        # Step 3: add to the vector store
        await self.vector_store.add_vectors(vectors, documents)

    async def delete_documents(self, doc_ids: List[str]) -> None:
        """
        Delete documents

        Parameters:
            doc_ids: list of document IDs
        """
        await self.vector_store.delete(doc_ids)

    async def clear(self) -> None:
        """Clear all documents"""
        await self.vector_store.clear()

    async def close(self) -> None:
        """Close the connection"""
        await self.vector_store.close()
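
A minimal wiring sketch, not part of the released files. It assumes ChromaVectorStore accepts its config the same way QdrantVectorStore does, and it uses a duck-typed stand-in for the embedding model since VectorStoreRetriever only calls embed_documents and embed_query; real use would pass OpenAIEmbedding or a Sentence Transformers model instead.

import asyncio
import hashlib
from typing import List

from loom.builtin.retriever.chroma_store import ChromaVectorStore
from loom.builtin.retriever.vector_store import VectorStoreRetriever
from loom.builtin.retriever.vector_store_config import ChromaConfig
from loom.interfaces.retriever import Document


class ToyEmbedding:
    """Hash-based pseudo-embedding: deterministic, dependency-free, 8-dimensional."""

    async def embed_query(self, text: str) -> List[float]:
        digest = hashlib.sha256(text.encode()).digest()
        return [b / 255 for b in digest[:8]]

    async def embed_documents(self, texts: List[str]) -> List[List[float]]:
        return [await self.embed_query(t) for t in texts]


async def main() -> None:
    store = ChromaVectorStore(ChromaConfig.create_local(dimension=8))
    await store.initialize()

    retriever = VectorStoreRetriever(vector_store=store, embedding=ToyEmbedding())
    await retriever.add_documents([Document(content="Loom supports RAG capabilities")])
    print(await retriever.retrieve("What does Loom support?", top_k=1))

    await retriever.close()


asyncio.run(main())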

loom/builtin/retriever/vector_store_config.py
@@ -0,0 +1,217 @@
"""Vector database configuration management - a unified configuration interface"""

from __future__ import annotations

from typing import Any, Dict, Optional
from pydantic import BaseModel, Field


class VectorStoreConfig(BaseModel):
    """Base class for vector database configurations"""
    store_type: str = Field(description="Vector database type: pinecone, qdrant, milvus, chroma")
    api_key: Optional[str] = Field(default=None, description="API key (if required)")
    host: Optional[str] = Field(default=None, description="Database host")
    port: Optional[int] = Field(default=None, description="Database port")
    collection_name: str = Field(default="loom_documents", description="Collection/index name")
    dimension: int = Field(default=1536, description="Vector dimension")
    metric: str = Field(default="cosine", description="Similarity metric: cosine, euclidean, dot_product")
    extra_params: Dict[str, Any] = Field(default_factory=dict, description="Extra parameters")


class PineconeConfig(VectorStoreConfig):
    """Pinecone configuration"""
    store_type: str = "pinecone"
    environment: str = Field(description="Pinecone environment: us-west1-gcp, eu-west1-gcp, etc.")
    index_name: str = Field(description="Index name")
    namespace: Optional[str] = Field(default=None, description="Namespace (optional)")

    @classmethod
    def create(
        cls,
        api_key: str,
        environment: str,
        index_name: str,
        namespace: Optional[str] = None,
        dimension: int = 1536,
    ) -> "PineconeConfig":
        """Quickly create a Pinecone configuration"""
        return cls(
            api_key=api_key,
            environment=environment,
            index_name=index_name,
            namespace=namespace,
            dimension=dimension,
        )


class QdrantConfig(VectorStoreConfig):
    """Qdrant configuration"""
    store_type: str = "qdrant"
    host: str = Field(default="localhost", description="Qdrant host")
    port: int = Field(default=6333, description="Qdrant port")
    grpc_port: Optional[int] = Field(default=None, description="gRPC port (optional)")
    prefer_grpc: bool = Field(default=False, description="Prefer gRPC")
    https: bool = Field(default=False, description="Use HTTPS")

    @classmethod
    def create(
        cls,
        host: str = "localhost",
        port: int = 6333,
        collection_name: str = "loom_documents",
        dimension: int = 1536,
        api_key: Optional[str] = None,
        https: bool = False,
    ) -> "QdrantConfig":
        """Quickly create a Qdrant configuration"""
        return cls(
            host=host,
            port=port,
            collection_name=collection_name,
            dimension=dimension,
            api_key=api_key,
            https=https,
        )


class MilvusConfig(VectorStoreConfig):
    """Milvus configuration"""
    store_type: str = "milvus"
    host: str = Field(default="localhost", description="Milvus host")
    port: int = Field(default=19530, description="Milvus port")
    user: Optional[str] = Field(default=None, description="Username")
    password: Optional[str] = Field(default=None, description="Password")
    secure: bool = Field(default=False, description="Use a secure connection")
    index_type: str = Field(default="IVF_FLAT", description="Index type: IVF_FLAT, HNSW, etc.")
    index_params: Dict[str, Any] = Field(default_factory=dict, description="Index parameters")

    @classmethod
    def create(
        cls,
        host: str = "localhost",
        port: int = 19530,
        collection_name: str = "loom_documents",
        dimension: int = 1536,
        user: Optional[str] = None,
        password: Optional[str] = None,
        index_type: str = "IVF_FLAT",
    ) -> "MilvusConfig":
        """Quickly create a Milvus configuration"""
        return cls(
            host=host,
            port=port,
            collection_name=collection_name,
            dimension=dimension,
            user=user,
            password=password,
            index_type=index_type,
        )


class ChromaConfig(VectorStoreConfig):
    """ChromaDB configuration"""
    store_type: str = "chroma"
    host: Optional[str] = Field(default=None, description="ChromaDB host (remote mode)")
    port: Optional[int] = Field(default=8000, description="ChromaDB port (remote mode)")
    persist_directory: Optional[str] = Field(default=None, description="Persistence directory (local mode)")
    client_type: str = Field(default="local", description="Client type: local, http")

    @classmethod
    def create_local(
        cls,
        persist_directory: str = "./chroma_db",
        collection_name: str = "loom_documents",
        dimension: int = 1536,
    ) -> "ChromaConfig":
        """Create a local ChromaDB configuration"""
        return cls(
            persist_directory=persist_directory,
            collection_name=collection_name,
            dimension=dimension,
            client_type="local",
        )

    @classmethod
    def create_remote(
        cls,
        host: str = "localhost",
        port: int = 8000,
        collection_name: str = "loom_documents",
        dimension: int = 1536,
    ) -> "ChromaConfig":
        """Create a remote ChromaDB configuration"""
        return cls(
            host=host,
            port=port,
            collection_name=collection_name,
            dimension=dimension,
            client_type="http",
        )


class EmbeddingConfig(BaseModel):
    """Embedding model configuration"""
    provider: str = Field(description="Provider: openai, huggingface, cohere, sentence_transformers")
    model_name: str = Field(description="Model name")
    api_key: Optional[str] = Field(default=None, description="API key (if required)")
    dimension: int = Field(default=1536, description="Vector dimension")
    batch_size: int = Field(default=32, description="Batch size")
    extra_params: Dict[str, Any] = Field(default_factory=dict, description="Extra parameters")

    @classmethod
    def openai(
        cls,
        api_key: str,
        model_name: str = "text-embedding-3-small",
        dimension: int = 1536,
    ) -> "EmbeddingConfig":
        """OpenAI embedding configuration"""
        return cls(
            provider="openai",
            model_name=model_name,
            api_key=api_key,
            dimension=dimension,
        )

    @classmethod
    def huggingface(
        cls,
        model_name: str = "sentence-transformers/all-MiniLM-L6-v2",
        dimension: int = 384,
        api_key: Optional[str] = None,
    ) -> "EmbeddingConfig":
        """HuggingFace embedding configuration"""
        return cls(
            provider="huggingface",
            model_name=model_name,
            api_key=api_key,
            dimension=dimension,
        )

    @classmethod
    def sentence_transformers(
        cls,
        model_name: str = "all-MiniLM-L6-v2",
        dimension: int = 384,
    ) -> "EmbeddingConfig":
        """Sentence Transformers local model configuration"""
        return cls(
            provider="sentence_transformers",
            model_name=model_name,
            dimension=dimension,
        )

    @classmethod
    def cohere(
        cls,
        api_key: str,
        model_name: str = "embed-english-v3.0",
        dimension: int = 1024,
    ) -> "EmbeddingConfig":
        """Cohere embedding configuration"""
        return cls(
            provider="cohere",
            model_name=model_name,
            api_key=api_key,
            dimension=dimension,
        )
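
A short sketch of how the factories above fit together, not part of the released files: each config is a plain Pydantic model, so it can be built with the classmethod helpers and flattened with model_dump() before being handed to the matching adapter (exactly what QdrantVectorStore does with its config).

from loom.builtin.retriever.vector_store_config import (
    ChromaConfig,
    EmbeddingConfig,
    QdrantConfig,
)

qdrant = QdrantConfig.create(host="localhost", port=6333, collection_name="loom_docs")
chroma = ChromaConfig.create_local(persist_directory="./chroma_db")
embedding = EmbeddingConfig.sentence_transformers(model_name="all-MiniLM-L6-v2")

# Each config flattens to the dict the adapters read with .get(...)
print(qdrant.model_dump()["store_type"])    # "qdrant"
print(chroma.model_dump()["client_type"])   # "local"
print(embedding.model_dump()["dimension"])  # 384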

loom/builtin/tools/__init__.py
@@ -0,0 +1,32 @@
from .calculator import Calculator
from .read_file import ReadFileTool
from .write_file import WriteFileTool
from .glob import GlobTool
from .grep import GrepTool

try:
    from .web_search import WebSearchTool
    _has_web_search = True
except ImportError:
    _has_web_search = False

try:
    from .python_repl import PythonREPLTool
    _has_python_repl = True
except ImportError:
    _has_python_repl = False

try:
    from .http_request import HTTPRequestTool
    _has_http_request = True
except ImportError:
    _has_http_request = False

__all__ = ["Calculator", "ReadFileTool", "WriteFileTool", "GlobTool", "GrepTool"]

if _has_web_search:
    __all__.append("WebSearchTool")
if _has_python_repl:
    __all__.append("PythonREPLTool")
if _has_http_request:
    __all__.append("HTTPRequestTool")
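
A short sketch, not part of the released files, showing the effect of the optional imports above: tools whose dependencies are missing are silently dropped from __all__, so callers can probe availability before relying on them.

import loom.builtin.tools as tools

print(tools.__all__)                  # always includes the five core tools
if "WebSearchTool" in tools.__all__:  # exported only when its dependency imports cleanly
    print("web search available")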

loom/builtin/tools/calculator.py
@@ -0,0 +1,49 @@
from __future__ import annotations

import ast
import operator as op
from typing import Any

from pydantic import BaseModel, Field

from loom.interfaces.tool import BaseTool


class CalcArgs(BaseModel):
    expression: str = Field(description="Arithmetic expression, e.g., '2+2*3'")


class Calculator(BaseTool):
    name = "calculator"
    description = "Evaluate simple arithmetic expressions"
    args_schema = CalcArgs

    async def run(self, **kwargs) -> Any:
        expr = kwargs.get("expression", "")
        return str(_safe_eval(expr))


# Safe eval: only basic arithmetic is supported
_ops = {
    ast.Add: op.add,
    ast.Sub: op.sub,
    ast.Mult: op.mul,
    ast.Div: op.truediv,
    ast.Pow: op.pow,
    ast.USub: op.neg,
}


def _safe_eval(expr: str) -> Any:
    def _eval(node: ast.AST) -> Any:
        if isinstance(node, ast.Constant):  # numeric literal (ast.Num is deprecated)
            if isinstance(node.value, (int, float)):
                return node.value
            raise ValueError("Unsupported expression")
        if isinstance(node, ast.BinOp):
            return _ops[type(node.op)](_eval(node.left), _eval(node.right))
        if isinstance(node, ast.UnaryOp):
            return _ops[type(node.op)](_eval(node.operand))
        raise ValueError("Unsupported expression")

    tree = ast.parse(expr, mode="eval")
    return _eval(tree.body)
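
A minimal usage sketch, not part of the released files, assuming a Calculator can be instantiated without constructor arguments. It shows that the AST walker only accepts the node types mapped in _ops, so plain arithmetic evaluates while names and calls are rejected.

import asyncio

from loom.builtin.tools.calculator import Calculator, _safe_eval

print(_safe_eval("2 + 2 * 3"))          # 8
try:
    _safe_eval("__import__('os')")      # function calls are not in _ops
except ValueError as exc:
    print("rejected:", exc)

print(asyncio.run(Calculator().run(expression="2 ** 10")))  # "1024"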