camel-ai 0.2.69a6__py3-none-any.whl → 0.2.70__py3-none-any.whl

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

@@ -0,0 +1,349 @@
+ # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ # ========= Copyright 2023-2024 @ CAMEL-AI.org. All Rights Reserved. =========
+
+ import json
+ from typing import Any, Dict, List, Optional
+
+ from camel.logger import get_logger
+ from camel.storages.vectordb_storages import (
+     BaseVectorStorage,
+     VectorDBQuery,
+     VectorDBQueryResult,
+     VectorDBStatus,
+     VectorRecord,
+ )
+ from camel.types import VectorDistance
+ from camel.utils import dependencies_required
+
+ logger = get_logger(__name__)
+
+
+ class PgVectorStorage(BaseVectorStorage):
+     r"""PgVectorStorage is an implementation of BaseVectorStorage for
+     PostgreSQL with pgvector extension.
+
+     This class provides methods to add, delete, query, and manage vector
+     records in a PostgreSQL database using the pgvector extension.
+     It supports different distance metrics for similarity search.
+
+     Args:
+         vector_dim (int): The dimension of the vectors to be stored.
+         conn_info (Dict[str, Any]): Connection information for
+             psycopg2.connect.
+         table_name (str, optional): Name of the table to store vectors.
+             (default: :obj:`None`)
+         distance (VectorDistance, optional): Distance metric for vector
+             comparison. (default: :obj:`VectorDistance.COSINE`)
+     """
+
+     @dependencies_required('psycopg', 'pgvector')
+     def __init__(
+         self,
+         vector_dim: int,
+         conn_info: Dict[str, Any],
+         table_name: Optional[str] = None,
+         distance: VectorDistance = VectorDistance.COSINE,
+         **kwargs: Any,
+     ) -> None:
+         r"""Initialize PgVectorStorage.
+
+         Args:
+             vector_dim (int): The dimension of the vectors.
+             conn_info (Dict[str, Any]): Connection info for psycopg2.connect.
+             table_name (str, optional): Table name. (default: :obj:`None`)
+             distance (VectorDistance, optional): Distance metric.
+                 (default: :obj:`VectorDistance.COSINE`)
+         """
+         import psycopg
+         from pgvector.psycopg import register_vector
+
+         if vector_dim <= 0:
+             raise ValueError("vector_dim must be positive")
+
+         self.vector_dim = vector_dim
+         self.conn_info = conn_info
+         self.table_name = table_name or 'vectors'
+         self.distance = distance
+
+         try:
+             self._conn = psycopg.connect(**conn_info)
+             register_vector(self._conn)
+             self._ensure_table()
+             self._ensure_index()
+         except Exception as e:
+             logger.error(f"Failed to initialize PgVectorStorage: {e}")
+             raise
+
+     def _ensure_table(self) -> None:
+         r"""Ensure the vector table exists in the database.
+         Creates the table if it does not exist.
+         """
+         try:
+             from psycopg.sql import SQL, Identifier, Literal
+
+             with self._conn.cursor() as cur:
+                 query = SQL("""
+                     CREATE TABLE IF NOT EXISTS {table} (
+                         id VARCHAR PRIMARY KEY,
+                         vector vector({dim}),
+                         payload JSONB
+                     )
+                 """).format(
+                     table=Identifier(self.table_name),
+                     dim=Literal(self.vector_dim),
+                 )
+                 cur.execute(query)
+                 self._conn.commit()
+         except Exception as e:
+             logger.error(f"Failed to create table {self.table_name}: {e}")
+             raise
+
+     def _ensure_index(self) -> None:
+         r"""Ensure vector similarity search index exists for better
+         performance.
+         """
+         try:
+             from psycopg.sql import SQL, Identifier
+
+             with self._conn.cursor() as cur:
+                 index_name = f"{self.table_name}_vector_idx"
+                 query = SQL("""
+                     CREATE INDEX IF NOT EXISTS {index_name}
+                     ON {table}
+                     USING hnsw (vector vector_cosine_ops)
+                 """).format(
+                     index_name=Identifier(index_name),
+                     table=Identifier(self.table_name),
+                 )
+                 cur.execute(query)
+                 self._conn.commit()
+         except Exception as e:
+             logger.warning(f"Failed to create vector index: {e}")
+
+     def add(self, records: List[VectorRecord], **kwargs: Any) -> None:
+         r"""Add or update vector records in the database.
+
+         Args:
+             records (List[VectorRecord]): List of vector records to
+                 add or update.
+         """
+         if not records:
+             return
+
+         try:
+             with self._conn.cursor() as cur:
+                 # Use batch insert for better performance
+                 batch_data = []
+                 for rec in records:
+                     if len(rec.vector) != self.vector_dim:
+                         raise ValueError(
+                             f"Vector dimension mismatch: expected "
+                             f"{self.vector_dim}, got {len(rec.vector)}"
+                         )
+
+                     batch_data.append(
+                         (
+                             rec.id,
+                             rec.vector,
+                             json.dumps(rec.payload)
+                             if rec.payload is not None
+                             else None,
+                         )
+                     )
+
+                 # Use executemany for efficient batch insert
+                 from psycopg.sql import SQL, Identifier
+
+                 query = SQL("""
+                     INSERT INTO {table} (id, vector, payload)
+                     VALUES (%s, %s, %s)
+                     ON CONFLICT (id) DO UPDATE SET
+                         vector=EXCLUDED.vector,
+                         payload=EXCLUDED.payload
+                 """).format(table=Identifier(self.table_name))
+
+                 cur.executemany(query, batch_data)
+                 self._conn.commit()
+         except Exception as e:
+             self._conn.rollback()
+             logger.error(f"Failed to add records: {e}")
+             raise
+
+     def delete(self, ids: List[str], **kwargs: Any) -> None:
+         r"""Delete vector records from the database by their IDs.
+
+         Args:
+             ids (List[str]): List of record IDs to delete.
+         """
+         from psycopg.sql import SQL, Identifier
+
+         if not ids:
+             return
+
+         try:
+             with self._conn.cursor() as cur:
+                 query = SQL("DELETE FROM {table} WHERE id = ANY(%s)").format(
+                     table=Identifier(self.table_name)
+                 )
+                 cur.execute(query, (ids,))
+                 self._conn.commit()
+         except Exception as e:
+             self._conn.rollback()
+             logger.error(f"Failed to delete records: {e}")
+             raise
+
+     def query(
+         self, query: VectorDBQuery, **kwargs: Any
+     ) -> List[VectorDBQueryResult]:
+         r"""Query the database for the most similar vectors to the given
+         query vector.
+
+         Args:
+             query (VectorDBQuery): Query object containing the query
+                 vector and top_k.
+             **kwargs (Any): Additional keyword arguments for the query.
+
+         Returns:
+             List[VectorDBQueryResult]: List of query results sorted by
+                 similarity.
+         """
+         if len(query.query_vector) != self.vector_dim:
+             raise ValueError(
+                 f"Query vector dimension mismatch: "
+                 f"expected {self.vector_dim}, got {len(query.query_vector)}"
+             )
+
+         try:
+             with self._conn.cursor() as cur:
+                 # Fix distance metric mapping
+                 metric_info = {
+                     VectorDistance.COSINE: ('<=>', 'ASC'),  # Cosine distance
+                     VectorDistance.EUCLIDEAN: (
+                         '<->',
+                         'ASC',
+                     ),  # Euclidean distance
+                     VectorDistance.DOT: (
+                         '<#>',
+                         'DESC',
+                     ),  # Negative dot product (higher is better)
+                 }
+
+                 if self.distance not in metric_info:
+                     raise ValueError(
+                         f"Unsupported distance metric: {self.distance}"
+                     )
+
+                 metric, order = metric_info[self.distance]
+
+                 from psycopg.sql import SQL, Identifier, Literal
+
+                 query_sql = SQL("""
+                     SELECT id, vector, payload, (vector {} %s::vector)
+                         AS similarity
+                     FROM {}
+                     ORDER BY similarity {}
+                     LIMIT %s
+                 """).format(
+                     Literal(metric),
+                     Identifier(self.table_name),
+                     Literal(order),
+                 )
+
+                 cur.execute(query_sql, (query.query_vector, query.top_k))
+                 results = []
+                 for row in cur.fetchall():
+                     id, vector, payload, similarity = row
+                     results.append(
+                         VectorDBQueryResult.create(
+                             similarity=float(similarity),
+                             vector=list(vector),
+                             id=id,
+                             payload=payload,
+                         )
+                     )
+                 return results
+         except Exception as e:
+             logger.error(f"Failed to query vectors: {e}")
+             raise
+
+     def status(self, **kwargs: Any) -> VectorDBStatus:
+         r"""Get the status of the vector database, including vector
+         dimension and count.
+
+         Args:
+             **kwargs (Any): Additional keyword arguments for the query.
+
+         Returns:
+             VectorDBStatus: Status object with vector dimension and count.
+         """
+         try:
+             with self._conn.cursor() as cur:
+                 from psycopg.sql import SQL, Identifier
+
+                 query = SQL('SELECT COUNT(*) FROM {}').format(
+                     Identifier(self.table_name)
+                 )
+                 cur.execute(query)
+                 result = cur.fetchone()
+                 count = result[0] if result else 0
+                 return VectorDBStatus(
+                     vector_dim=self.vector_dim, vector_count=count
+                 )
+         except Exception as e:
+             logger.error(f"Failed to get status: {e}")
+             raise
+
+     def clear(self) -> None:
+         r"""Remove all vectors from the storage by truncating the table."""
+         try:
+             with self._conn.cursor() as cur:
+                 from psycopg.sql import SQL, Identifier
+
+                 query = SQL("TRUNCATE TABLE {table}").format(
+                     table=Identifier(self.table_name)
+                 )
+                 cur.execute(query)
+                 self._conn.commit()
+         except Exception as e:
+             self._conn.rollback()
+             logger.error(f"Failed to clear table: {e}")
+             raise
+
+     def load(self) -> None:
+         r"""Load the collection hosted on cloud service (no-op for pgvector).
+         This method is provided for interface compatibility.
+         """
+         # For PostgreSQL local/managed instances, no loading is required
+         pass
+
+     def close(self) -> None:
+         r"""Close the database connection."""
+         if hasattr(self, '_conn') and self._conn:
+             try:
+                 self._conn.close()
+             except Exception as e:
+                 logger.warning(f"Error closing connection: {e}")
+
+     def __del__(self) -> None:
+         r"""Ensure connection is closed when object is destroyed."""
+         self.close()
+
+     @property
+     def client(self) -> Any:
+         r"""Provides access to the underlying vector database client.
+
+         Returns:
+             Any: The underlying psycopg connection object.
+         """
+         return self._conn
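
The new file above (its path is not shown in this diff) adds a pgvector-backed implementation of BaseVectorStorage. Below is a minimal usage sketch, not an official example: it assumes a reachable PostgreSQL server with the pgvector extension installed, placeholder connection parameters, and an import path for PgVectorStorage that is an assumption rather than something visible in the diff.

# Sketch only: the connection parameters, database name, and the import path
# for PgVectorStorage are placeholders/assumptions, not taken from this diff.
from camel.storages.vectordb_storages import VectorDBQuery, VectorRecord
from camel.storages.vectordb_storages.pgvector import PgVectorStorage  # assumed path

storage = PgVectorStorage(
    vector_dim=4,
    conn_info={  # forwarded directly to psycopg.connect(**conn_info)
        "host": "localhost",
        "port": 5432,
        "user": "postgres",
        "password": "postgres",
        "dbname": "camel",
    },
    table_name="demo_vectors",  # defaults to 'vectors' when omitted
)

# add() upserts records (INSERT ... ON CONFLICT (id) DO UPDATE).
storage.add(
    [
        VectorRecord(vector=[0.1, 0.2, 0.3, 0.4], payload={"label": "a"}),
        VectorRecord(vector=[0.4, 0.3, 0.2, 0.1], payload={"label": "b"}),
    ]
)

# query() returns results ordered by the configured distance metric
# (cosine by default; VectorDistance.EUCLIDEAN and VectorDistance.DOT
# are also handled).
for result in storage.query(VectorDBQuery(query_vector=[0.1, 0.2, 0.3, 0.4], top_k=1)):
    print(result.record.id, result.similarity, result.record.payload)

print(storage.status().vector_count)  # number of stored vectors
storage.close()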
camel/tasks/task.py CHANGED

@@ -25,7 +25,8 @@ from typing import (
      Union,
  )
  
- from pydantic import BaseModel, Field
+ from PIL import Image
+ from pydantic import BaseModel, ConfigDict, Field
  
  if TYPE_CHECKING:
      from camel.agents import ChatAgent

@@ -158,6 +159,14 @@ class Task(BaseModel):
              (default: :obj:`0`)
          additional_info (Optional[Dict[str, Any]]): Additional information for
              the task. (default: :obj:`None`)
+         image_list (Optional[List[Image.Image]]): Optional list of PIL Image
+             objects associated with the task. (default: :obj:`None`)
+         image_detail (Literal["auto", "low", "high"]): Detail level of the
+             images associated with the task. (default: :obj:`auto`)
+         video_bytes (Optional[bytes]): Optional bytes of a video associated
+             with the task. (default: :obj:`None`)
+         video_detail (Literal["auto", "low", "high"]): Detail level of the
+             videos associated with the task. (default: :obj:`auto`)
      """
  
      content: str

@@ -180,6 +189,16 @@
  
      additional_info: Optional[Dict[str, Any]] = None
  
+     image_list: Optional[List[Image.Image]] = None
+ 
+     image_detail: Literal["auto", "low", "high"] = "auto"
+ 
+     video_bytes: Optional[bytes] = None
+ 
+     video_detail: Literal["auto", "low", "high"] = "auto"
+ 
+     model_config = ConfigDict(arbitrary_types_allowed=True)
+ 
      def __repr__(self) -> str:
          r"""Return a string representation of the task."""
          content_preview = self.content

@@ -363,6 +382,10 @@
              role_name=role_name,
              content=self.content,
              additional_info=self.additional_info,
+             image_list=self.image_list,
+             image_detail=self.image_detail,
+             video_bytes=self.video_bytes,
+             video_detail=self.video_detail,
              other_results=sub_tasks_result,
          )
          msg = BaseMessage.make_user_message(

@@ -513,7 +536,12 @@
          role_name = agent.role_name
          content = template.format(role_name=role_name, content=task.content)
          msg = BaseMessage.make_user_message(
-             role_name=role_name, content=content
+             role_name=role_name,
+             content=content,
+             image_list=task.image_list,
+             image_detail=task.image_detail,
+             video_bytes=task.video_bytes,
+             video_detail=task.video_detail,
          )
          response = agent.step(msg)
          if task_parser is None:
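
The Task changes above add optional multimodal fields (image_list, image_detail, video_bytes, video_detail) and forward them to BaseMessage.make_user_message. A minimal sketch of how a task might carry an image under these changes, assuming Pillow is installed and that Task is importable from camel.tasks as in previous releases; the solid-colour test image is illustrative only.

from PIL import Image

from camel.tasks import Task

# Illustrative image; in practice this would be a real PIL.Image loaded from disk.
img = Image.new("RGB", (64, 64), color="red")

task = Task(
    content="Describe the attached image.",
    id="0",
    image_list=[img],    # new field; passed through to BaseMessage.make_user_message
    image_detail="low",  # one of "auto", "low", "high" (default "auto")
)
# video_bytes / video_detail work analogously for video content.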
@@ -79,10 +79,10 @@ from .wolfram_alpha_toolkit import WolframAlphaToolkit
  from .task_planning_toolkit import TaskPlanningToolkit
  from .non_visual_browser_toolkit import BrowserNonVisualToolkit
  from .edgeone_pages_mcp_toolkit import EdgeOnePagesMCPToolkit
+ from .google_drive_mcp_toolkit import GoogleDriveMCPToolkit
  from .craw4ai_toolkit import Crawl4AIToolkit
  from .markitdown_toolkit import MarkItDownToolkit
  
- 
  __all__ = [
      'BaseToolkit',
      'FunctionTool',

@@ -148,6 +148,7 @@ __all__ = [
      'TaskPlanningToolkit',
      'BrowserNonVisualToolkit',
      'EdgeOnePagesMCPToolkit',
+     'GoogleDriveMCPToolkit',
      'Crawl4AIToolkit',
      'MarkItDownToolkit',
  ]
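
The final hunks (apparently the toolkit package's __init__, though the file path is not shown in this diff) register the new GoogleDriveMCPToolkit so it is importable alongside the other toolkits. Its constructor arguments are not part of this diff, so only the import is sketched:

# Sketch only: the toolkit's configuration (e.g. MCP server setup) is not visible in this diff.
from camel.toolkits import GoogleDriveMCPToolkit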