camel-ai 0.1.5.3__py3-none-any.whl → 0.1.5.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- camel/__init__.py +1 -1
- camel/agents/knowledge_graph_agent.py +4 -1
- camel/configs/__init__.py +6 -0
- camel/configs/litellm_config.py +8 -18
- camel/configs/ollama_config.py +85 -0
- camel/configs/zhipuai_config.py +78 -0
- camel/embeddings/base.py +10 -9
- camel/embeddings/openai_embedding.py +27 -14
- camel/embeddings/sentence_transformers_embeddings.py +28 -14
- camel/functions/search_functions.py +5 -14
- camel/functions/slack_functions.py +5 -7
- camel/functions/twitter_function.py +3 -8
- camel/functions/weather_functions.py +3 -8
- camel/interpreters/__init__.py +2 -0
- camel/interpreters/docker_interpreter.py +235 -0
- camel/loaders/__init__.py +2 -0
- camel/loaders/base_io.py +5 -9
- camel/loaders/jina_url_reader.py +99 -0
- camel/loaders/unstructured_io.py +4 -6
- camel/models/anthropic_model.py +6 -4
- camel/models/litellm_model.py +49 -21
- camel/models/model_factory.py +1 -0
- camel/models/nemotron_model.py +14 -6
- camel/models/ollama_model.py +11 -17
- camel/models/openai_audio_models.py +10 -2
- camel/models/openai_model.py +4 -3
- camel/models/zhipuai_model.py +12 -6
- camel/retrievers/auto_retriever.py +2 -2
- camel/retrievers/bm25_retriever.py +3 -8
- camel/retrievers/cohere_rerank_retriever.py +3 -5
- camel/storages/__init__.py +2 -0
- camel/storages/graph_storages/graph_element.py +9 -1
- camel/storages/graph_storages/neo4j_graph.py +3 -7
- camel/storages/key_value_storages/__init__.py +2 -0
- camel/storages/key_value_storages/redis.py +169 -0
- camel/storages/vectordb_storages/milvus.py +3 -7
- camel/storages/vectordb_storages/qdrant.py +3 -7
- camel/toolkits/__init__.py +2 -0
- camel/toolkits/code_execution.py +69 -0
- camel/toolkits/github_toolkit.py +5 -9
- camel/types/enums.py +49 -20
- camel/utils/__init__.py +2 -2
- camel/utils/async_func.py +42 -0
- camel/utils/commons.py +31 -49
- camel/utils/token_counting.py +40 -1
- {camel_ai-0.1.5.3.dist-info → camel_ai-0.1.5.5.dist-info}/METADATA +16 -8
- {camel_ai-0.1.5.3.dist-info → camel_ai-0.1.5.5.dist-info}/RECORD +48 -44
- camel/bots/__init__.py +0 -20
- camel/bots/discord_bot.py +0 -103
- camel/bots/telegram_bot.py +0 -84
- {camel_ai-0.1.5.3.dist-info → camel_ai-0.1.5.5.dist-info}/WHEEL +0 -0
camel/models/ollama_model.py
CHANGED
@@ -11,12 +11,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
-import os
 from typing import Any, Dict, List, Optional, Union

 from openai import OpenAI, Stream

-from camel.configs import
+from camel.configs import OLLAMA_API_PARAMS
 from camel.messages import OpenAIMessage
 from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
 from camel.utils import BaseTokenCounter, OpenAITokenCounter
@@ -25,39 +24,34 @@ from camel.utils import BaseTokenCounter, OpenAITokenCounter
 class OllamaModel:
     r"""Ollama service interface."""

-    # NOTE: Current `ModelType and `TokenCounter` desigen is not suitable,
-    # stream mode is not supported
-
     def __init__(
         self,
         model_type: str,
         model_config_dict: Dict[str, Any],
-        api_key: Optional[str] = None,
         url: Optional[str] = None,
     ) -> None:
         r"""Constructor for Ollama backend with OpenAI compatibility.

+        # Reference: https://github.com/ollama/ollama/blob/main/docs/openai.md
+
         Args:
             model_type (str): Model for which a backend is created.
             model_config_dict (Dict[str, Any]): A dictionary that will
                 be fed into openai.ChatCompletion.create().
-
-
-            url (Optional[str]): The url to the model service.
+            url (Optional[str]): The url to the model service. (default:
+                :obj:`None`)
         """
         self.model_type = model_type
         self.model_config_dict = model_config_dict
-        self._url = url or os.environ.get('OPENAI_API_BASE_URL')
-        self._api_key = api_key or os.environ.get("OPENAI_API_KEY")
         # Use OpenAI cilent as interface call Ollama
-        # Reference: https://github.com/ollama/ollama/blob/main/docs/openai.md
         self._client = OpenAI(
             timeout=60,
             max_retries=3,
-            base_url=
-            api_key=
+            base_url=url,
+            api_key="ollama",  # required but ignored
         )
         self._token_counter: Optional[BaseTokenCounter] = None
+        self.check_model_config()

     @property
     def token_counter(self) -> BaseTokenCounter:
@@ -74,17 +68,17 @@ class OllamaModel:

     def check_model_config(self):
         r"""Check whether the model configuration contains any
-        unexpected arguments to
+        unexpected arguments to Ollama API.

         Raises:
             ValueError: If the model configuration dictionary contains any
                 unexpected arguments to OpenAI API.
         """
         for param in self.model_config_dict:
-            if param not in
+            if param not in OLLAMA_API_PARAMS:
                 raise ValueError(
                     f"Unexpected argument `{param}` is "
-                    "input into
+                    "input into Ollama model backend."
                 )

     def run(
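For orientation, a minimal usage sketch of the updated `OllamaModel` constructor (the `api_key` parameter is removed and the client now authenticates with the placeholder key `"ollama"`). The model name, URL, and config values below are illustrative assumptions, not taken from the diff:

```python
# Hypothetical usage sketch of the updated OllamaModel constructor.
# Assumes a local Ollama server exposing its OpenAI-compatible endpoint at
# http://localhost:11434/v1, a pulled `llama3` model, and that `temperature`
# is among the accepted OLLAMA_API_PARAMS.
from camel.models.ollama_model import OllamaModel

model = OllamaModel(
    model_type="llama3",
    model_config_dict={"temperature": 0.4},
    url="http://localhost:11434/v1",
)

# For non-streaming configs the result is an OpenAI-style ChatCompletion.
response = model.run(
    messages=[{"role": "user", "content": "Say hello in one sentence."}]
)
print(response.choices[0].message.content)
```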
camel/models/openai_audio_models.py
CHANGED
@@ -25,10 +25,18 @@ class OpenAIAudioModels:

     def __init__(
         self,
+        api_key: Optional[str] = None,
+        url: Optional[str] = None,
     ) -> None:
         r"""Initialize an instance of OpenAI."""
-
-        self.
+        self._url = url or os.environ.get("OPENAI_API_BASE_URL")
+        self._api_key = api_key or os.environ.get("OPENAI_API_KEY")
+        self._client = OpenAI(
+            timeout=120,
+            max_retries=3,
+            base_url=self._url,
+            api_key=self._api_key,
+        )

     def text_to_speech(
         self,
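A brief, hypothetical sketch of the new constructor parameters; the key and URL values are placeholders, and both fall back to environment variables as shown in the diff:

```python
from camel.models.openai_audio_models import OpenAIAudioModels

# Both arguments are optional; when omitted they fall back to the
# OPENAI_API_KEY and OPENAI_API_BASE_URL environment variables.
audio_models = OpenAIAudioModels(
    api_key="sk-...",                 # placeholder key
    url="https://api.openai.com/v1",  # placeholder base URL
)
```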
camel/models/openai_model.py
CHANGED
@@ -23,7 +23,7 @@ from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
 from camel.utils import (
     BaseTokenCounter,
     OpenAITokenCounter,
-
+    api_keys_required,
 )


@@ -46,7 +46,8 @@ class OpenAIModel(BaseModelBackend):
                 be fed into openai.ChatCompletion.create().
             api_key (Optional[str]): The API key for authenticating with the
                 OpenAI service. (default: :obj:`None`)
-            url (Optional[str]): The url to the OpenAI service.
+            url (Optional[str]): The url to the OpenAI service. (default:
+                :obj:`None`)
         """
         super().__init__(model_type, model_config_dict, api_key, url)
         self._url = url or os.environ.get("OPENAI_API_BASE_URL")
@@ -71,7 +72,7 @@ class OpenAIModel(BaseModelBackend):
             self._token_counter = OpenAITokenCounter(self.model_type)
         return self._token_counter

-    @
+    @api_keys_required("OPENAI_API_KEY")
     def run(
         self,
         messages: List[OpenAIMessage],
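The `api_keys_required` decorator imported here (and in the ZhipuAI backend below) lives in `camel/utils`, whose implementation is not part of this diff. As a rough illustration only, such a decorator typically checks that the named environment variables are set before letting the wrapped call proceed; a minimal sketch under that assumption:

```python
import functools
import os
from typing import Callable


def api_keys_required(*env_var_names: str) -> Callable:
    """Illustrative sketch only; the real decorator in camel/utils may
    differ in signature and behavior."""

    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Fail fast if any required API-key environment variable is unset.
            missing = [name for name in env_var_names if not os.environ.get(name)]
            if missing:
                raise ValueError(
                    f"Missing API key environment variable(s): {', '.join(missing)}"
                )
            return func(*args, **kwargs)

        return wrapper

    return decorator
```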
camel/models/zhipuai_model.py
CHANGED
@@ -17,14 +17,14 @@ from typing import Any, Dict, List, Optional, Union

 from openai import OpenAI, Stream

-from camel.configs import
+from camel.configs import ZHIPUAI_API_PARAMS
 from camel.messages import OpenAIMessage
 from camel.models import BaseModelBackend
 from camel.types import ChatCompletion, ChatCompletionChunk, ModelType
 from camel.utils import (
     BaseTokenCounter,
     OpenAITokenCounter,
-
+    api_keys_required,
 )


@@ -47,10 +47,16 @@ class ZhipuAIModel(BaseModelBackend):
                 be fed into openai.ChatCompletion.create().
             api_key (Optional[str]): The API key for authenticating with the
                 ZhipuAI service. (default: :obj:`None`)
+            url (Optional[str]): The url to the ZhipuAI service. (default:
+                :obj:`None`)
         """
         super().__init__(model_type, model_config_dict)
         self._url = url or os.environ.get("ZHIPUAI_API_BASE_URL")
         self._api_key = api_key or os.environ.get("ZHIPUAI_API_KEY")
+        if not self._url or not self._api_key:
+            raise ValueError(
+                "ZHIPUAI_API_BASE_URL and ZHIPUAI_API_KEY should be set."
+            )
         self._client = OpenAI(
             timeout=60,
             max_retries=3,
@@ -59,7 +65,7 @@ class ZhipuAIModel(BaseModelBackend):
         )
         self._token_counter: Optional[BaseTokenCounter] = None

-    @
+    @api_keys_required("ZHIPUAI_API_KEY")
     def run(
         self,
         messages: List[OpenAIMessage],
@@ -104,13 +110,13 @@ class ZhipuAIModel(BaseModelBackend):

         Raises:
             ValueError: If the model configuration dictionary contains any
-                unexpected arguments to
+                unexpected arguments to ZhipuAI API.
         """
         for param in self.model_config_dict:
-            if param not in
+            if param not in ZHIPUAI_API_PARAMS:
                 raise ValueError(
                     f"Unexpected argument `{param}` is "
-                    "input into
+                    "input into ZhipuAI model backend."
                 )
         pass

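Note the stricter constructor: both the base URL and the API key must now resolve to non-empty values or a `ValueError` is raised. A minimal sketch of satisfying that via environment variables (the URL is a placeholder, not taken from the diff):

```python
import os

# Both variables must be set (or passed explicitly) before constructing
# ZhipuAIModel; the URL below is only an illustrative placeholder.
os.environ.setdefault("ZHIPUAI_API_BASE_URL", "https://example-zhipuai-endpoint/v4")
os.environ.setdefault("ZHIPUAI_API_KEY", "<your-zhipuai-api-key>")
```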
camel/retrievers/auto_retriever.py
CHANGED
@@ -159,11 +159,11 @@ class AutoRetriever:
     ) -> str:
         r"""Retrieves the last modified date and time of a given file. This
         function takes vector storage instance as input and returns the last
-        modified date from the
+        modified date from the metadata.

         Args:
             vector_storage_instance (BaseVectorStorage): The vector storage
-                where modified date is to be retrieved from
+                where modified date is to be retrieved from metadata.

         Returns:
             str: The last modified date from vector storage.
camel/retrievers/bm25_retriever.py
CHANGED
@@ -17,6 +17,7 @@ import numpy as np

 from camel.loaders import UnstructuredIO
 from camel.retrievers import BaseRetriever
+from camel.utils import dependencies_required

 DEFAULT_TOP_K_RESULTS = 1

@@ -40,16 +41,10 @@ class BM25Retriever(BaseRetriever):
     https://github.com/dorianbrown/rank_bm25
     """

+    @dependencies_required('rank_bm25')
     def __init__(self) -> None:
         r"""Initializes the BM25Retriever."""
-
-        try:
-            from rank_bm25 import BM25Okapi
-        except ImportError as e:
-            raise ImportError(
-                "Package `rank_bm25` not installed, install by running 'pip "
-                "install rank_bm25'"
-            ) from e
+        from rank_bm25 import BM25Okapi

         self.bm25: BM25Okapi = None
         self.content_input_path: str = ""
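The `dependencies_required` decorator used here (and in the Cohere, Neo4j, Milvus, and Qdrant changes below) also comes from `camel/utils` and its implementation is not shown in this diff. It replaces the per-class try/except import blocks; a minimal sketch of that pattern, under the assumption that it simply fails fast with an install hint when a module is missing:

```python
import functools
import importlib.util
from typing import Callable


def dependencies_required(*module_names: str) -> Callable:
    """Illustrative sketch only; the real decorator in camel/utils may
    differ in wording and behavior."""

    def decorator(func: Callable) -> Callable:
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Check importability of each required optional module.
            missing = [
                name
                for name in module_names
                if importlib.util.find_spec(name) is None
            ]
            if missing:
                raise ImportError(
                    f"Missing required module(s): {', '.join(missing)}. "
                    f"Install them, e.g. `pip install {' '.join(missing)}`."
                )
            return func(*args, **kwargs)

        return wrapper

    return decorator
```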
camel/retrievers/cohere_rerank_retriever.py
CHANGED
@@ -15,6 +15,7 @@ import os
 from typing import Any, Dict, List, Optional

 from camel.retrievers import BaseRetriever
+from camel.utils import dependencies_required

 DEFAULT_TOP_K_RESULTS = 1

@@ -32,6 +33,7 @@ class CohereRerankRetriever(BaseRetriever):
     https://txt.cohere.com/rerank/
     """

+    @dependencies_required('cohere')
     def __init__(
         self,
         model_name: str = "rerank-multilingual-v2.0",
@@ -56,11 +58,7 @@ class CohereRerankRetriever(BaseRetriever):
             ValueError: If the API key is neither passed as an argument nor
                 set in the environment variable.
         """
-
-        try:
-            import cohere
-        except ImportError as e:
-            raise ImportError("Package 'cohere' is not installed") from e
+        import cohere

         try:
             self.api_key = api_key or os.environ["COHERE_API_KEY"]
camel/storages/__init__.py
CHANGED
@@ -17,6 +17,7 @@ from .graph_storages.neo4j_graph import Neo4jGraph
 from .key_value_storages.base import BaseKeyValueStorage
 from .key_value_storages.in_memory import InMemoryKeyValueStorage
 from .key_value_storages.json import JsonStorage
+from .key_value_storages.redis import RedisStorage
 from .vectordb_storages.base import (
     BaseVectorStorage,
     VectorDBQuery,
@@ -30,6 +31,7 @@ __all__ = [
     'BaseKeyValueStorage',
     'InMemoryKeyValueStorage',
     'JsonStorage',
+    'RedisStorage',
     'VectorRecord',
     'BaseVectorStorage',
     'VectorDBQuery',
camel/storages/graph_storages/graph_element.py
CHANGED
@@ -16,7 +16,10 @@ from __future__ import annotations
 from dataclasses import dataclass, field
 from typing import List, Union

-
+try:
+    from unstructured.documents.elements import Element
+except ImportError:
+    Element = None


 @dataclass
@@ -72,3 +75,8 @@ class GraphElement:
     nodes: List[Node]
     relationships: List[Relationship]
     source: Element
+
+    def __post_init__(self):
+        if Element is None:
+            raise ImportError("""The 'unstructured' package is required to use
+                the 'source' attribute.""")
camel/storages/graph_storages/neo4j_graph.py
CHANGED
@@ -16,6 +16,7 @@ from hashlib import md5
 from typing import Any, Dict, List, Optional

 from camel.storages.graph_storages import BaseGraphStorage, GraphElement
+from camel.utils import dependencies_required

 logger = logging.getLogger(__name__)

@@ -81,6 +82,7 @@ class Neo4jGraph(BaseGraphStorage):
             than `LIST_LIMIT` elements from results. Defaults to `False`.
     """

+    @dependencies_required('neo4j')
     def __init__(
         self,
         url: str,
@@ -91,13 +93,7 @@ class Neo4jGraph(BaseGraphStorage):
         truncate: bool = False,
     ) -> None:
         r"""Create a new Neo4j graph instance."""
-
-            import neo4j
-        except ImportError:
-            raise ValueError(
-                "Could not import neo4j python package. "
-                "Please install it with `pip install neo4j`."
-            )
+        import neo4j

         self.driver = neo4j.GraphDatabase.driver(
             url, auth=(username, password)
camel/storages/key_value_storages/__init__.py
CHANGED
@@ -15,9 +15,11 @@
 from .base import BaseKeyValueStorage
 from .in_memory import InMemoryKeyValueStorage
 from .json import JsonStorage
+from .redis import RedisStorage

 __all__ = [
     'BaseKeyValueStorage',
     'InMemoryKeyValueStorage',
     'JsonStorage',
     'RedisStorage',
 ]
camel/storages/key_value_storages/redis.py
ADDED
@@ -0,0 +1,169 @@
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+
+import asyncio
+import json
+import logging
+from typing import TYPE_CHECKING, Any, Dict, List, Optional
+
+from camel.storages.key_value_storages import BaseKeyValueStorage
+
+if TYPE_CHECKING:
+    from redis.asyncio import Redis
+
+logger = logging.getLogger(__name__)
+
+
+class RedisStorage(BaseKeyValueStorage):
+    r"""A concrete implementation of the :obj:`BaseCacheStorage` using Redis as
+    the backend. This is suitable for distributed cache systems that require
+    persistence and high availability.
+    """
+
+    def __init__(
+        self,
+        sid: str,
+        url: str = "redis://localhost:6379",
+        loop: Optional[asyncio.AbstractEventLoop] = None,
+        **kwargs,
+    ) -> None:
+        r"""Initializes the RedisStorage instance with the provided URL and
+        options.
+
+        Args:
+            sid (str): The ID for the storage instance to identify the
+                record space.
+            url (str): The URL for connecting to the Redis server.
+            **kwargs: Additional keyword arguments for Redis client
+                configuration.
+
+        Raises:
+            ImportError: If the `redis.asyncio` module is not installed.
+        """
+        try:
+            import redis.asyncio as aredis
+        except ImportError as exc:
+            logger.error(
+                "Please install `redis` first. You can install it by "
+                "running `pip install redis`."
+            )
+            raise exc
+
+        self._client: Optional[aredis.Redis] = None
+        self._url = url
+        self._sid = sid
+        self._loop = loop or asyncio.get_event_loop()
+
+        self._create_client(**kwargs)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc, tb):
+        self._run_async(self.close())
+
+    async def close(self) -> None:
+        r"""Closes the Redis client asynchronously."""
+        if self._client:
+            await self._client.close()
+
+    def _create_client(self, **kwargs) -> None:
+        r"""Creates the Redis client with the provided URL and options.
+
+        Args:
+            **kwargs: Additional keyword arguments for Redis client
+                configuration.
+        """
+        import redis.asyncio as aredis
+
+        self._client = aredis.from_url(self._url, **kwargs)
+
+    @property
+    def client(self) -> Optional["Redis"]:
+        r"""Returns the Redis client instance.
+
+        Returns:
+            redis.asyncio.Redis: The Redis client instance.
+        """
+        return self._client
+
+    def save(
+        self, records: List[Dict[str, Any]], expire: Optional[int] = None
+    ) -> None:
+        r"""Saves a batch of records to the key-value storage system."""
+        try:
+            self._run_async(self._async_save(records, expire))
+        except Exception as e:
+            logger.error(f"Error in save: {e}")
+
+    def load(self) -> List[Dict[str, Any]]:
+        r"""Loads all stored records from the key-value storage system.
+
+        Returns:
+            List[Dict[str, Any]]: A list of dictionaries, where each dictionary
+                represents a stored record.
+        """
+        try:
+            return self._run_async(self._async_load())
+        except Exception as e:
+            logger.error(f"Error in load: {e}")
+            return []
+
+    def clear(self) -> None:
+        r"""Removes all records from the key-value storage system."""
+        try:
+            self._run_async(self._async_clear())
+        except Exception as e:
+            logger.error(f"Error in clear: {e}")
+
+    async def _async_save(
+        self, records: List[Dict[str, Any]], expire: Optional[int] = None
+    ) -> None:
+        if self._client is None:
+            raise ValueError("Redis client is not initialized")
+        try:
+            value = json.dumps(records)
+            if expire:
+                await self._client.setex(self._sid, expire, value)
+            else:
+                await self._client.set(self._sid, value)
+        except Exception as e:
+            logger.error(f"Error saving records: {e}")
+
+    async def _async_load(self) -> List[Dict[str, Any]]:
+        if self._client is None:
+            raise ValueError("Redis client is not initialized")
+        try:
+            value = await self._client.get(self._sid)
+            if value:
+                return json.loads(value)
+            return []
+        except Exception as e:
+            logger.error(f"Error loading records: {e}")
+            return []
+
+    async def _async_clear(self) -> None:
+        if self._client is None:
+            raise ValueError("Redis client is not initialized")
+        try:
+            await self._client.delete(self._sid)
+        except Exception as e:
+            logger.error(f"Error clearing records: {e}")
+
+    def _run_async(self, coro):
+        if not self._loop.is_running():
+            return self._loop.run_until_complete(coro)
+        else:
+            future = asyncio.run_coroutine_threadsafe(coro, self._loop)
+            return future.result()
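A hypothetical usage sketch of the new `RedisStorage`, assuming the `redis` package is installed and a Redis server is reachable at the default URL:

```python
from camel.storages import RedisStorage

# `sid` identifies the record space; the URL assumes a local Redis server.
with RedisStorage(sid="session-123", url="redis://localhost:6379") as storage:
    storage.save([{"role": "user", "content": "hello"}], expire=3600)
    records = storage.load()  # the list saved above, or [] on error
    storage.clear()
```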
camel/storages/vectordb_storages/milvus.py
CHANGED
@@ -23,6 +23,7 @@ from camel.storages.vectordb_storages import (
     VectorDBStatus,
     VectorRecord,
 )
+from camel.utils import dependencies_required

 logger = logging.getLogger(__name__)

@@ -52,6 +53,7 @@ class MilvusStorage(BaseVectorStorage):
         ImportError: If `pymilvus` package is not installed.
     """

+    @dependencies_required('pymilvus')
     def __init__(
         self,
         vector_dim: int,
@@ -59,13 +61,7 @@ class MilvusStorage(BaseVectorStorage):
         collection_name: Optional[str] = None,
         **kwargs: Any,
     ) -> None:
-
-            from pymilvus import MilvusClient
-        except ImportError as exc:
-            raise ImportError(
-                "Please install `pymilvus` first. You can install it by "
-                "running `pip install pymilvus`."
-            ) from exc
+        from pymilvus import MilvusClient

         self._client: MilvusClient
         self._create_client(url_and_api_key, **kwargs)
camel/storages/vectordb_storages/qdrant.py
CHANGED
@@ -23,6 +23,7 @@ from camel.storages.vectordb_storages import (
     VectorRecord,
 )
 from camel.types import VectorDistance
+from camel.utils import dependencies_required

 _qdrant_local_client_map: Dict[str, Tuple[Any, int]] = {}

@@ -62,6 +63,7 @@ class QdrantStorage(BaseVectorStorage):
         be initialized with an in-memory storage (`":memory:"`).
     """

+    @dependencies_required('qdrant_client')
     def __init__(
         self,
         vector_dim: int,
@@ -72,13 +74,7 @@ class QdrantStorage(BaseVectorStorage):
         delete_collection_on_del: bool = False,
         **kwargs: Any,
     ) -> None:
-
-            from qdrant_client import QdrantClient
-        except ImportError as exc:
-            raise ImportError(
-                "Please install `qdrant-client` first. You can install it by "
-                "running `pip install qdrant-client`."
-            ) from exc
+        from qdrant_client import QdrantClient

         self._client: QdrantClient
         self._local_path: Optional[str] = None
camel/toolkits/__init__.py
CHANGED
@@ -13,9 +13,11 @@
 # =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========

 from .base import BaseToolkit
+from .code_execution import CodeExecutionToolkit
 from .github_toolkit import GithubToolkit

 __all__ = [
     'BaseToolkit',
     'GithubToolkit',
+    'CodeExecutionToolkit',
 ]
camel/toolkits/code_execution.py
ADDED
@@ -0,0 +1,69 @@
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+# =========== Copyright 2023 @ CAMEL-AI.org. All Rights Reserved. ===========
+from typing import List, Literal
+
+from camel.functions import OpenAIFunction
+from camel.interpreters import InternalPythonInterpreter
+
+from .base import BaseToolkit
+
+
+class CodeExecutionToolkit(BaseToolkit):
+    r"""A tookit for code execution.
+
+    Args:
+        sandbox (str): the environment type used to execute code.
+    """
+
+    def __init__(
+        self,
+        sandbox: Literal[
+            "internal_python", "jupyter", "docker"
+        ] = "internal_python",
+        verbose: bool = False,
+    ) -> None:
+        # TODO: Add support for docker and jupyter.
+        self.verbose = verbose
+        if sandbox == "internal_python":
+            self.interpreter = InternalPythonInterpreter()
+        else:
+            raise RuntimeError(
+                f"The sandbox type `{sandbox}` is not supported."
+            )
+
+    def execute_code(self, code: str) -> str:
+        r"""Execute a given code snippet.
+
+        Args:
+            code (str): The input code to the Code Interpreter tool call.
+
+        Returns:
+            str: The text output from the Code Interpreter tool call.
+        """
+        output = self.interpreter.run(code, "python")
+        # ruff: noqa: E501
+        content = f"Executed the code below:\n```py\n{code}\n```\n> Executed Results:\n{output}"
+        if self.verbose:
+            print(content)
+        return content
+
+    def get_tools(self) -> List[OpenAIFunction]:
+        r"""Returns a list of OpenAIFunction objects representing the
+        functions in the toolkit.
+
+        Returns:
+            List[OpenAIFunction]: A list of OpenAIFunction objects
+                representing the functions in the toolkit.
+        """
+        return [OpenAIFunction(self.execute_code)]
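A hypothetical usage sketch of the new toolkit with its default `internal_python` sandbox:

```python
from camel.toolkits import CodeExecutionToolkit

toolkit = CodeExecutionToolkit(verbose=True)

# Run a snippet directly through the internal Python interpreter...
result = toolkit.execute_code("1 + 1")

# ...or expose it to an agent as an OpenAI-style function tool.
tools = toolkit.get_tools()
```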