ag2 0.8.0b1__tar.gz → 0.8.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ag2 might be problematic.
- {ag2-0.8.0b1/ag2.egg-info → ag2-0.8.2}/PKG-INFO +10 -7
- {ag2-0.8.0b1 → ag2-0.8.2}/README.md +6 -6
- {ag2-0.8.0b1 → ag2-0.8.2/ag2.egg-info}/PKG-INFO +10 -7
- {ag2-0.8.0b1 → ag2-0.8.2}/ag2.egg-info/SOURCES.txt +2 -0
- ag2-0.8.2/ag2.egg-info/requires.txt +154 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/pyproject.toml +68 -26
- {ag2-0.8.0b1 → ag2-0.8.2}/setup_ag2.py +3 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/test/test_code_utils.py +12 -47
- ag2-0.8.2/test/test_import_utils.py +451 -0
- ag2-0.8.2/test/test_json_utils.py +48 -0
- ag2-0.8.2/test/test_llm_config.py +770 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/test/test_logging.py +4 -4
- {ag2-0.8.0b1 → ag2-0.8.2}/test/test_notebook.py +21 -21
- {ag2-0.8.0b1 → ag2-0.8.2}/test/test_retrieve_utils.py +10 -4
- {ag2-0.8.0b1 → ag2-0.8.2}/test/test_token_count.py +2 -2
- ag2-0.8.0b1/ag2.egg-info/requires.txt +0 -145
- ag2-0.8.0b1/test/test_import_utils.py +0 -200
- {ag2-0.8.0b1 → ag2-0.8.2}/LICENSE +0 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/NOTICE.md +0 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/ag2.egg-info/dependency_links.txt +0 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/ag2.egg-info/top_level.txt +0 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/setup.cfg +0 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/test/test_browser_utils.py +0 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/test/test_conftest.py +0 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/test/test_graph_utils.py +0 -0
- {ag2-0.8.0b1 → ag2-0.8.2}/test/test_import.py +0 -0
{ag2-0.8.0b1/ag2.egg-info → ag2-0.8.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ag2
-Version: 0.8.0b1
+Version: 0.8.2
 Summary: Alias package for pyautogen
 Home-page: https://github.com/ag2ai/ag2
 Author: Chi Wang & Qingyun Wu
@@ -25,6 +25,7 @@ Provides-Extra: graph-rag-falkor-db
 Provides-Extra: rag
 Provides-Extra: crawl4ai
 Provides-Extra: browser-use
+Provides-Extra: google-search
 Provides-Extra: neo4j
 Provides-Extra: twilio
 Provides-Extra: interop-crewai
@@ -39,6 +40,7 @@ Provides-Extra: teachable
 Provides-Extra: lmm
 Provides-Extra: graph
 Provides-Extra: gemini
+Provides-Extra: gemini-realtime
 Provides-Extra: together
 Provides-Extra: websurfer
 Provides-Extra: redis
@@ -52,6 +54,7 @@ Provides-Extra: groq
 Provides-Extra: cohere
 Provides-Extra: ollama
 Provides-Extra: bedrock
+Provides-Extra: deepseek
 Provides-Extra: commsagent-discord
 Provides-Extra: commsagent-slack
 Provides-Extra: commsagent-telegram
@@ -158,14 +161,14 @@ You can use the sample file `OAI_CONFIG_LIST_sample` as a template.
 Create a script or a Jupyter Notebook and run your first agent.
 
 ```python
-from autogen import AssistantAgent, UserProxyAgent, config_list_from_json
+from autogen import AssistantAgent, UserProxyAgent, LLMConfig
+
+llm_config = LLMConfig.from_json(path="OAI_CONFIG_LIST")
 
-llm_config = {
-    "config_list": config_list_from_json(env_or_file="OAI_CONFIG_LIST")
-}
 
-assistant = AssistantAgent("assistant", llm_config=llm_config)
-user_proxy = UserProxyAgent("user_proxy", code_execution_config={"work_dir": "coding", "use_docker": False})
+with llm_config:
+    assistant = AssistantAgent("assistant", llm_config=llm_config)
+user_proxy = UserProxyAgent("user_proxy", code_execution_config={"work_dir": "coding", "use_docker": False})
 user_proxy.initiate_chat(assistant, message="Plot a chart of NVDA and TESLA stock price change YTD.")
 # This initiates an automated chat between the two agents to solve the task
 ```
{ag2-0.8.0b1 → ag2-0.8.2}/README.md

@@ -93,14 +93,14 @@ You can use the sample file `OAI_CONFIG_LIST_sample` as a template.
 Create a script or a Jupyter Notebook and run your first agent.
 
 ```python
-from autogen import AssistantAgent, UserProxyAgent, config_list_from_json
+from autogen import AssistantAgent, UserProxyAgent, LLMConfig
+
+llm_config = LLMConfig.from_json(path="OAI_CONFIG_LIST")
 
-llm_config = {
-    "config_list": config_list_from_json(env_or_file="OAI_CONFIG_LIST")
-}
 
-assistant = AssistantAgent("assistant", llm_config=llm_config)
-user_proxy = UserProxyAgent("user_proxy", code_execution_config={"work_dir": "coding", "use_docker": False})
+with llm_config:
+    assistant = AssistantAgent("assistant", llm_config=llm_config)
+user_proxy = UserProxyAgent("user_proxy", code_execution_config={"work_dir": "coding", "use_docker": False})
 user_proxy.initiate_chat(assistant, message="Plot a chart of NVDA and TESLA stock price change YTD.")
 # This initiates an automated chat between the two agents to solve the task
 ```
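The quick start in the README diff above now loads model settings through the new LLMConfig class instead of building a plain dict with config_list_from_json. A minimal sketch of the end-to-end flow, assuming the usual OAI_CONFIG_LIST format (a JSON list of model entries; the file contents and model name below are placeholders, not part of the release):

```python
import json

from autogen import AssistantAgent, LLMConfig

# Hypothetical OAI_CONFIG_LIST contents: a JSON list of model entries.
# Real files typically also carry api_type, base_url, tags, etc.
sample_config = [{"model": "gpt-4o-mini", "api_key": "sk-PLACEHOLDER"}]
with open("OAI_CONFIG_LIST", "w") as f:
    json.dump(sample_config, f)

# Load it the way the updated README does and create an agent under it.
llm_config = LLMConfig.from_json(path="OAI_CONFIG_LIST")
with llm_config:
    assistant = AssistantAgent("assistant", llm_config=llm_config)
```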
{ag2-0.8.0b1 → ag2-0.8.2/ag2.egg-info}/PKG-INFO

Identical to the PKG-INFO diff shown above (+10 -7): the version bump to 0.8.2, the new google-search, gemini-realtime, and deepseek extras, and the same LLMConfig quick-start update in the embedded README.
ag2-0.8.2/ag2.egg-info/requires.txt (new file)

@@ -0,0 +1,154 @@
+pyautogen==0.8.2
+
+[anthropic]
+pyautogen[anthropic]==0.8.2
+
+[autobuild]
+pyautogen[autobuild]==0.8.2
+
+[bedrock]
+pyautogen[bedrock]==0.8.2
+
+[blendsearch]
+pyautogen[blendsearch]==0.8.2
+
+[browser-use]
+pyautogen[browser-use]==0.8.2
+
+[captainagent]
+pyautogen[captainagent]==0.8.2
+
+[cerebras]
+pyautogen[cerebras]==0.8.2
+
+[cohere]
+pyautogen[cohere]==0.8.2
+
+[commsagent-discord]
+pyautogen[commsagent-discord]==0.8.2
+
+[commsagent-slack]
+pyautogen[commsagent-slack]==0.8.2
+
+[commsagent-telegram]
+pyautogen[commsagent-telegram]==0.8.2
+
+[cosmosdb]
+pyautogen[cosmosdb]==0.8.2
+
+[crawl4ai]
+pyautogen[crawl4ai]==0.8.2
+
+[deepseek]
+pyautogen[deepseek]==0.8.2
+
+[dev]
+pyautogen[dev]==0.8.2
+
+[docs]
+pyautogen[docs]==0.8.2
+
+[flaml]
+pyautogen[flaml]==0.8.2
+
+[gemini]
+pyautogen[gemini]==0.8.2
+
+[gemini-realtime]
+pyautogen[gemini-realtime]==0.8.2
+
+[google-search]
+pyautogen[google-search]==0.8.2
+
+[graph]
+pyautogen[graph]==0.8.2
+
+[graph-rag-falkor-db]
+pyautogen[graph-rag-falkor-db]==0.8.2
+
+[groq]
+pyautogen[groq]==0.8.2
+
+[interop]
+pyautogen[interop]==0.8.2
+
+[interop-crewai]
+pyautogen[interop-crewai]==0.8.2
+
+[interop-langchain]
+pyautogen[interop-langchain]==0.8.2
+
+[interop-pydantic-ai]
+pyautogen[interop-pydantic-ai]==0.8.2
+
+[jupyter-executor]
+pyautogen[jupyter-executor]==0.8.2
+
+[lint]
+pyautogen[lint]==0.8.2
+
+[lmm]
+pyautogen[lmm]==0.8.2
+
+[long-context]
+pyautogen[long-context]==0.8.2
+
+[mathchat]
+pyautogen[mathchat]==0.8.2
+
+[mistral]
+pyautogen[mistral]==0.8.2
+
+[neo4j]
+pyautogen[neo4j]==0.8.2
+
+[ollama]
+pyautogen[ollama]==0.8.2
+
+[openai]
+pyautogen[openai]==0.8.2
+
+[openai-realtime]
+pyautogen[openai-realtime]==0.8.2
+
+[rag]
+pyautogen[rag]==0.8.2
+
+[redis]
+pyautogen[redis]==0.8.2
+
+[retrievechat]
+pyautogen[retrievechat]==0.8.2
+
+[retrievechat-couchbase]
+pyautogen[retrievechat-couchbase]==0.8.2
+
+[retrievechat-mongodb]
+pyautogen[retrievechat-mongodb]==0.8.2
+
+[retrievechat-pgvector]
+pyautogen[retrievechat-pgvector]==0.8.2
+
+[retrievechat-qdrant]
+pyautogen[retrievechat-qdrant]==0.8.2
+
+[teachable]
+pyautogen[teachable]==0.8.2
+
+[test]
+pyautogen[test]==0.8.2
+
+[together]
+pyautogen[together]==0.8.2
+
+[twilio]
+pyautogen[twilio]==0.8.2
+
+[types]
+pyautogen[types]==0.8.2
+
+[websockets]
+pyautogen[websockets]==0.8.2
+
+[websurfer]
+pyautogen[websurfer]==0.8.2
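Every extra in the new requires.txt simply forwards to the matching pyautogen extra at the pinned 0.8.2 version, so installing ag2[gemini] and pyautogen[gemini]==0.8.2 should resolve to the same set of packages. A small sketch for inspecting this from an installed environment, assuming ag2 0.8.2 is present (standard library only):

```python
from importlib.metadata import metadata, requires

# Read the installed ag2 distribution's metadata (the PKG-INFO shown above).
md = metadata("ag2")
print(md["Version"])                 # expected: 0.8.2
print(md.get_all("Provides-Extra"))  # the extras declared in requires.txt

# Each requirement forwards to pyautogen at the same pinned version,
# e.g. 'pyautogen[anthropic]==0.8.2; extra == "anthropic"'.
for req in requires("ag2") or []:
    print(req)
```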
{ag2-0.8.0b1 → ag2-0.8.2}/pyproject.toml

@@ -60,8 +60,9 @@ dependencies = [
     "docker",
     "packaging",
     "asyncer==0.0.8",
-    "fast-depends>=2.4.12,<3",
+    # "fast-depends>=2.4.12,<3", # integrated into the package
     "httpx>=0.28.1,<1",
+    "anyio>=3.0.0,<5.0.0" # needed by the internal fast-depends
 ]
 
 [project.optional-dependencies]
@@ -76,11 +77,11 @@ flaml = [
 # public distributions
 
 openai = [
-    "openai>=1.
+    "openai>=1.66.2",
 ]
 
 openai-realtime = [
-    "
+    "pyautogen[openai]",
     "openai[realtime]",
 ]
 
@@ -139,6 +140,10 @@ rag = [
     "chromadb>=0.5,<1",
     "llama-index>=0.12,<1",
     "llama-index-vector-stores-chroma==0.4.1",
+    "llama-index-vector-stores-mongodb==0.6.0",
+    "llama-index-embeddings-huggingface==0.5.2",
+    "llama-index-llms-langchain==0.6.0",
+    "requests>=2.32.3,<3",
 ]
 
 
@@ -152,12 +157,15 @@ browser-use = [
     "browser-use==0.1.37",
 ]
 
+google-search = [
+    "google-api-python-client>=2.163.0,<3.0",
+]
+
 neo4j = [
     "docx2txt==0.8",
-    "llama-index
+    "llama-index>=0.12,<1",
     "llama-index-graph-stores-neo4j==0.4.6",
-    "llama-index-
-    "llama-index-readers-web==0.3.5",
+    "llama-index-readers-web==0.3.7",
 ]
 
 # used for agentchat_realtime_swarm notebook and realtime agent twilio demo
@@ -172,13 +180,12 @@ interop-crewai = [
     "weaviate-client>=4,<5; python_version>='3.10' and python_version<'3.13'",
 ]
 interop-langchain = ["langchain-community>=0.3.12,<1"]
-interop-pydantic-ai = ["pydantic-ai==0.0.
+interop-pydantic-ai = ["pydantic-ai==0.0.31"]
 interop =[
     "pyautogen[interop-crewai, interop-langchain, interop-pydantic-ai]",
 ]
 
-
-autobuild = ["chromadb", "sentence-transformers", "huggingface-hub", "pysqlite3-binary"]
+autobuild = ["chromadb", "sentence-transformers", "huggingface-hub"]
 
 blendsearch = ["flaml[blendsearch]"]
 mathchat = ["sympy", "wolframalpha"]
@@ -188,29 +195,34 @@ lmm = ["replicate", "pillow"]
 graph = ["networkx", "matplotlib"]
 gemini = [
     "google-api-core",
-    "google-genai>=1.2.0
+    "google-genai>=1.2.0",
     "google-cloud-aiplatform",
     "google-auth",
     "pillow",
     "jsonschema",
-
+]
+
+gemini-realtime = [
+    "pyautogen[gemini]",
+    "pyautogen[websockets]",
 ]
 
 together = ["together>=1.2"]
 websurfer = ["beautifulsoup4", "markdownify", "pdfminer.six", "pathvalidate"]
 redis = ["redis"]
 cosmosdb = ["azure-cosmos>=4.2.0"]
-websockets = ["websockets>=14.0,<
+websockets = ["websockets>=14.0,<16"]
 long-context = ["llmlingua<0.3"]
 anthropic = ["anthropic[vertex]>=0.23.1"]
 cerebras = ["cerebras_cloud_sdk>=1.0.0"]
 mistral = ["mistralai>=1.0.1"]
 groq = ["groq>=0.9.0"]
 cohere = ["cohere>=5.13.5"]
-ollama = ["ollama>=0.4.
+ollama = ["ollama>=0.4.7", "fix_busted_json>=0.0.18"]
 bedrock = ["boto3>=1.34.149"]
+deepseek = ["pyautogen[openai]"]
 
-commsagent-discord = ["discord.py>=2.4.0,<2.
+commsagent-discord = ["discord.py>=2.4.0,<2.6"]
 commsagent-slack = ["slack_sdk>=3.33.0,<3.40"]
 commsagent-telegram = ["telethon>=1.38.1, <2"]
 
@@ -223,27 +235,30 @@ test = [
     "nbformat==5.10.4",
     "pytest-cov==6.0.0",
     "pytest-asyncio==0.25.3",
-    "pytest==8.3.
-    "mock==5.
+    "pytest==8.3.5",
+    "mock==5.2.0",
     "pandas==2.2.3",
-    "fastapi==0.115.
+    "fastapi==0.115.11",
+    "dirty-equals==0.9.0",
 ]
 
 docs = [
-    "mkdocs-material==9.6.
-    "mkdocstrings[python]==0.
+    "mkdocs-material==9.6.9",
+    "mkdocstrings[python]==0.29.0",
     "mkdocs-literate-nav==0.6.1",
     "mdx-include==1.4.2",
+    # currently problematic and cannot be upgraded
     "mkdocs-git-revision-date-localized-plugin==1.3.0",
     "mike==2.1.3",
-    "typer==0.15.
+    "typer==0.15.2",
     "mkdocs-minify-plugin==0.8.0",
     "mkdocs-macros-plugin==1.3.7", # includes with variables
     "mkdocs-glightbox==0.4.0", # img zoom
+    "mkdocs-ezlinks-plugin==0.1.14", # converts abs links to rel links
     "pillow", # required for mkdocs-glightbo
     "cairosvg", # required for mkdocs-glightbo
     "pdoc3==0.11.5",
-    "jinja2==3.1.
+    "jinja2==3.1.6",
     "pyyaml==6.0.2",
     "termcolor==2.5.0",
     "nbclient==0.10.2",
@@ -255,7 +270,7 @@ types = [
 ]
 
 lint = [
-    "ruff==0.9.
+    "ruff==0.9.9",
     "codespell==2.4.1",
     "pyupgrade-directories==0.3.0",
 ]
@@ -265,7 +280,7 @@ dev = [
     "pyautogen[lint,test,types,docs]",
     "pre-commit==4.1.0",
     "detect-secrets==1.5.0",
-    "uv==0.6.
+    "uv==0.6.6",
 ]
 
 
@@ -294,23 +309,31 @@ exclude = ["test", "notebook"]
 "autogen" = "autogen"
 "autogen/agentchat/contrib/captainagent/tools" = "autogen/agentchat/contrib/captainagent/tools"
 
-
 [tool.pytest.ini_options]
 addopts = '--cov=autogen --cov-append --cov-branch --cov-report=xml -m "not conda"'
 testpaths = [
     "test",
 ]
+
 markers = [
     "conda: test related to conda forge distribution",
     "all",
     "openai",
+    "openai_realtime",
     "gemini",
+    "gemini_realtime",
     "anthropic",
     "deepseek",
+    "cosmosdb",
+    "ollama",
+    "bedrock",
+    "cerebras",
+    "aux_neg_flag",
+    # optional deps
     "redis",
     "docker",
     "docs",
-
+    "rag",
     "jupyter_executor",
     "retrievechat",
     "retrievechat_pgvector",
@@ -322,7 +345,12 @@ markers = [
     "interop",
     "browser_use",
     "crawl4ai",
+    "google_search",
     "websockets",
+    "commsagent_discord",
+    "commsagent_slack",
+    "commsagent-telegram",
+    "lmm",
 ]
 
 [tool.black]
@@ -379,6 +407,8 @@ select = [
 # "UP", # pyupgrade https://docs.astral.sh/ruff/rules/#pydocstyle-d
 ]
 
+extend-select = ["D417"]
+
 ignore = ["E501", "F403", "C901",
     "E402",
     "E721",
@@ -404,28 +434,40 @@ convention = "google"
 
 [tool.mypy]
 files = [
+    "autogen/agentchat/agent.py",
+    # "autogen/agentchat/cenversable_agent.py",
     "autogen/agentchat/contrib/rag",
     "autogen/agentchat/contrib/graph_rag",
+    "autogen/agentchat/contrib/swarm_agent.py",
     "autogen/agentchat/realtime_agent",
+    "autogen/agentchat/utils.py",
     "autogen/agents",
     "autogen/coding",
     "autogen/exception_utils.py",
+    "autogen/fast_depends",
     "autogen/import_utils.py",
     "autogen/interop",
     "autogen/io",
+    "autogen/json_utils.py",
+    "autogen/llm_config.py",
     "autogen/logger",
     "autogen/messages",
     "autogen/oai/oai_models",
     "autogen/oai/openai_utils.py",
     "autogen/tools",
+    "autogen/_website",
     "website/*.py",
+    "test/agentchat/contrib/rag",
     "test/agentchat/contrib/graph_rag",
-
+    "test/agentchat/contrib/test_swarm.py",
     "test/agentchat/realtime_agent",
     "test/agents",
     "test/conftest.py",
+    # "test/fast_depends",
     "test/interop",
     "test/io",
+    "test/test_json_utils.py",
+    "test/test_llm_config.py",
     "test/messages",
     "test/test_import_utils.py",
     "test/test_import.py",
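The pyproject.toml changes above register several new pytest markers (openai_realtime, gemini_realtime, google_search, and so on) alongside the new optional-dependency groups. A small, hypothetical sketch of how a test module could use one of these registered markers so it can be selected or skipped per integration (only the marker name comes from the diff; the test body is made up):

```python
import pytest


@pytest.mark.google_search  # marker registered in [tool.pytest.ini_options]
def test_google_search_smoke() -> None:
    # A real test would exercise the google-search extra
    # (google-api-python-client); this placeholder only shows marker usage.
    assert True
```

Running `pytest -m google_search` would then select only the tests tagged for that integration, which presumably is how CI splits runs per optional dependency.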
{ag2-0.8.0b1 → ag2-0.8.2}/setup_ag2.py

@@ -41,6 +41,7 @@ setuptools.setup(
         "rag": ["pyautogen[rag]==" + __version__],
         "crawl4ai": ["pyautogen[crawl4ai]==" + __version__],
         "browser-use": ["pyautogen[browser-use]==" + __version__],
+        "google-search": ["pyautogen[google-search]==" + __version__],
         "neo4j": ["pyautogen[neo4j]==" + __version__],
         "twilio": ["pyautogen[twilio]==" + __version__],
         "interop-crewai": ["pyautogen[interop-crewai]==" + __version__],
@@ -55,6 +56,7 @@ setuptools.setup(
         "lmm": ["pyautogen[lmm]==" + __version__],
         "graph": ["pyautogen[graph]==" + __version__],
         "gemini": ["pyautogen[gemini]==" + __version__],
+        "gemini-realtime": ["pyautogen[gemini-realtime]==" + __version__],
         "together": ["pyautogen[together]==" + __version__],
         "websurfer": ["pyautogen[websurfer]==" + __version__],
         "redis": ["pyautogen[redis]==" + __version__],
@@ -68,6 +70,7 @@ setuptools.setup(
         "cohere": ["pyautogen[cohere]==" + __version__],
         "ollama": ["pyautogen[ollama]==" + __version__],
         "bedrock": ["pyautogen[bedrock]==" + __version__],
+        "deepseek": ["pyautogen[deepseek]==" + __version__],
         "commsagent-discord": ["pyautogen[commsagent-discord]==" + __version__],
         "commsagent-slack": ["pyautogen[commsagent-slack]==" + __version__],
         "commsagent-telegram": ["pyautogen[commsagent-telegram]==" + __version__],