ag2 0.5.3b1__tar.gz → 0.6.0b1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ag2-0.5.3b1/ag2.egg-info → ag2-0.6.0b1}/PKG-INFO +9 -4
- {ag2-0.5.3b1 → ag2-0.6.0b1}/README.md +2 -2
- {ag2-0.5.3b1 → ag2-0.6.0b1/ag2.egg-info}/PKG-INFO +9 -4
- ag2-0.6.0b1/ag2.egg-info/requires.txt +106 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/pyproject.toml +6 -2
- {ag2-0.5.3b1 → ag2-0.6.0b1}/setup.py +32 -14
- {ag2-0.5.3b1 → ag2-0.6.0b1}/setup_ag2.py +6 -1
- {ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_browser_utils.py +3 -3
- {ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_code_utils.py +1 -1
- {ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_logging.py +6 -6
- {ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_retrieve_utils.py +1 -1
- {ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_token_count.py +2 -0
- ag2-0.5.3b1/ag2.egg-info/requires.txt +0 -91
- {ag2-0.5.3b1 → ag2-0.6.0b1}/LICENSE +0 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/NOTICE.md +0 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/ag2.egg-info/SOURCES.txt +0 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/ag2.egg-info/dependency_links.txt +0 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/ag2.egg-info/top_level.txt +0 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/setup.cfg +0 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_function_utils.py +0 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_graph_utils.py +0 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_notebook.py +0 -0
- {ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_pydantic.py +0 -0
{ag2-0.5.3b1/ag2.egg-info → ag2-0.6.0b1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ag2
-Version: 0.5.3b1
+Version: 0.6.0b1
 Summary: Alias package for pyautogen
 Home-page: https://github.com/ag2ai/ag2
 Author: Chi Wang & Qingyun Wu
@@ -10,7 +10,7 @@ Platform: UNKNOWN
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.
+Requires-Python: >=3.9,<3.14
 Description-Content-Type: text/markdown
 Provides-Extra: anthropic
 Provides-Extra: autobuild
@@ -24,6 +24,10 @@ Provides-Extra: gemini
 Provides-Extra: graph
 Provides-Extra: graph-rag-falkor-db
 Provides-Extra: groq
+Provides-Extra: interop
+Provides-Extra: interop-crewai
+Provides-Extra: interop-langchain
+Provides-Extra: interop-pydantic-ai
 Provides-Extra: jupyter-executor
 Provides-Extra: lmm
 Provides-Extra: long-context
@@ -39,6 +43,7 @@ Provides-Extra: retrievechat-qdrant
 Provides-Extra: teachable
 Provides-Extra: test
 Provides-Extra: together
+Provides-Extra: twilio
 Provides-Extra: types
 Provides-Extra: websockets
 Provides-Extra: websurfer
@@ -50,7 +55,7 @@ License-File: NOTICE.md
 
 [](https://badge.fury.io/py/autogen)
 [](https://github.com/ag2ai/ag2/actions/workflows/python-package.yml)
-
+
 [](https://discord.gg/pAbnFJrkgZ)
 [](https://x.com/ag2ai)
 
@@ -172,7 +177,7 @@ Find detailed instructions for users [here](https://ag2ai.github.io/ag2/docs/ins
 
 ### Option 2. Install AG2 Locally
 
-AG2 requires **Python version >= 3.
+AG2 requires **Python version >= 3.9, < 3.14**. It can be installed from pip:
 
 ```bash
 pip install ag2
{ag2-0.5.3b1 → ag2-0.6.0b1}/README.md

@@ -3,7 +3,7 @@
 
 [](https://badge.fury.io/py/autogen)
 [](https://github.com/ag2ai/ag2/actions/workflows/python-package.yml)
-
+
 [](https://discord.gg/pAbnFJrkgZ)
 [](https://x.com/ag2ai)
 
@@ -125,7 +125,7 @@ Find detailed instructions for users [here](https://ag2ai.github.io/ag2/docs/ins
 
 ### Option 2. Install AG2 Locally
 
-AG2 requires **Python version >= 3.
+AG2 requires **Python version >= 3.9, < 3.14**. It can be installed from pip:
 
 ```bash
 pip install ag2
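The README change above narrows the supported interpreter range to 3.9 through 3.13. A minimal sanity check before upgrading might look like the following sketch (the beta pin is taken from this diff; `python3` and `pip` are assumed to point at the same environment):

```bash
# Confirm the interpreter falls inside the new supported range (3.9 <= version < 3.14),
# then install the beta explicitly. pip will refuse the install outside that range
# because of the tightened Requires-Python metadata.
python3 -c 'import sys; assert (3, 9) <= sys.version_info[:2] < (3, 14), sys.version'
pip install "ag2==0.6.0b1"
```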
{ag2-0.5.3b1 → ag2-0.6.0b1/ag2.egg-info}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ag2
-Version: 0.5.3b1
+Version: 0.6.0b1
 Summary: Alias package for pyautogen
 Home-page: https://github.com/ag2ai/ag2
 Author: Chi Wang & Qingyun Wu
@@ -10,7 +10,7 @@ Platform: UNKNOWN
 Classifier: Programming Language :: Python :: 3
 Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
-Requires-Python: >=3.
+Requires-Python: >=3.9,<3.14
 Description-Content-Type: text/markdown
 Provides-Extra: anthropic
 Provides-Extra: autobuild
@@ -24,6 +24,10 @@ Provides-Extra: gemini
 Provides-Extra: graph
 Provides-Extra: graph-rag-falkor-db
 Provides-Extra: groq
+Provides-Extra: interop
+Provides-Extra: interop-crewai
+Provides-Extra: interop-langchain
+Provides-Extra: interop-pydantic-ai
 Provides-Extra: jupyter-executor
 Provides-Extra: lmm
 Provides-Extra: long-context
@@ -39,6 +43,7 @@ Provides-Extra: retrievechat-qdrant
 Provides-Extra: teachable
 Provides-Extra: test
 Provides-Extra: together
+Provides-Extra: twilio
 Provides-Extra: types
 Provides-Extra: websockets
 Provides-Extra: websurfer
@@ -50,7 +55,7 @@ License-File: NOTICE.md
 
 [](https://badge.fury.io/py/autogen)
 [](https://github.com/ag2ai/ag2/actions/workflows/python-package.yml)
-
+
 [](https://discord.gg/pAbnFJrkgZ)
 [](https://x.com/ag2ai)
 
@@ -172,7 +177,7 @@ Find detailed instructions for users [here](https://ag2ai.github.io/ag2/docs/ins
 
 ### Option 2. Install AG2 Locally
 
-AG2 requires **Python version >= 3.
+AG2 requires **Python version >= 3.9, < 3.14**. It can be installed from pip:
 
 ```bash
 pip install ag2
ag2-0.6.0b1/ag2.egg-info/requires.txt (new file)

@@ -0,0 +1,106 @@
+pyautogen==0.6.0b1
+
+[anthropic]
+pyautogen[anthropic]==0.6.0b1
+
+[autobuild]
+pyautogen[autobuild]==0.6.0b1
+
+[bedrock]
+pyautogen[bedrock]==0.6.0b1
+
+[blendsearch]
+pyautogen[blendsearch]==0.6.0b1
+
+[captainagent]
+pyautogen[captainagent]==0.6.0b1
+
+[cerebras]
+pyautogen[cerebras]==0.6.0b1
+
+[cohere]
+pyautogen[cohere]==0.6.0b1
+
+[cosmosdb]
+pyautogen[cosmosdb]==0.6.0b1
+
+[gemini]
+pyautogen[gemini]==0.6.0b1
+
+[graph]
+pyautogen[graph]==0.6.0b1
+
+[graph-rag-falkor-db]
+pyautogen[graph-rag-falkor-db]==0.6.0b1
+
+[groq]
+pyautogen[groq]==0.6.0b1
+
+[interop]
+pyautogen[interop]==0.6.0b1
+
+[interop-crewai]
+pyautogen[interop-crewai]==0.6.0b1
+
+[interop-langchain]
+pyautogen[interop-langchain]==0.6.0b1
+
+[interop-pydantic-ai]
+pyautogen[interop-pydantic-ai]==0.6.0b1
+
+[jupyter-executor]
+pyautogen[jupyter-executor]==0.6.0b1
+
+[lmm]
+pyautogen[lmm]==0.6.0b1
+
+[long-context]
+pyautogen[long-context]==0.6.0b1
+
+[mathchat]
+pyautogen[mathchat]==0.6.0b1
+
+[mistral]
+pyautogen[mistral]==0.6.0b1
+
+[neo4j]
+pyautogen[neo4j]==0.6.0b1
+
+[ollama]
+pyautogen[ollama]==0.6.0b1
+
+[redis]
+pyautogen[redis]==0.6.0b1
+
+[retrievechat]
+pyautogen[retrievechat]==0.6.0b1
+
+[retrievechat-mongodb]
+pyautogen[retrievechat-mongodb]==0.6.0b1
+
+[retrievechat-pgvector]
+pyautogen[retrievechat-pgvector]==0.6.0b1
+
+[retrievechat-qdrant]
+pyautogen[retrievechat-qdrant]==0.6.0b1
+
+[teachable]
+pyautogen[teachable]==0.6.0b1
+
+[test]
+pyautogen[test]==0.6.0b1
+
+[together]
+pyautogen[together]==0.6.0b1
+
+[twilio]
+pyautogen[twilio]==0.6.0b1
+
+[types]
+pyautogen[types]==0.6.0b1
+
+[websockets]
+pyautogen[websockets]==0.6.0b1
+
+[websurfer]
+pyautogen[websurfer]==0.6.0b1
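Because ag2 is an alias package, every extra in the new requires.txt forwards to the matching pyautogen extra pinned to the same version. A hedged example of pulling in the interoperability and twilio extras introduced in this release (extra names come from this diff; whether a given extra resolves on your interpreter depends on the environment markers visible in the setup.py diff further down, e.g. the crewai bridge needs Python 3.10–3.12):

```bash
# Installing an ag2 extra resolves to the equivalent pyautogen extra at 0.6.0b1.
pip install "ag2[interop]==0.6.0b1"            # crewai + langchain + pydantic-ai bridges
pip install "ag2[interop-langchain]==0.6.0b1"  # just the LangChain bridge
pip install "ag2[twilio]==0.6.0b1"             # fastapi, uvicorn and twilio pins from setup.py
```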
{ag2-0.5.3b1 → ag2-0.6.0b1}/pyproject.toml

@@ -60,9 +60,13 @@ files = [
 "autogen/_pydantic.py",
 "autogen/function_utils.py",
 "autogen/io",
+"autogen/tools",
+"autogen/interop",
 "test/test_pydantic.py",
 "test/test_function_utils.py",
 "test/io",
+"test/tools",
+"test/interop",
 ]
 exclude = [
 "autogen/math_utils\\.py",
@@ -73,7 +77,6 @@ exclude = [
 ]
 
 strict = true
-python_version = "3.8"
 ignore_missing_imports = true
 install_types = true
 non_interactive = true
@@ -88,7 +91,8 @@ no_implicit_optional = true
 check_untyped_defs = true
 warn_return_any = true
 show_error_codes = true
-
+
+warn_unused_ignores = false
 
 disallow_incomplete_defs = true
 disallow_untyped_decorators = true
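The `[tool.mypy]` `files` list now also covers the new `autogen/tools`, `autogen/interop`, `test/tools` and `test/interop` paths, and the explicit `python_version = "3.8"` pin is gone, so mypy defaults to the interpreter it runs under. Assuming the checks are run from the repository root with the `types` extra installed, a plain invocation should pick those paths up automatically (a sketch, not the project's documented workflow):

```bash
# The types extra now bundles mypy plus the test, jupyter-executor and interop
# dependencies (see the setup.py diff below). With files= set in pyproject.toml,
# running mypy with no arguments type-checks the configured paths, including the
# newly added tools/ and interop/ packages.
pip install "ag2[types]==0.6.0b1"
mypy
```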
{ag2-0.5.3b1 → ag2-0.6.0b1}/setup.py

@@ -25,7 +25,7 @@ __version__ = version["__version__"]
 current_os = platform.system()
 
 install_requires = [
-"openai>=1.
+"openai>=1.57",
 "diskcache",
 "termcolor",
 "flaml",
@@ -38,6 +38,19 @@ install_requires = [
 "pydantic>=1.10,<3,!=2.6.0", # could be both V1 and V2
 "docker",
 "packaging",
+"websockets>=14,<15",
+"asyncer>=0.0.8",
+]
+
+test = [
+"ipykernel",
+"nbconvert",
+"nbformat",
+"pre-commit",
+"pytest-cov>=5",
+"pytest-asyncio",
+"pytest>=8,<9",
+"pandas",
 ]
 
 jupyter_executor = [
@@ -69,6 +82,15 @@ neo4j = [
 "llama-index-core==0.12.5",
 ]
 
+twilio = ["fastapi>=0.115.0,<1", "uvicorn>=0.30.6,<1", "twilio>=9.3.2"]
+
+interop_crewai = ["crewai[tools]>=0.86,<1; python_version>='3.10' and python_version<'3.13'"]
+interop_langchain = ["langchain-community>=0.3.12,<1"]
+interop_pydantic_ai = ["pydantic-ai==0.0.13"]
+interop = interop_crewai + interop_langchain + interop_pydantic_ai
+
+types = ["mypy==1.9.0"] + test + jupyter_executor + interop
+
 if current_os in ["Windows", "Darwin"]:
 retrieve_chat_pgvector.extend(["psycopg[binary]>=3.1.18"])
 elif current_os == "Linux":
@@ -84,16 +106,7 @@ autobuild = ["chromadb", "sentence-transformers", "huggingface-hub", "pysqlite3-
 # PLEASE add them in the setup_ag2.py and setup_autogen.py files
 
 extra_require = {
-"test": [
-"ipykernel",
-"nbconvert",
-"nbformat",
-"pre-commit",
-"pytest-cov>=5",
-"pytest-asyncio",
-"pytest>=6.1.1,<8",
-"pandas",
-],
+"test": test,
 "blendsearch": ["flaml[blendsearch]"],
 "mathchat": ["sympy", "pydantic==1.10.9", "wolframalpha"],
 "retrievechat": retrieve_chat,
@@ -111,9 +124,9 @@ extra_require = {
 "websurfer": ["beautifulsoup4", "markdownify", "pdfminer.six", "pathvalidate"],
 "redis": ["redis"],
 "cosmosdb": ["azure-cosmos>=4.2.0"],
-"websockets": ["websockets>=
+"websockets": ["websockets>=14.0,<15"],
 "jupyter-executor": jupyter_executor,
-"types":
+"types": types,
 "long-context": ["llmlingua<0.3"],
 "anthropic": ["anthropic>=0.23.1"],
 "cerebras": ["cerebras_cloud_sdk>=1.0.0"],
@@ -122,6 +135,11 @@ extra_require = {
 "cohere": ["cohere>=5.5.8"],
 "ollama": ["ollama>=0.3.3", "fix_busted_json>=0.0.18"],
 "bedrock": ["boto3>=1.34.149"],
+"twilio": twilio,
+"interop-crewai": interop_crewai,
+"interop-langchain": interop_langchain,
+"interop-pydantic-ai": interop_pydantic_ai,
+"interop": interop,
 "neo4j": neo4j,
 }
 
@@ -156,5 +174,5 @@ setuptools.setup(
 "Operating System :: OS Independent",
 ],
 license="Apache Software License 2.0",
-python_requires=">=3.
+python_requires=">=3.9,<3.14",
 )
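Note that the new extras are built by plain list concatenation (`interop = interop_crewai + interop_langchain + interop_pydantic_ai`), and that the crewai entry carries an environment marker restricting it to Python 3.10–3.12. A hedged illustration of what that marker means in practice (behaviour inferred from the marker itself, not from release notes):

```bash
# On Python 3.9 or 3.13 the marker on the crewai requirement evaluates to false,
# so pip installs the extra without crewai[tools]; on 3.10-3.12 it is included.
pip install "ag2[interop-crewai]==0.6.0b1"
pip show crewai || echo "crewai skipped: interpreter outside the 3.10-3.12 marker range"
```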
{ag2-0.5.3b1 → ag2-0.6.0b1}/setup_ag2.py

@@ -54,6 +54,11 @@ setuptools.setup(
 "cohere": ["pyautogen[cohere]==" + __version__],
 "ollama": ["pyautogen[ollama]==" + __version__],
 "bedrock": ["pyautogen[bedrock]==" + __version__],
+"twilio": ["pyautogen[twilio]==" + __version__],
+"interop-crewai": ["pyautogen[interop-crewai]==" + __version__],
+"interop-langchain": ["pyautogen[interop-langchain]==" + __version__],
+"interop-pydantic-ai": ["pyautogen[interop-pydantic-ai]==" + __version__],
+"interop": ["pyautogen[interop]==" + __version__],
 "neo4j": ["pyautogen[neo4j]==" + __version__],
 },
 url="https://github.com/ag2ai/ag2",
@@ -65,5 +70,5 @@ setuptools.setup(
 "Operating System :: OS Independent",
 ],
 license="Apache Software License 2.0",
-python_requires=">=3.
+python_requires=">=3.9,<3.14",
 )
{ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_browser_utils.py

@@ -16,8 +16,8 @@ import pytest
 import requests
 from agentchat.test_assistant_agent import KEY_LOC # noqa: E402
 
-BLOG_POST_URL = "https://
-BLOG_POST_TITLE = "Does Model and Inference Parameter Matter in LLM Applications? - A Case Study for MATH
+BLOG_POST_URL = "https://docs.ag2.ai/blog/2023-04-21-LLM-tuning-math"
+BLOG_POST_TITLE = "Does Model and Inference Parameter Matter in LLM Applications? - A Case Study for MATH - AG2"
 BLOG_POST_STRING = "Large language models (LLMs) are powerful tools that can generate natural language texts for various applications, such as chatbots, summarization, translation, and more. GPT-4 is currently the state of the art LLM in the world. Is model selection irrelevant? What about inference parameters?"
 
 WIKIPEDIA_URL = "https://en.wikipedia.org/wiki/Microsoft"
@@ -156,7 +156,7 @@ def test_simple_text_browser():
 
 
 @pytest.mark.skipif(
-skip_bing,
+skip_all or skip_bing,
 reason="do not run bing tests if key is missing",
 )
 def test_bing_search():
{ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_code_utils.py

@@ -54,7 +54,7 @@ else:
 # OAI_CONFIG_LIST,
 # file_location=KEY_LOC,
 # filter_dict={
-# "model": ["gpt-
+# "model": ["gpt-4o", "gpt4", "gpt-4-32k", "gpt-4-32k-0314"],
 # },
 # )
 # # config_list = autogen.config_list_from_json(
{ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_logging.py

@@ -35,7 +35,7 @@ SAMPLE_CHAT_REQUEST = json.loads(
 "role": "user"
 }
 ],
-"model": "gpt-
+"model": "gpt-4o"
 }
 """
 )
@@ -58,7 +58,7 @@ SAMPLE_CHAT_RESPONSE = json.loads(
 }
 ],
 "created": 1705993480,
-"model": "gpt-
+"model": "gpt-4o",
 "object": "chat.completion",
 "system_fingerprint": "fp_6d044fb900",
 "usage": {
@@ -159,7 +159,7 @@ def test_log_new_agent(db_connection):
 
 cur = db_connection.cursor()
 agent_name = "some_assistant"
-config_list = [{"model": "gpt-
+config_list = [{"model": "gpt-4o", "api_key": "some_key"}]
 
 agent = AssistantAgent(agent_name, llm_config={"config_list": config_list})
 init_args = {"foo": "bar", "baz": {"other_key": "other_val"}, "a": None}
@@ -184,7 +184,7 @@ def test_log_oai_wrapper(db_connection):
 
 cur = db_connection.cursor()
 
-llm_config = {"config_list": [{"model": "gpt-
+llm_config = {"config_list": [{"model": "gpt-4o", "api_key": "some_key", "base_url": "some url"}]}
 init_args = {"llm_config": llm_config, "base_config": {}}
 wrapper = OpenAIWrapper(**llm_config)
 
@@ -210,8 +210,8 @@ def test_log_oai_client(db_connection):
 
 openai_config = {
 "api_key": "some_key",
-"api_version": "2024-
-"azure_deployment": "gpt-
+"api_version": "2024-08-06",
+"azure_deployment": "gpt-4o",
 "azure_endpoint": "https://foobar.openai.azure.com/",
 }
 client = AzureOpenAI(**openai_config)
{ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_retrieve_utils.py

@@ -226,7 +226,7 @@ class TestRetrieveUtils:
 dir_path="./website/docs",
 client=client,
 collection_name="autogen-docs",
-custom_text_types=["txt", "md", "rtf", "rst"],
+custom_text_types=["txt", "md", "rtf", "rst", "mdx"],
 get_or_create=True,
 )
 results = query_vector_db(
{ag2-0.5.3b1 → ag2-0.6.0b1}/test/test_token_count.py

@@ -141,6 +141,8 @@ def test_model_aliases():
 assert get_max_token_limit("gpt-35-turbo") == get_max_token_limit("gpt-3.5-turbo")
 assert get_max_token_limit("gpt4") == get_max_token_limit("gpt-4")
 assert get_max_token_limit("gpt4-32k") == get_max_token_limit("gpt-4-32k")
+assert get_max_token_limit("gpt4o") == get_max_token_limit("gpt-4o")
+assert get_max_token_limit("gpt4o-mini") == get_max_token_limit("gpt-4o-mini")
 
 
 if __name__ == "__main__":
ag2-0.5.3b1/ag2.egg-info/requires.txt (removed)

@@ -1,91 +0,0 @@
-pyautogen==0.5.3b1
-
-[anthropic]
-pyautogen[anthropic]==0.5.3b1
-
-[autobuild]
-pyautogen[autobuild]==0.5.3b1
-
-[bedrock]
-pyautogen[bedrock]==0.5.3b1
-
-[blendsearch]
-pyautogen[blendsearch]==0.5.3b1
-
-[captainagent]
-pyautogen[captainagent]==0.5.3b1
-
-[cerebras]
-pyautogen[cerebras]==0.5.3b1
-
-[cohere]
-pyautogen[cohere]==0.5.3b1
-
-[cosmosdb]
-pyautogen[cosmosdb]==0.5.3b1
-
-[gemini]
-pyautogen[gemini]==0.5.3b1
-
-[graph]
-pyautogen[graph]==0.5.3b1
-
-[graph-rag-falkor-db]
-pyautogen[graph-rag-falkor-db]==0.5.3b1
-
-[groq]
-pyautogen[groq]==0.5.3b1
-
-[jupyter-executor]
-pyautogen[jupyter-executor]==0.5.3b1
-
-[lmm]
-pyautogen[lmm]==0.5.3b1
-
-[long-context]
-pyautogen[long-context]==0.5.3b1
-
-[mathchat]
-pyautogen[mathchat]==0.5.3b1
-
-[mistral]
-pyautogen[mistral]==0.5.3b1
-
-[neo4j]
-pyautogen[neo4j]==0.5.3b1
-
-[ollama]
-pyautogen[ollama]==0.5.3b1
-
-[redis]
-pyautogen[redis]==0.5.3b1
-
-[retrievechat]
-pyautogen[retrievechat]==0.5.3b1
-
-[retrievechat-mongodb]
-pyautogen[retrievechat-mongodb]==0.5.3b1
-
-[retrievechat-pgvector]
-pyautogen[retrievechat-pgvector]==0.5.3b1
-
-[retrievechat-qdrant]
-pyautogen[retrievechat-qdrant]==0.5.3b1
-
-[teachable]
-pyautogen[teachable]==0.5.3b1
-
-[test]
-pyautogen[test]==0.5.3b1
-
-[together]
-pyautogen[together]==0.5.3b1
-
-[types]
-pyautogen[types]==0.5.3b1
-
-[websockets]
-pyautogen[websockets]==0.5.3b1
-
-[websurfer]
-pyautogen[websurfer]==0.5.3b1
The remaining files in the listing above (LICENSE, NOTICE.md, SOURCES.txt, dependency_links.txt, top_level.txt, setup.cfg and the test_function_utils, test_graph_utils, test_notebook and test_pydantic tests) are unchanged between 0.5.3b1 and 0.6.0b1.