ag2 0.4b1__py3-none-any.whl → 0.4.2b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ag2 might be problematic. Click here for more details.

Files changed (118)
  1. ag2-0.4.2b1.dist-info/METADATA +19 -0
  2. ag2-0.4.2b1.dist-info/RECORD +6 -0
  3. ag2-0.4.2b1.dist-info/top_level.txt +1 -0
  4. ag2-0.4b1.dist-info/METADATA +0 -496
  5. ag2-0.4b1.dist-info/RECORD +0 -115
  6. ag2-0.4b1.dist-info/top_level.txt +0 -1
  7. autogen/__init__.py +0 -17
  8. autogen/_pydantic.py +0 -116
  9. autogen/agentchat/__init__.py +0 -42
  10. autogen/agentchat/agent.py +0 -142
  11. autogen/agentchat/assistant_agent.py +0 -85
  12. autogen/agentchat/chat.py +0 -306
  13. autogen/agentchat/contrib/__init__.py +0 -0
  14. autogen/agentchat/contrib/agent_builder.py +0 -787
  15. autogen/agentchat/contrib/agent_optimizer.py +0 -450
  16. autogen/agentchat/contrib/capabilities/__init__.py +0 -0
  17. autogen/agentchat/contrib/capabilities/agent_capability.py +0 -21
  18. autogen/agentchat/contrib/capabilities/generate_images.py +0 -297
  19. autogen/agentchat/contrib/capabilities/teachability.py +0 -406
  20. autogen/agentchat/contrib/capabilities/text_compressors.py +0 -72
  21. autogen/agentchat/contrib/capabilities/transform_messages.py +0 -92
  22. autogen/agentchat/contrib/capabilities/transforms.py +0 -565
  23. autogen/agentchat/contrib/capabilities/transforms_util.py +0 -120
  24. autogen/agentchat/contrib/capabilities/vision_capability.py +0 -217
  25. autogen/agentchat/contrib/captainagent.py +0 -487
  26. autogen/agentchat/contrib/gpt_assistant_agent.py +0 -545
  27. autogen/agentchat/contrib/graph_rag/__init__.py +0 -0
  28. autogen/agentchat/contrib/graph_rag/document.py +0 -24
  29. autogen/agentchat/contrib/graph_rag/falkor_graph_query_engine.py +0 -76
  30. autogen/agentchat/contrib/graph_rag/graph_query_engine.py +0 -50
  31. autogen/agentchat/contrib/graph_rag/graph_rag_capability.py +0 -56
  32. autogen/agentchat/contrib/img_utils.py +0 -390
  33. autogen/agentchat/contrib/llamaindex_conversable_agent.py +0 -123
  34. autogen/agentchat/contrib/llava_agent.py +0 -176
  35. autogen/agentchat/contrib/math_user_proxy_agent.py +0 -471
  36. autogen/agentchat/contrib/multimodal_conversable_agent.py +0 -128
  37. autogen/agentchat/contrib/qdrant_retrieve_user_proxy_agent.py +0 -325
  38. autogen/agentchat/contrib/retrieve_assistant_agent.py +0 -56
  39. autogen/agentchat/contrib/retrieve_user_proxy_agent.py +0 -701
  40. autogen/agentchat/contrib/society_of_mind_agent.py +0 -203
  41. autogen/agentchat/contrib/swarm_agent.py +0 -414
  42. autogen/agentchat/contrib/text_analyzer_agent.py +0 -76
  43. autogen/agentchat/contrib/tool_retriever.py +0 -114
  44. autogen/agentchat/contrib/vectordb/__init__.py +0 -0
  45. autogen/agentchat/contrib/vectordb/base.py +0 -243
  46. autogen/agentchat/contrib/vectordb/chromadb.py +0 -326
  47. autogen/agentchat/contrib/vectordb/mongodb.py +0 -559
  48. autogen/agentchat/contrib/vectordb/pgvectordb.py +0 -958
  49. autogen/agentchat/contrib/vectordb/qdrant.py +0 -334
  50. autogen/agentchat/contrib/vectordb/utils.py +0 -126
  51. autogen/agentchat/contrib/web_surfer.py +0 -305
  52. autogen/agentchat/conversable_agent.py +0 -2908
  53. autogen/agentchat/groupchat.py +0 -1668
  54. autogen/agentchat/user_proxy_agent.py +0 -109
  55. autogen/agentchat/utils.py +0 -207
  56. autogen/browser_utils.py +0 -291
  57. autogen/cache/__init__.py +0 -10
  58. autogen/cache/abstract_cache_base.py +0 -78
  59. autogen/cache/cache.py +0 -182
  60. autogen/cache/cache_factory.py +0 -85
  61. autogen/cache/cosmos_db_cache.py +0 -150
  62. autogen/cache/disk_cache.py +0 -109
  63. autogen/cache/in_memory_cache.py +0 -61
  64. autogen/cache/redis_cache.py +0 -128
  65. autogen/code_utils.py +0 -745
  66. autogen/coding/__init__.py +0 -22
  67. autogen/coding/base.py +0 -113
  68. autogen/coding/docker_commandline_code_executor.py +0 -262
  69. autogen/coding/factory.py +0 -45
  70. autogen/coding/func_with_reqs.py +0 -203
  71. autogen/coding/jupyter/__init__.py +0 -22
  72. autogen/coding/jupyter/base.py +0 -32
  73. autogen/coding/jupyter/docker_jupyter_server.py +0 -164
  74. autogen/coding/jupyter/embedded_ipython_code_executor.py +0 -182
  75. autogen/coding/jupyter/jupyter_client.py +0 -224
  76. autogen/coding/jupyter/jupyter_code_executor.py +0 -161
  77. autogen/coding/jupyter/local_jupyter_server.py +0 -168
  78. autogen/coding/local_commandline_code_executor.py +0 -410
  79. autogen/coding/markdown_code_extractor.py +0 -44
  80. autogen/coding/utils.py +0 -57
  81. autogen/exception_utils.py +0 -46
  82. autogen/extensions/__init__.py +0 -0
  83. autogen/formatting_utils.py +0 -76
  84. autogen/function_utils.py +0 -362
  85. autogen/graph_utils.py +0 -148
  86. autogen/io/__init__.py +0 -15
  87. autogen/io/base.py +0 -105
  88. autogen/io/console.py +0 -43
  89. autogen/io/websockets.py +0 -213
  90. autogen/logger/__init__.py +0 -11
  91. autogen/logger/base_logger.py +0 -140
  92. autogen/logger/file_logger.py +0 -287
  93. autogen/logger/logger_factory.py +0 -29
  94. autogen/logger/logger_utils.py +0 -42
  95. autogen/logger/sqlite_logger.py +0 -459
  96. autogen/math_utils.py +0 -356
  97. autogen/oai/__init__.py +0 -33
  98. autogen/oai/anthropic.py +0 -428
  99. autogen/oai/bedrock.py +0 -600
  100. autogen/oai/cerebras.py +0 -264
  101. autogen/oai/client.py +0 -1148
  102. autogen/oai/client_utils.py +0 -167
  103. autogen/oai/cohere.py +0 -453
  104. autogen/oai/completion.py +0 -1216
  105. autogen/oai/gemini.py +0 -469
  106. autogen/oai/groq.py +0 -281
  107. autogen/oai/mistral.py +0 -279
  108. autogen/oai/ollama.py +0 -576
  109. autogen/oai/openai_utils.py +0 -810
  110. autogen/oai/together.py +0 -343
  111. autogen/retrieve_utils.py +0 -487
  112. autogen/runtime_logging.py +0 -163
  113. autogen/token_count_utils.py +0 -257
  114. autogen/types.py +0 -20
  115. autogen/version.py +0 -7
  116. {ag2-0.4b1.dist-info → ag2-0.4.2b1.dist-info}/LICENSE +0 -0
  117. {ag2-0.4b1.dist-info → ag2-0.4.2b1.dist-info}/NOTICE.md +0 -0
  118. {ag2-0.4b1.dist-info → ag2-0.4.2b1.dist-info}/WHEEL +0 -0
@@ -1,810 +0,0 @@
1
- # Copyright (c) 2023 - 2024, Owners of https://github.com/ag2ai
2
- #
3
- # SPDX-License-Identifier: Apache-2.0
4
- #
5
- # Portions derived from https://github.com/microsoft/autogen are under the MIT License.
6
- # SPDX-License-Identifier: MIT
7
- import importlib.metadata
8
- import json
9
- import logging
10
- import os
11
- import re
12
- import tempfile
13
- import time
14
- from pathlib import Path
15
- from typing import Any, Dict, List, Optional, Set, Union
16
-
17
- from dotenv import find_dotenv, load_dotenv
18
- from openai import OpenAI
19
- from openai.types.beta.assistant import Assistant
20
- from packaging.version import parse
21
-
22
# Config keys that do not influence the model's response; they are stripped
# out by `get_key` so configs differing only in credentials/endpoints map to
# the same cache identifier.
NON_CACHE_KEY = [
    "api_key",
    "base_url",
    "api_type",
    "api_version",
    "azure_ad_token",
    "azure_ad_token_provider",
    "credentials",
]
# Azure OpenAI API version used when the caller does not supply one.
DEFAULT_AZURE_API_VERSION = "2024-02-01"
# Price table keyed by model name. A tuple is presumably
# (prompt price, completion price) per 1K tokens and a bare float a single
# rate for both — TODO(review): confirm against the cost-calculation code.
OAI_PRICE1K = {
    # https://openai.com/api/pricing/
    # gpt-4o
    "gpt-4o": (0.005, 0.015),
    "gpt-4o-2024-05-13": (0.005, 0.015),
    "gpt-4o-2024-08-06": (0.0025, 0.01),
    # gpt-4-turbo
    "gpt-4-turbo-2024-04-09": (0.01, 0.03),
    # gpt-4
    "gpt-4": (0.03, 0.06),
    "gpt-4-32k": (0.06, 0.12),
    # gpt-4o-mini
    "gpt-4o-mini": (0.000150, 0.000600),
    "gpt-4o-mini-2024-07-18": (0.000150, 0.000600),
    # gpt-3.5 turbo
    "gpt-3.5-turbo": (0.0005, 0.0015),  # default is 0125
    "gpt-3.5-turbo-0125": (0.0005, 0.0015),  # 16k
    "gpt-3.5-turbo-instruct": (0.0015, 0.002),
    # base model
    "davinci-002": 0.002,
    "babbage-002": 0.0004,
    # old model
    "gpt-4-0125-preview": (0.01, 0.03),
    "gpt-4-1106-preview": (0.01, 0.03),
    "gpt-4-1106-vision-preview": (0.01, 0.03),  # TODO: support vision pricing of images
    "gpt-3.5-turbo-1106": (0.001, 0.002),
    "gpt-3.5-turbo-0613": (0.0015, 0.002),
    # "gpt-3.5-turbo-16k": (0.003, 0.004),
    "gpt-3.5-turbo-16k-0613": (0.003, 0.004),
    "gpt-3.5-turbo-0301": (0.0015, 0.002),
    "text-ada-001": 0.0004,
    "text-babbage-001": 0.0005,
    "text-curie-001": 0.002,
    "code-cushman-001": 0.024,
    "code-davinci-002": 0.1,
    "text-davinci-002": 0.02,
    "text-davinci-003": 0.02,
    "gpt-4-0314": (0.03, 0.06),  # deprecate in Sep
    "gpt-4-32k-0314": (0.06, 0.12),  # deprecate in Sep
    "gpt-4-0613": (0.03, 0.06),
    "gpt-4-32k-0613": (0.06, 0.12),
    "gpt-4-turbo-preview": (0.01, 0.03),
    # https://azure.microsoft.com/en-us/pricing/details/cognitive-services/openai-service/#pricing
    "gpt-35-turbo": (0.0005, 0.0015),  # what's the default? using 0125 here.
    "gpt-35-turbo-0125": (0.0005, 0.0015),
    "gpt-35-turbo-instruct": (0.0015, 0.002),
    "gpt-35-turbo-1106": (0.001, 0.002),
    "gpt-35-turbo-0613": (0.0015, 0.002),
    "gpt-35-turbo-0301": (0.0015, 0.002),
    "gpt-35-turbo-16k": (0.003, 0.004),
    "gpt-35-turbo-16k-0613": (0.003, 0.004),
}
84
-
85
-
86
def get_key(config: Dict[str, Any]) -> str:
    """Get a unique identifier of a configuration.

    Fields listed in ``NON_CACHE_KEY`` (credentials, endpoints, API
    versions) do not affect the response, so they are excluded before
    serializing, and configs differing only in those fields share a key.

    Args:
        config (dict): A configuration.

    Returns:
        str: A canonical JSON string usable as a dict key.
    """
    cacheable = {field: value for field, value in config.items() if field not in NON_CACHE_KEY}
    return json.dumps(cacheable, sort_keys=True)
106
-
107
-
108
def is_valid_api_key(api_key: str) -> bool:
    """Determine whether the input looks like a valid OpenAI API key.

    As of 2024-09-24 there is no official definition of the key structure,
    so anything starting with "sk-" followed by at least 48 characters from
    [A-Za-z0-9_-] is accepted. Keys are known to start with "sk-",
    "sk-proj", "sk-None", and "sk-svcaat".

    Args:
        api_key (str): An input string to be validated.

    Returns:
        bool: True if the input matches the expected key shape.
    """
    key_pattern = re.compile(r"^sk-[A-Za-z0-9_-]{48,}$")
    return key_pattern.fullmatch(api_key) is not None
121
-
122
-
123
def get_config_list(
    api_keys: List[str],
    base_urls: Optional[List[str]] = None,
    api_type: Optional[str] = None,
    api_version: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Get a list of configs for OpenAI API client.

    Args:
        api_keys (list): The api keys for openai api calls.
        base_urls (list, optional): The api bases for openai api calls. If provided, should match the length of api_keys.
        api_type (str, optional): The api type for openai api calls.
        api_version (str, optional): The api version for openai api calls.

    Returns:
        list: A list of configs for OpenAI API calls.

    Example:
        ```python
        # Define a list of API keys
        api_keys = ['key1', 'key2', 'key3']

        # Optionally, define a list of base URLs corresponding to each API key
        base_urls = ['https://api.service1.com', 'https://api.service2.com', 'https://api.service3.com']

        # Optionally, define the API type and version if they are common for all keys
        api_type = 'azure'
        api_version = '2024-02-01'

        # Call the get_config_list function to get a list of configuration dictionaries
        config_list = get_config_list(api_keys, base_urls, api_type, api_version)
        ```

    """
    if base_urls is not None:
        assert len(api_keys) == len(base_urls), "The length of api_keys must match the length of base_urls"
    config_list = []
    for i, api_key in enumerate(api_keys):
        # Skip blank keys (e.g. produced by splitting an empty env var),
        # while `enumerate` keeps the index aligned with base_urls.
        if not api_key.strip():
            continue
        config = {"api_key": api_key}
        if base_urls:
            config["base_url"] = base_urls[i]
        if api_type:
            config["api_type"] = api_type
        if api_version:
            config["api_version"] = api_version
        config_list.append(config)
    return config_list
172
-
173
-
174
def config_list_openai_aoai(
    key_file_path: Optional[str] = ".",
    openai_api_key_file: Optional[str] = "key_openai.txt",
    aoai_api_key_file: Optional[str] = "key_aoai.txt",
    openai_api_base_file: Optional[str] = "base_openai.txt",
    aoai_api_base_file: Optional[str] = "base_aoai.txt",
    exclude: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Get a list of configs for OpenAI API client (including Azure or local model deployments that support OpenAI's chat completion API).

    This function constructs configurations by reading API keys and base URLs from environment variables or text files.
    It supports configurations for both OpenAI and Azure OpenAI services, allowing for the exclusion of one or the other.
    When text files are used, the environment variables will be overwritten.
    To prevent text files from being used, set the corresponding file name to None.
    Or set key_file_path to None to disallow reading from text files.

    Args:
        key_file_path (str, optional): The directory path where the API key files are located. Defaults to the current directory.
        openai_api_key_file (str, optional): The filename containing the OpenAI API key. Defaults to 'key_openai.txt'.
        aoai_api_key_file (str, optional): The filename containing the Azure OpenAI API key. Defaults to 'key_aoai.txt'.
        openai_api_base_file (str, optional): The filename containing the OpenAI API base URL. Defaults to 'base_openai.txt'.
        aoai_api_base_file (str, optional): The filename containing the Azure OpenAI API base URL. Defaults to 'base_aoai.txt'.
        exclude (str, optional): The API type to exclude from the configuration list. Can be 'openai' or 'aoai'. Defaults to None.

    Returns:
        List[Dict]: A list of configuration dictionaries. Each dictionary contains keys for 'api_key',
            and optionally 'base_url', 'api_type', and 'api_version'.

    Raises:
        FileNotFoundError: If the specified key files are not found and the corresponding API key is not set in the environment variables.

    Example:
        # To generate configurations excluding Azure OpenAI:
        configs = config_list_openai_aoai(exclude='aoai')

    File samples:
    - key_aoai.txt

    ```
    aoai-12345abcdef67890ghijklmnopqr
    aoai-09876zyxwvuts54321fedcba
    ```

    - base_aoai.txt

    ```
    https://api.azure.com/v1
    https://api.azure2.com/v1
    ```

    Notes:
    - The function checks for API keys and base URLs in the following environment variables: 'OPENAI_API_KEY', 'AZURE_OPENAI_API_KEY',
      'OPENAI_API_BASE' and 'AZURE_OPENAI_API_BASE'. If these are not found, it attempts to read from the specified files in the
      'key_file_path' directory.
    - The API version for Azure configurations is set to DEFAULT_AZURE_API_VERSION by default.
    - If 'exclude' is set to 'openai', only Azure OpenAI configurations are returned, and vice versa.
    - The function assumes that the API keys and base URLs in the environment variables are separated by new lines if there are
      multiple entries.
    """
    # Phase 1: populate os.environ from the key/base text files (if present).
    # NOTE: this overwrites any pre-existing values of these env vars.
    if exclude != "openai" and key_file_path is not None:
        # skip if key_file_path is None
        if openai_api_key_file is not None:
            # skip if openai_api_key_file is None
            try:
                with open(f"{key_file_path}/{openai_api_key_file}") as key_file:
                    os.environ["OPENAI_API_KEY"] = key_file.read().strip()
            except FileNotFoundError:
                # Best-effort: a missing file is not an error; the env var may
                # already be set, or the caller may supply api_key directly.
                logging.info(
                    "OPENAI_API_KEY is not found in os.environ "
                    "and key_openai.txt is not found in the specified path. You can specify the api_key in the config_list."
                )
        if openai_api_base_file is not None:
            # skip if openai_api_base_file is None
            try:
                with open(f"{key_file_path}/{openai_api_base_file}") as key_file:
                    os.environ["OPENAI_API_BASE"] = key_file.read().strip()
            except FileNotFoundError:
                logging.info(
                    "OPENAI_API_BASE is not found in os.environ "
                    "and base_openai.txt is not found in the specified path. You can specify the base_url in the config_list."
                )
    if exclude != "aoai" and key_file_path is not None:
        # skip if key_file_path is None
        if aoai_api_key_file is not None:
            try:
                with open(f"{key_file_path}/{aoai_api_key_file}") as key_file:
                    os.environ["AZURE_OPENAI_API_KEY"] = key_file.read().strip()
            except FileNotFoundError:
                logging.info(
                    "AZURE_OPENAI_API_KEY is not found in os.environ "
                    "and key_aoai.txt is not found in the specified path. You can specify the api_key in the config_list."
                )
        if aoai_api_base_file is not None:
            try:
                with open(f"{key_file_path}/{aoai_api_base_file}") as key_file:
                    os.environ["AZURE_OPENAI_API_BASE"] = key_file.read().strip()
            except FileNotFoundError:
                logging.info(
                    "AZURE_OPENAI_API_BASE is not found in os.environ "
                    "and base_aoai.txt is not found in the specified path. You can specify the base_url in the config_list."
                )
    # Phase 2: build config lists from the (possibly just-populated) env vars.
    # Multiple keys/bases are newline-separated inside a single env var.
    aoai_config = (
        get_config_list(
            # Assuming Azure OpenAI api keys in os.environ["AZURE_OPENAI_API_KEY"], in separated lines
            api_keys=os.environ.get("AZURE_OPENAI_API_KEY", "").split("\n"),
            # Assuming Azure OpenAI api bases in os.environ["AZURE_OPENAI_API_BASE"], in separated lines
            base_urls=os.environ.get("AZURE_OPENAI_API_BASE", "").split("\n"),
            api_type="azure",
            api_version=DEFAULT_AZURE_API_VERSION,
        )
        if exclude != "aoai"
        else []
    )
    # process openai base urls
    # Kept as None (rather than [""]) when the env var is absent, so that
    # get_config_list does not attach a base_url at all.
    base_urls_env_var = os.environ.get("OPENAI_API_BASE", None)
    base_urls = base_urls_env_var if base_urls_env_var is None else base_urls_env_var.split("\n")
    openai_config = (
        get_config_list(
            # Assuming OpenAI API_KEY in os.environ["OPENAI_API_KEY"]
            api_keys=os.environ.get("OPENAI_API_KEY", "").split("\n"),
            base_urls=base_urls,
        )
        if exclude != "openai"
        else []
    )
    # OpenAI configs come first, then Azure OpenAI configs.
    config_list = openai_config + aoai_config
    return config_list
301
-
302
-
303
def config_list_from_models(
    key_file_path: Optional[str] = ".",
    openai_api_key_file: Optional[str] = "key_openai.txt",
    aoai_api_key_file: Optional[str] = "key_aoai.txt",
    aoai_api_base_file: Optional[str] = "base_aoai.txt",
    exclude: Optional[str] = None,
    model_list: Optional[List[str]] = None,
) -> List[Dict[str, Any]]:
    """
    Get a list of configs for API calls with models specified in the model list.

    This function extends `config_list_openai_aoai` by cloning its output once
    per model name: each resulting configuration carries a 'model' key. This is
    particularly useful when all endpoints serve the same set of models.

    Args:
        key_file_path (str, optional): The path to the key files.
        openai_api_key_file (str, optional): The file name of the OpenAI API key.
        aoai_api_key_file (str, optional): The file name of the Azure OpenAI API key.
        aoai_api_base_file (str, optional): The file name of the Azure OpenAI API base.
        exclude (str, optional): The API type to exclude, "openai" or "aoai".
        model_list (list, optional): The list of model names to include in the configs.

    Returns:
        list: A list of configs for OpenAI API calls, each including model information.

    Example:
        ```python
        # Define the path where the API key files are located
        key_file_path = '/path/to/key/files'

        # Define the file names for the OpenAI and Azure OpenAI API keys and bases
        openai_api_key_file = 'key_openai.txt'
        aoai_api_key_file = 'key_aoai.txt'
        aoai_api_base_file = 'base_aoai.txt'

        # Define the list of models for which to create configurations
        model_list = ['gpt-4', 'gpt-3.5-turbo']

        # Call the function to get a list of configuration dictionaries
        config_list = config_list_from_models(
            key_file_path=key_file_path,
            openai_api_key_file=openai_api_key_file,
            aoai_api_key_file=aoai_api_key_file,
            aoai_api_base_file=aoai_api_base_file,
            model_list=model_list
        )

        # The `config_list` will contain configurations for the specified models, for example:
        # [
        #     {'api_key': '...', 'base_url': 'https://api.openai.com', 'model': 'gpt-4'},
        #     {'api_key': '...', 'base_url': 'https://api.openai.com', 'model': 'gpt-3.5-turbo'}
        # ]
        ```
    """
    base_configs = config_list_openai_aoai(
        key_file_path=key_file_path,
        openai_api_key_file=openai_api_key_file,
        aoai_api_key_file=aoai_api_key_file,
        aoai_api_base_file=aoai_api_base_file,
        exclude=exclude,
    )
    if not model_list:
        return base_configs
    # Clone every endpoint config once per model, keeping model-major order.
    expanded = []
    for model in model_list:
        for base in base_configs:
            expanded.append({**base, "model": model})
    return expanded
368
-
369
-
370
def config_list_gpt4_gpt35(
    key_file_path: Optional[str] = ".",
    openai_api_key_file: Optional[str] = "key_openai.txt",
    aoai_api_key_file: Optional[str] = "key_aoai.txt",
    aoai_api_base_file: Optional[str] = "base_aoai.txt",
    exclude: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Get a list of configs for 'gpt-4' followed by 'gpt-3.5-turbo' API calls.

    Thin convenience wrapper around `config_list_from_models` with a fixed
    model list of ["gpt-4", "gpt-3.5-turbo"].

    Args:
        key_file_path (str, optional): The path to the key files.
        openai_api_key_file (str, optional): The file name of the openai api key.
        aoai_api_key_file (str, optional): The file name of the azure openai api key.
        aoai_api_base_file (str, optional): The file name of the azure openai api base.
        exclude (str, optional): The api type to exclude, "openai" or "aoai".

    Returns:
        list: A list of configs for openai api calls.
    """
    return config_list_from_models(
        key_file_path=key_file_path,
        openai_api_key_file=openai_api_key_file,
        aoai_api_key_file=aoai_api_key_file,
        aoai_api_base_file=aoai_api_base_file,
        exclude=exclude,
        model_list=["gpt-4", "gpt-3.5-turbo"],
    )
397
-
398
-
399
def filter_config(
    config_list: List[Dict[str, Any]],
    filter_dict: Optional[Dict[str, Union[List[Union[str, None]], Set[Union[str, None]]]]],
    exclude: bool = False,
) -> List[Dict[str, Any]]:
    """Filter `config_list` against the criteria in `filter_dict`.

    Each key of `filter_dict` names a field to check in every configuration
    dictionary; its value is the collection of acceptable values for that
    field. A scalar field matches when it is found among the acceptable
    values; a list-valued field matches when it has a non-empty intersection
    with them (see `_satisfies_criteria`). A config is kept only when every
    criterion matches (or, with ``exclude=True``, when every criterion fails
    to match).

    Args:
        config_list (list of dict): A list of configuration dictionaries to be filtered.
        filter_dict (dict): Maps field names to lists/sets of acceptable values.
        exclude (bool): If False (the default), matching configs are included
            in the result; if True, matching configs are excluded.

    Returns:
        list of dict: The configurations meeting the criteria.

    Example:
        ```python
        configs = [
            {'model': 'gpt-3.5-turbo'},
            {'model': 'gpt-4'},
            {'model': 'gpt-3.5-turbo', 'api_type': 'azure'},
            {'model': 'gpt-3.5-turbo', 'tags': ['gpt35_turbo', 'gpt-35-turbo']},
        ]
        filter_criteria = {'model': ['gpt-3.5-turbo'], 'api_type': ['azure']}
        filtered_configs = filter_config(configs, filter_criteria)
        # -> [{'model': 'gpt-3.5-turbo', 'api_type': 'azure'}]
        filtered_configs = filter_config(configs, {'tags': ['gpt35_turbo']})
        # -> [{'model': 'gpt-3.5-turbo', 'tags': ['gpt35_turbo', 'gpt-35-turbo']}]
        ```

    Note:
        - If `filter_dict` is empty or None, no filtering is applied and
          `config_list` is returned as is.
        - A config missing a criterion field is intended to match only when
          None is among that field's acceptable values (delegated to
          `_satisfies_criteria`).
    """
    if not filter_dict:
        return config_list
    kept = []
    for config in config_list:
        satisfies_all = all(
            _satisfies_criteria(config.get(field), acceptable) != exclude
            for field, acceptable in filter_dict.items()
        )
        if satisfies_all:
            kept.append(config)
    return kept
466
-
467
-
468
- def _satisfies_criteria(value: Any, criteria_values: Any) -> bool:
469
- if value is None:
470
- return False
471
-
472
- if isinstance(value, list):
473
- return bool(set(value) & set(criteria_values)) # Non-empty intersection
474
- else:
475
- return value in criteria_values
476
-
477
-
478
def config_list_from_json(
    env_or_file: str,
    file_location: Optional[str] = "",
    filter_dict: Optional[Dict[str, Union[List[Union[str, None]], Set[Union[str, None]]]]] = None,
) -> List[Dict[str, Any]]:
    """
    Retrieves a list of API configurations from a JSON stored in an environment variable or a file.

    `env_or_file` is resolved in this order: (1) if it names an environment
    variable whose value is an existing file path, that file is loaded;
    (2) if it names an environment variable holding JSON text, that text is
    parsed directly; (3) otherwise it is treated as a filename, looked up
    under `file_location` when one is given.

    The `filter_dict` parameter allows filtering the configurations: each key
    names a field of the configuration dictionaries and maps to a list or set
    of acceptable values for that field (see `filter_config`).

    Args:
        env_or_file (str): The name of the environment variable, the filename, or the environment
            variable holding a filename, that contains the JSON data.
        file_location (str, optional): The directory path where the file is located, if `env_or_file` is a filename.
        filter_dict (dict, optional): Filtering criteria; field names mapped to lists or sets of
            acceptable values.

    Example:
        ```python
        # Suppose we have an environment variable 'CONFIG_JSON' with the following content:
        # '[{"model": "gpt-3.5-turbo", "api_type": "azure"}, {"model": "gpt-4"}]'

        # We can retrieve a filtered list of configurations like this:
        filter_criteria = {"model": ["gpt-3.5-turbo"]}
        configs = config_list_from_json('CONFIG_JSON', filter_dict=filter_criteria)
        # The 'configs' variable will now contain only the configurations that match the filter criteria.
        ```

    Returns:
        List[Dict]: A list of configuration dictionaries that match the filtering criteria specified in `filter_dict`.

    Raises:
        FileNotFoundError: if env_or_file is neither found as an environment variable nor a file
    """
    env_value = os.environ.get(env_or_file)

    if env_value:
        # Environment variable exists: it holds either a path or raw JSON.
        if os.path.exists(env_value):
            with open(env_value, "r") as file:
                config_list = json.loads(file.read())
        else:
            config_list = json.loads(env_value)
    else:
        # Not an environment variable: treat `env_or_file` as a filename,
        # optionally rooted at `file_location`.
        if file_location is not None:
            json_path = os.path.join(file_location, env_or_file)
        else:
            json_path = env_or_file

        with open(json_path) as json_file:
            config_list = json.load(json_file)
    return filter_config(config_list, filter_dict)
542
-
543
-
544
def get_config(
    api_key: Optional[str],
    base_url: Optional[str] = None,
    api_type: Optional[str] = None,
    api_version: Optional[str] = None,
) -> Dict[str, Any]:
    """
    Constructs a configuration dictionary for a single model with the provided API configurations.

    Example:
        ```python
        config = get_config(
            api_key="sk-abcdef1234567890",
            base_url="https://api.openai.com",
            api_version="v1"
        )
        # The 'config' variable will now contain:
        # {
        #     "api_key": "sk-abcdef1234567890",
        #     "base_url": "https://api.openai.com",
        #     "api_version": "v1"
        # }
        ```

    Args:
        api_key (str): The API key for authenticating API requests.
        base_url (Optional[str]): The base URL of the API. If not provided, defaults to None.
        api_type (Optional[str]): The type of API. If not provided, defaults to None.
        api_version (Optional[str]): The version of the API. If not provided, defaults to None.

    Returns:
        Dict: A dictionary containing the provided API configurations.
    """
    config: Dict[str, Any] = {"api_key": api_key}
    # Each optional value may also name an environment variable; when no such
    # variable is set, os.getenv falls back to the literal value itself.
    # NOTE(review): this env-indirection looks intentional but is undocumented
    # in the original — confirm before relying on it.
    for field, value in (("base_url", base_url), ("api_type", api_type), ("api_version", api_version)):
        if value:
            config[field] = os.getenv(value, default=value)
    return config
585
-
586
-
587
def config_list_from_dotenv(
    dotenv_file_path: Optional[str] = None,
    model_api_key_map: Optional[Dict[str, Any]] = None,
    filter_dict: Optional[Dict[str, Union[List[Union[str, None]], Set[Union[str, None]]]]] = None,
) -> List[Dict[str, Union[str, Set[str]]]]:
    """
    Load API configurations from a specified .env file or environment variables and construct a list of configurations.

    This function will:
    - Load API keys from a provided .env file or from existing environment variables.
    - Create a configuration dictionary for each model using the API keys and additional configurations.
    - Filter and return the configurations based on provided filters.

    model_api_key_map will default to `{"gpt-4": "OPENAI_API_KEY", "gpt-3.5-turbo": "OPENAI_API_KEY"}` if none

    Args:
        dotenv_file_path (str, optional): The path to the .env file. Defaults to None.
        model_api_key_map (str/dict, optional): A dictionary mapping models to their API key configurations.
                                                If a string is provided as configuration, it is considered as an environment
                                                variable name storing the API key.
                                                If a dict is provided, it should contain at least 'api_key_env_var' key,
                                                and optionally other API configurations like 'base_url', 'api_type', and 'api_version'.
                                                Defaults to a basic map with 'gpt-4' and 'gpt-3.5-turbo' mapped to 'OPENAI_API_KEY'.
        filter_dict (dict, optional): A dictionary containing the models to be loaded.
                                      Containing a 'model' key mapped to a set of model names to be loaded.
                                      Defaults to None, which loads all found configurations.

    Returns:
        List[Dict[str, Union[str, Set[str]]]]: A list of configuration dictionaries for each model.

    Raises:
        FileNotFoundError: If the specified .env file does not exist.
        TypeError: If an unsupported type of configuration is provided in model_api_key_map.
    """
    # Load environment variables from the explicit .env file if given,
    # otherwise fall back to whatever `find_dotenv` can discover.
    if dotenv_file_path:
        dotenv_path = Path(dotenv_file_path)
        if dotenv_path.exists():
            load_dotenv(dotenv_path)
        else:
            logging.warning(f"The specified .env file {dotenv_path} does not exist.")
    else:
        dotenv_path_str = find_dotenv()
        if not dotenv_path_str:
            logging.warning("No .env file found. Loading configurations from environment variables.")
        load_dotenv(Path(dotenv_path_str))

    # Ensure the model_api_key_map is not None to prevent TypeErrors during key assignment.
    model_api_key_map = model_api_key_map or {}

    # Ensure default models are always considered.
    # Only assign the default API key env var if the model is not present in the map;
    # if the model is present but set to invalid/empty, do not overwrite.
    for model in ("gpt-4", "gpt-3.5-turbo"):
        model_api_key_map.setdefault(model, "OPENAI_API_KEY")

    env_var = []
    # Loop over the models and create configuration dictionaries
    for model, config in model_api_key_map.items():
        if isinstance(config, str):
            # A bare string names the environment variable that stores the API key.
            config_dict = get_config(api_key=os.getenv(config))
        elif isinstance(config, dict):
            api_key = os.getenv(config.get("api_key_env_var", "OPENAI_API_KEY"))
            config_without_key_var = {k: v for k, v in config.items() if k != "api_key_env_var"}
            config_dict = get_config(api_key=api_key, **config_without_key_var)
        else:
            logging.warning(f"Unsupported type {type(config)} for model {model} configuration")
            # BUGFIX: skip unsupported entries. Previously execution fell through to the
            # api_key check below, raising NameError on the first iteration or silently
            # reusing the previous model's config_dict.
            continue

        if not config_dict["api_key"] or config_dict["api_key"].strip() == "":
            logging.warning(
                f"API key not found or empty for model {model}. Please ensure path to .env file is correct."
            )
            continue  # Skip this configuration and continue with the next

        # Add model to the configuration and append to the list
        config_dict["model"] = model
        env_var.append(config_dict)

    # Round-trip through a temporary JSON file so config_list_from_json can apply
    # its filtering logic; the file is removed by name afterwards (to prevent
    # windows build from breaking).
    fd, temp_name = tempfile.mkstemp()
    try:
        with os.fdopen(fd, "w+") as temp:
            temp.write(json.dumps(env_var))
            temp.flush()
            config_list = config_list_from_json(env_or_file=temp_name, filter_dict=filter_dict)
    finally:
        os.remove(temp_name)

    if len(config_list) == 0:
        logging.error("No configurations loaded.")
        return []

    logging.info(f"Models available: {[config['model'] for config in config_list]}")
    return config_list
688
-
689
-
690
def retrieve_assistants_by_name(client: OpenAI, name: str) -> List[Assistant]:
    """
    Return the assistants with the given name from OAI assistant API
    """
    # Filter the full assistant listing down to exact name matches.
    return [assistant for assistant in client.beta.assistants.list().data if assistant.name == name]
700
-
701
-
702
def detect_gpt_assistant_api_version() -> str:
    """Detect the openai assistant API version: "v1" for openai < 1.21, "v2" otherwise."""
    installed_version = parse(importlib.metadata.version("openai"))
    return "v1" if installed_version < parse("1.21") else "v2"
709
-
710
-
711
def create_gpt_vector_store(client: "OpenAI", name: str, fild_ids: List[str]) -> Any:
    """Create a openai vector store for gpt assistant.

    Args:
        client: An OpenAI client instance.
        name: Name for the new vector store.
        fild_ids: IDs of the files to attach to the store
            (parameter name kept as-is for backward keyword compatibility).

    Returns:
        The created vector store object, once the file batch has completed.

    Raises:
        AttributeError: If the installed OpenAI package does not support vector stores.
        ValueError: If the file batch finishes in a non-"completed" status.
    """
    try:
        vector_store = client.beta.vector_stores.create(name=name)
    except Exception as e:
        # Chain the original exception so the real cause stays visible in tracebacks.
        raise AttributeError(
            f"Failed to create vector store, please install the latest OpenAI python package: {e}"
        ) from e

    # poll the status of the file batch for completion.
    batch = client.beta.vector_stores.file_batches.create_and_poll(vector_store_id=vector_store.id, file_ids=fild_ids)

    # BUGFIX: poll in a loop until the batch leaves "in_progress". Previously only one
    # extra poll was made, so a batch still in progress fell through to the failure
    # branch below even though it might complete shortly after.
    while batch.status == "in_progress":
        time.sleep(1)
        logging.debug(f"file batch status: {batch.file_counts}")
        batch = client.beta.vector_stores.file_batches.poll(vector_store_id=vector_store.id, batch_id=batch.id)

    if batch.status == "completed":
        return vector_store

    raise ValueError(f"Failed to upload files to vector store {vector_store.id}:{batch.status}")
731
-
732
-
733
def create_gpt_assistant(
    client: OpenAI, name: str, instructions: str, model: str, assistant_config: Dict[str, Any]
) -> Assistant:
    """Create a openai gpt assistant from the given assistant_config."""
    api_version = detect_gpt_assistant_api_version()
    tools = assistant_config.get("tools", [])
    create_kwargs = {}

    if api_version == "v2":
        tool_resources = assistant_config.get("tool_resources", {})
        file_ids = assistant_config.get("file_ids")
        if tool_resources.get("file_search") is not None and file_ids is not None:
            raise ValueError(
                "Cannot specify both `tool_resources['file_search']` tool and `file_ids` in the assistant config."
            )

        # Backwards compatibility with the V1 API: files are now attached via the
        # tool_resources object instead of V1 AssistantFile, and the `retrieval`
        # tool was renamed to `file_search`. Note: tools are rewritten in place,
        # so the caller's config is mutated.
        for tool in tools:
            if tool["type"] == "retrieval":
                tool["type"] = "file_search"
                if file_ids is not None:
                    # create a vector store for the file search tool
                    vector_store = create_gpt_vector_store(client, f"{name}-vectorestore", file_ids)
                    tool_resources["file_search"] = {"vector_store_ids": [vector_store.id]}
            elif tool["type"] == "code_interpreter" and file_ids is not None:
                tool_resources["code_interpreter"] = {"file_ids": file_ids}

        create_kwargs["tools"] = tools
        if tool_resources:
            create_kwargs["tool_resources"] = tool_resources
    else:
        # V1 API: reject V2-only options instead of silently ignoring them
        # (forwards compatibility is not supported).
        if "tool_resources" in assistant_config:
            raise ValueError("`tool_resources` argument are not supported in the openai assistant V1 API.")
        if any(tool["type"] == "file_search" for tool in tools):
            raise ValueError(
                "`file_search` tool are not supported in the openai assistant V1 API, please use `retrieval`."
            )
        create_kwargs["tools"] = tools
        create_kwargs["file_ids"] = assistant_config.get("file_ids", [])

    logging.info(f"Creating assistant with config: {create_kwargs}")
    return client.beta.assistants.create(name=name, instructions=instructions, model=model, **create_kwargs)
782
-
783
-
784
def update_gpt_assistant(client: OpenAI, assistant_id: str, assistant_config: Dict[str, Any]) -> Assistant:
    """Update openai gpt assistant with the fields present in assistant_config."""
    api_version = detect_gpt_assistant_api_version()
    update_kwargs = {}

    # Only forward fields the caller actually supplied.
    for field in ("tools", "instructions"):
        if assistant_config.get(field) is not None:
            update_kwargs[field] = assistant_config[field]

    # `tool_resources` replaced `file_ids` in the V2 assistant API.
    versioned_field = "tool_resources" if api_version == "v2" else "file_ids"
    if assistant_config.get(versioned_field) is not None:
        update_kwargs[versioned_field] = assistant_config[versioned_field]

    return client.beta.assistants.update(assistant_id=assistant_id, **update_kwargs)
804
-
805
-
806
- def _satisfies(config_value: Any, acceptable_values: Any) -> bool:
807
- if isinstance(config_value, list):
808
- return bool(set(config_value) & set(acceptable_values)) # Non-empty intersection
809
- else:
810
- return config_value in acceptable_values