deepbuilder 0.0.1__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- deepbuilder-0.0.1/LICENSE +21 -0
- deepbuilder-0.0.1/PKG-INFO +356 -0
- deepbuilder-0.0.1/README.md +311 -0
- deepbuilder-0.0.1/deepbuilder.egg-info/PKG-INFO +356 -0
- deepbuilder-0.0.1/deepbuilder.egg-info/SOURCES.txt +66 -0
- deepbuilder-0.0.1/deepbuilder.egg-info/dependency_links.txt +1 -0
- deepbuilder-0.0.1/deepbuilder.egg-info/entry_points.txt +2 -0
- deepbuilder-0.0.1/deepbuilder.egg-info/requires.txt +36 -0
- deepbuilder-0.0.1/deepbuilder.egg-info/top_level.txt +1 -0
- deepbuilder-0.0.1/deepbuilder.egg-info/zip-safe +1 -0
- deepbuilder-0.0.1/openai/__init__.py +104 -0
- deepbuilder-0.0.1/openai/_openai_scripts.py +89 -0
- deepbuilder-0.0.1/openai/api_requestor.py +778 -0
- deepbuilder-0.0.1/openai/api_resources/__init__.py +14 -0
- deepbuilder-0.0.1/openai/api_resources/abstract/__init__.py +10 -0
- deepbuilder-0.0.1/openai/api_resources/abstract/api_resource.py +172 -0
- deepbuilder-0.0.1/openai/api_resources/abstract/createable_api_resource.py +98 -0
- deepbuilder-0.0.1/openai/api_resources/abstract/deletable_api_resource.py +48 -0
- deepbuilder-0.0.1/openai/api_resources/abstract/engine_api_resource.py +325 -0
- deepbuilder-0.0.1/openai/api_resources/abstract/listable_api_resource.py +95 -0
- deepbuilder-0.0.1/openai/api_resources/abstract/nested_resource_class_methods.py +154 -0
- deepbuilder-0.0.1/openai/api_resources/abstract/updateable_api_resource.py +16 -0
- deepbuilder-0.0.1/openai/api_resources/audio.py +269 -0
- deepbuilder-0.0.1/openai/api_resources/chat_completion.py +50 -0
- deepbuilder-0.0.1/openai/api_resources/completion.py +50 -0
- deepbuilder-0.0.1/openai/api_resources/customer.py +17 -0
- deepbuilder-0.0.1/openai/api_resources/deployment.py +119 -0
- deepbuilder-0.0.1/openai/api_resources/edit.py +57 -0
- deepbuilder-0.0.1/openai/api_resources/embedding.py +91 -0
- deepbuilder-0.0.1/openai/api_resources/engine.py +50 -0
- deepbuilder-0.0.1/openai/api_resources/error_object.py +28 -0
- deepbuilder-0.0.1/openai/api_resources/experimental/__init__.py +3 -0
- deepbuilder-0.0.1/openai/api_resources/experimental/completion_config.py +11 -0
- deepbuilder-0.0.1/openai/api_resources/file.py +261 -0
- deepbuilder-0.0.1/openai/api_resources/fine_tune.py +204 -0
- deepbuilder-0.0.1/openai/api_resources/image.py +273 -0
- deepbuilder-0.0.1/openai/api_resources/model.py +5 -0
- deepbuilder-0.0.1/openai/api_resources/moderation.py +45 -0
- deepbuilder-0.0.1/openai/cli.py +1136 -0
- deepbuilder-0.0.1/openai/datalib/__init__.py +14 -0
- deepbuilder-0.0.1/openai/datalib/common.py +17 -0
- deepbuilder-0.0.1/openai/datalib/numpy_helper.py +15 -0
- deepbuilder-0.0.1/openai/datalib/pandas_helper.py +15 -0
- deepbuilder-0.0.1/openai/embeddings_utils.py +252 -0
- deepbuilder-0.0.1/openai/error.py +169 -0
- deepbuilder-0.0.1/openai/object_classes.py +11 -0
- deepbuilder-0.0.1/openai/openai_object.py +347 -0
- deepbuilder-0.0.1/openai/openai_response.py +31 -0
- deepbuilder-0.0.1/openai/py.typed +0 -0
- deepbuilder-0.0.1/openai/tests/__init__.py +0 -0
- deepbuilder-0.0.1/openai/tests/asyncio/__init__.py +0 -0
- deepbuilder-0.0.1/openai/tests/asyncio/test_endpoints.py +90 -0
- deepbuilder-0.0.1/openai/tests/test_api_requestor.py +101 -0
- deepbuilder-0.0.1/openai/tests/test_endpoints.py +118 -0
- deepbuilder-0.0.1/openai/tests/test_exceptions.py +40 -0
- deepbuilder-0.0.1/openai/tests/test_file_cli.py +39 -0
- deepbuilder-0.0.1/openai/tests/test_long_examples_validator.py +54 -0
- deepbuilder-0.0.1/openai/tests/test_url_composition.py +209 -0
- deepbuilder-0.0.1/openai/tests/test_util.py +55 -0
- deepbuilder-0.0.1/openai/upload_progress.py +52 -0
- deepbuilder-0.0.1/openai/util.py +188 -0
- deepbuilder-0.0.1/openai/validators.py +852 -0
- deepbuilder-0.0.1/openai/version.py +1 -0
- deepbuilder-0.0.1/openai/wandb_logger.py +300 -0
- deepbuilder-0.0.1/pyproject.toml +13 -0
- deepbuilder-0.0.1/setup.cfg +70 -0
- deepbuilder-0.0.1/setup.py +3 -0
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
The MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) OpenAI (https://openai.com)
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in
|
|
13
|
+
all copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|
21
|
+
THE SOFTWARE.
|
|
@@ -0,0 +1,356 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: deepbuilder
|
|
3
|
+
Version: 0.0.1
|
|
4
|
+
Summary: Python client library for the deepbuilder API
|
|
5
|
+
Home-page: https://github.com/open-lm/openai-python
|
|
6
|
+
Author: deepbuilder
|
|
7
|
+
Author-email: support@gensei.ai
|
|
8
|
+
Classifier: Programming Language :: Python :: 3
|
|
9
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
10
|
+
Classifier: Operating System :: OS Independent
|
|
11
|
+
Requires-Python: >=3.7.1
|
|
12
|
+
Description-Content-Type: text/markdown
|
|
13
|
+
License-File: LICENSE
|
|
14
|
+
Requires-Dist: requests>=2.20
|
|
15
|
+
Requires-Dist: tqdm
|
|
16
|
+
Requires-Dist: typing_extensions; python_version < "3.8"
|
|
17
|
+
Requires-Dist: aiohttp
|
|
18
|
+
Provides-Extra: dev
|
|
19
|
+
Requires-Dist: black~=21.6b0; extra == "dev"
|
|
20
|
+
Requires-Dist: pytest==6.*; extra == "dev"
|
|
21
|
+
Requires-Dist: pytest-asyncio; extra == "dev"
|
|
22
|
+
Requires-Dist: pytest-mock; extra == "dev"
|
|
23
|
+
Provides-Extra: datalib
|
|
24
|
+
Requires-Dist: numpy; extra == "datalib"
|
|
25
|
+
Requires-Dist: pandas>=1.2.3; extra == "datalib"
|
|
26
|
+
Requires-Dist: pandas-stubs>=1.1.0.11; extra == "datalib"
|
|
27
|
+
Requires-Dist: openpyxl>=3.0.7; extra == "datalib"
|
|
28
|
+
Provides-Extra: wandb
|
|
29
|
+
Requires-Dist: wandb; extra == "wandb"
|
|
30
|
+
Requires-Dist: numpy; extra == "wandb"
|
|
31
|
+
Requires-Dist: pandas>=1.2.3; extra == "wandb"
|
|
32
|
+
Requires-Dist: pandas-stubs>=1.1.0.11; extra == "wandb"
|
|
33
|
+
Requires-Dist: openpyxl>=3.0.7; extra == "wandb"
|
|
34
|
+
Provides-Extra: embeddings
|
|
35
|
+
Requires-Dist: scikit-learn>=1.0.2; extra == "embeddings"
|
|
36
|
+
Requires-Dist: tenacity>=8.0.1; extra == "embeddings"
|
|
37
|
+
Requires-Dist: matplotlib; extra == "embeddings"
|
|
38
|
+
Requires-Dist: plotly; extra == "embeddings"
|
|
39
|
+
Requires-Dist: numpy; extra == "embeddings"
|
|
40
|
+
Requires-Dist: scipy; extra == "embeddings"
|
|
41
|
+
Requires-Dist: pandas>=1.2.3; extra == "embeddings"
|
|
42
|
+
Requires-Dist: pandas-stubs>=1.1.0.11; extra == "embeddings"
|
|
43
|
+
Requires-Dist: openpyxl>=3.0.7; extra == "embeddings"
|
|
44
|
+
Dynamic: license-file
|
|
45
|
+
|
|
46
|
+
# OpenAI Python Library
|
|
47
|
+
|
|
48
|
+
The OpenAI Python library provides convenient access to the OpenAI API
|
|
49
|
+
from applications written in the Python language. It includes a
|
|
50
|
+
pre-defined set of classes for API resources that initialize
|
|
51
|
+
themselves dynamically from API responses which makes it compatible
|
|
52
|
+
with a wide range of versions of the OpenAI API.
|
|
53
|
+
|
|
54
|
+
You can find usage examples for the OpenAI Python library in our [API reference](https://beta.openai.com/docs/api-reference?lang=python) and the [OpenAI Cookbook](https://github.com/openai/openai-cookbook/).
|
|
55
|
+
|
|
56
|
+
## Installation
|
|
57
|
+
|
|
58
|
+
You don't need this source code unless you want to modify the package. If you just
|
|
59
|
+
want to use the package, just run:
|
|
60
|
+
|
|
61
|
+
```sh
|
|
62
|
+
pip install --upgrade deepbuilder
|
|
63
|
+
```
|
|
64
|
+
|
|
65
|
+
Install from source with:
|
|
66
|
+
|
|
67
|
+
```sh
|
|
68
|
+
python setup.py install
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
### Optional dependencies
|
|
72
|
+
|
|
73
|
+
Install dependencies for [`openai.embeddings_utils`](openai/embeddings_utils.py):
|
|
74
|
+
|
|
75
|
+
```sh
|
|
76
|
+
pip install openai[embeddings]
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
Install support for [Weights & Biases](https://wandb.me/openai-docs):
|
|
80
|
+
|
|
81
|
+
```
|
|
82
|
+
pip install openai[wandb]
|
|
83
|
+
```
|
|
84
|
+
|
|
85
|
+
Data libraries like `numpy` and `pandas` are not installed by default due to their size. They’re needed for some functionality of this library, but generally not for talking to the API. If you encounter a `MissingDependencyError`, install them with:
|
|
86
|
+
|
|
87
|
+
```sh
|
|
88
|
+
pip install openai[datalib]
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
## Usage
|
|
92
|
+
|
|
93
|
+
The library needs to be configured with your account's secret key which is available on the [website](https://platform.openai.com/account/api-keys). Either set it as the `OPENAI_API_KEY` environment variable before using the library:
|
|
94
|
+
|
|
95
|
+
```bash
|
|
96
|
+
export OPENAI_API_KEY='sk-...'
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
Or set `openai.api_key` to its value:
|
|
100
|
+
|
|
101
|
+
```python
|
|
102
|
+
import openai
|
|
103
|
+
openai.api_key = "sk-..."
|
|
104
|
+
|
|
105
|
+
# list models
|
|
106
|
+
models = openai.Model.list()
|
|
107
|
+
|
|
108
|
+
# print the first model's id
|
|
109
|
+
print(models.data[0].id)
|
|
110
|
+
|
|
111
|
+
# create a chat completion
|
|
112
|
+
chat_completion = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello world"}])
|
|
113
|
+
|
|
114
|
+
# print the chat completion
|
|
115
|
+
print(chat_completion.choices[0].message.content)
|
|
116
|
+
```
|
|
117
|
+
|
|
118
|
+
### Params
|
|
119
|
+
|
|
120
|
+
All endpoints have a `.create` method that supports a `request_timeout` param. This param takes a `Union[float, Tuple[float, float]]` and will raise an `openai.error.Timeout` error if the request exceeds that time in seconds (See: https://requests.readthedocs.io/en/latest/user/quickstart/#timeouts).
|
|
121
|
+
|
|
122
|
+
### Microsoft Azure Endpoints
|
|
123
|
+
|
|
124
|
+
In order to use the library with Microsoft Azure endpoints, you need to set the `api_type`, `api_base` and `api_version` in addition to the `api_key`. The `api_type` must be set to 'azure' and the others correspond to the properties of your endpoint.
|
|
125
|
+
In addition, the deployment name must be passed as the engine parameter.
|
|
126
|
+
|
|
127
|
+
```python
|
|
128
|
+
import openai
|
|
129
|
+
openai.api_type = "azure"
|
|
130
|
+
openai.api_key = "..."
|
|
131
|
+
openai.api_base = "https://example-endpoint.openai.azure.com"
|
|
132
|
+
openai.api_version = "2023-05-15"
|
|
133
|
+
|
|
134
|
+
# create a chat completion
|
|
135
|
+
chat_completion = openai.ChatCompletion.create(deployment_id="deployment-name", model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello world"}])
|
|
136
|
+
|
|
137
|
+
# print the completion
|
|
138
|
+
print(chat_completion.choices[0].message.content)
|
|
139
|
+
```
|
|
140
|
+
|
|
141
|
+
Please note that for the moment, the Microsoft Azure endpoints can only be used for completion, embedding, and fine-tuning operations.
|
|
142
|
+
For a detailed example of how to use fine-tuning and other operations using Azure endpoints, please check out the following Jupyter notebooks:
|
|
143
|
+
|
|
144
|
+
- [Using Azure completions](https://github.com/openai/openai-cookbook/tree/main/examples/azure/completions.ipynb)
|
|
145
|
+
- [Using Azure fine-tuning](https://github.com/openai/openai-cookbook/tree/main/examples/azure/finetuning.ipynb)
|
|
146
|
+
- [Using Azure embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/azure/embeddings.ipynb)
|
|
147
|
+
|
|
148
|
+
### Microsoft Azure Active Directory Authentication
|
|
149
|
+
|
|
150
|
+
In order to use Microsoft Active Directory to authenticate to your Azure endpoint, you need to set the `api_type` to "azure_ad" and pass the acquired credential token to `api_key`. The rest of the parameters need to be set as specified in the previous section.
|
|
151
|
+
|
|
152
|
+
```python
|
|
153
|
+
from azure.identity import DefaultAzureCredential
|
|
154
|
+
import openai
|
|
155
|
+
|
|
156
|
+
# Request credential
|
|
157
|
+
default_credential = DefaultAzureCredential()
|
|
158
|
+
token = default_credential.get_token("https://cognitiveservices.azure.com/.default")
|
|
159
|
+
|
|
160
|
+
# Setup parameters
|
|
161
|
+
openai.api_type = "azure_ad"
|
|
162
|
+
openai.api_key = token.token
|
|
163
|
+
openai.api_base = "https://example-endpoint.openai.azure.com/"
|
|
164
|
+
openai.api_version = "2023-05-15"
|
|
165
|
+
|
|
166
|
+
# ...
|
|
167
|
+
```
|
|
168
|
+
|
|
169
|
+
### Command-line interface
|
|
170
|
+
|
|
171
|
+
This library additionally provides an `openai` command-line utility
|
|
172
|
+
which makes it easy to interact with the API from your terminal. Run
|
|
173
|
+
`openai api -h` for usage.
|
|
174
|
+
|
|
175
|
+
```sh
|
|
176
|
+
# list models
|
|
177
|
+
openai api models.list
|
|
178
|
+
|
|
179
|
+
# create a chat completion (gpt-3.5-turbo, gpt-4, etc.)
|
|
180
|
+
openai api chat_completions.create -m gpt-3.5-turbo -g user "Hello world"
|
|
181
|
+
|
|
182
|
+
# create a completion (text-davinci-003, text-davinci-002, ada, babbage, curie, davinci, etc.)
|
|
183
|
+
openai api completions.create -m ada -p "Hello world"
|
|
184
|
+
|
|
185
|
+
# generate images via DALL·E API
|
|
186
|
+
openai api image.create -p "two dogs playing chess, cartoon" -n 1
|
|
187
|
+
|
|
188
|
+
# using openai through a proxy
|
|
189
|
+
openai --proxy=http://proxy.com api models.list
|
|
190
|
+
```
|
|
191
|
+
|
|
192
|
+
## Example code
|
|
193
|
+
|
|
194
|
+
Examples of how to use this Python library to accomplish various tasks can be found in the [OpenAI Cookbook](https://github.com/openai/openai-cookbook/). It contains code examples for:
|
|
195
|
+
|
|
196
|
+
- Classification using fine-tuning
|
|
197
|
+
- Clustering
|
|
198
|
+
- Code search
|
|
199
|
+
- Customizing embeddings
|
|
200
|
+
- Question answering from a corpus of documents
|
|
201
|
+
- Recommendations
|
|
202
|
+
- Visualization of embeddings
|
|
203
|
+
- And more
|
|
204
|
+
|
|
205
|
+
Prior to July 2022, this OpenAI Python library hosted code examples in its examples folder, but since then all examples have been migrated to the [OpenAI Cookbook](https://github.com/openai/openai-cookbook/).
|
|
206
|
+
|
|
207
|
+
### Chat Completions
|
|
208
|
+
|
|
209
|
+
Conversational models such as `gpt-3.5-turbo` can be called using the chat completions endpoint.
|
|
210
|
+
|
|
211
|
+
```python
|
|
212
|
+
import openai
|
|
213
|
+
openai.api_key = "sk-..." # supply your API key however you choose
|
|
214
|
+
|
|
215
|
+
completion = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello world"}])
|
|
216
|
+
print(completion.choices[0].message.content)
|
|
217
|
+
```
|
|
218
|
+
|
|
219
|
+
### Completions
|
|
220
|
+
|
|
221
|
+
Text models such as `text-davinci-003`, `text-davinci-002` and earlier (`ada`, `babbage`, `curie`, `davinci`, etc.) can be called using the completions endpoint.
|
|
222
|
+
|
|
223
|
+
```python
|
|
224
|
+
import openai
|
|
225
|
+
openai.api_key = "sk-..." # supply your API key however you choose
|
|
226
|
+
|
|
227
|
+
completion = openai.Completion.create(model="text-davinci-003", prompt="Hello world")
|
|
228
|
+
print(completion.choices[0].text)
|
|
229
|
+
```
|
|
230
|
+
|
|
231
|
+
### Embeddings
|
|
232
|
+
|
|
233
|
+
In the OpenAI Python library, an embedding represents a text string as a fixed-length vector of floating point numbers. Embeddings are designed to measure the similarity or relevance between text strings.
|
|
234
|
+
|
|
235
|
+
To get an embedding for a text string, you can use the embeddings method as follows in Python:
|
|
236
|
+
|
|
237
|
+
```python
|
|
238
|
+
import openai
|
|
239
|
+
openai.api_key = "sk-..." # supply your API key however you choose
|
|
240
|
+
|
|
241
|
+
# choose text to embed
|
|
242
|
+
text_string = "sample text"
|
|
243
|
+
|
|
244
|
+
# choose an embedding
|
|
245
|
+
model_id = "text-similarity-davinci-001"
|
|
246
|
+
|
|
247
|
+
# compute the embedding of the text
|
|
248
|
+
embedding = openai.Embedding.create(input=text_string, model=model_id)['data'][0]['embedding']
|
|
249
|
+
```
|
|
250
|
+
|
|
251
|
+
An example of how to call the embeddings method is shown in this [get embeddings notebook](https://github.com/openai/openai-cookbook/blob/main/examples/Get_embeddings.ipynb).
|
|
252
|
+
|
|
253
|
+
Examples of how to use embeddings are shared in the following Jupyter notebooks:
|
|
254
|
+
|
|
255
|
+
- [Classification using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Classification_using_embeddings.ipynb)
|
|
256
|
+
- [Clustering using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Clustering.ipynb)
|
|
257
|
+
- [Code search using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Code_search.ipynb)
|
|
258
|
+
- [Semantic text search using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Semantic_text_search_using_embeddings.ipynb)
|
|
259
|
+
- [User and product embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/User_and_product_embeddings.ipynb)
|
|
260
|
+
- [Zero-shot classification using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Zero-shot_classification_with_embeddings.ipynb)
|
|
261
|
+
- [Recommendation using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Recommendation_using_embeddings.ipynb)
|
|
262
|
+
|
|
263
|
+
For more information on embeddings and the types of embeddings OpenAI offers, read the [embeddings guide](https://beta.openai.com/docs/guides/embeddings) in the OpenAI documentation.
|
|
264
|
+
|
|
265
|
+
### Fine-tuning
|
|
266
|
+
|
|
267
|
+
Fine-tuning a model on training data can both improve the results (by giving the model more examples to learn from) and reduce the cost/latency of API calls (chiefly through reducing the need to include training examples in prompts).
|
|
268
|
+
|
|
269
|
+
Examples of fine-tuning are shared in the following Jupyter notebooks:
|
|
270
|
+
|
|
271
|
+
- [Classification with fine-tuning](https://github.com/openai/openai-cookbook/blob/main/examples/Fine-tuned_classification.ipynb) (a simple notebook that shows the steps required for fine-tuning)
|
|
272
|
+
- Fine-tuning a model that answers questions about the 2020 Olympics
|
|
273
|
+
- [Step 1: Collecting data](https://github.com/openai/openai-cookbook/blob/main/examples/fine-tuned_qa/olympics-1-collect-data.ipynb)
|
|
274
|
+
- [Step 2: Creating a synthetic Q&A dataset](https://github.com/openai/openai-cookbook/blob/main/examples/fine-tuned_qa/olympics-2-create-qa.ipynb)
|
|
275
|
+
- [Step 3: Train a fine-tuning model specialized for Q&A](https://github.com/openai/openai-cookbook/blob/main/examples/fine-tuned_qa/olympics-3-train-qa.ipynb)
|
|
276
|
+
|
|
277
|
+
Sync your fine-tunes to [Weights & Biases](https://wandb.me/openai-docs) to track experiments, models, and datasets in your central dashboard with:
|
|
278
|
+
|
|
279
|
+
```bash
|
|
280
|
+
openai wandb sync
|
|
281
|
+
```
|
|
282
|
+
|
|
283
|
+
For more information on fine-tuning, read the [fine-tuning guide](https://beta.openai.com/docs/guides/fine-tuning) in the OpenAI documentation.
|
|
284
|
+
|
|
285
|
+
### Moderation
|
|
286
|
+
|
|
287
|
+
OpenAI provides a Moderation endpoint that can be used to check whether content complies with the OpenAI [content policy](https://platform.openai.com/docs/usage-policies)
|
|
288
|
+
|
|
289
|
+
```python
|
|
290
|
+
import openai
|
|
291
|
+
openai.api_key = "sk-..." # supply your API key however you choose
|
|
292
|
+
|
|
293
|
+
moderation_resp = openai.Moderation.create(input="Here is some perfectly innocuous text that follows all OpenAI content policies.")
|
|
294
|
+
```
|
|
295
|
+
|
|
296
|
+
See the [moderation guide](https://platform.openai.com/docs/guides/moderation) for more details.
|
|
297
|
+
|
|
298
|
+
## Image generation (DALL·E)
|
|
299
|
+
|
|
300
|
+
```python
|
|
301
|
+
import openai
|
|
302
|
+
openai.api_key = "sk-..." # supply your API key however you choose
|
|
303
|
+
|
|
304
|
+
image_resp = openai.Image.create(prompt="two dogs playing chess, oil painting", n=4, size="512x512")
|
|
305
|
+
|
|
306
|
+
```
|
|
307
|
+
|
|
308
|
+
## Audio transcription (Whisper)
|
|
309
|
+
|
|
310
|
+
```python
|
|
311
|
+
import openai
|
|
312
|
+
openai.api_key = "sk-..." # supply your API key however you choose
|
|
313
|
+
f = open("path/to/file.mp3", "rb")
|
|
314
|
+
transcript = openai.Audio.transcribe("whisper-1", f)
|
|
315
|
+
|
|
316
|
+
```
|
|
317
|
+
|
|
318
|
+
## Async API
|
|
319
|
+
|
|
320
|
+
Async support is available in the API by prepending `a` to a network-bound method:
|
|
321
|
+
|
|
322
|
+
```python
|
|
323
|
+
import openai
|
|
324
|
+
openai.api_key = "sk-..." # supply your API key however you choose
|
|
325
|
+
|
|
326
|
+
async def create_chat_completion():
|
|
327
|
+
chat_completion_resp = await openai.ChatCompletion.acreate(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello world"}])
|
|
328
|
+
|
|
329
|
+
```
|
|
330
|
+
|
|
331
|
+
To make async requests more efficient, you can pass in your own
|
|
332
|
+
`aiohttp.ClientSession`, but you must manually close the client session at the end
|
|
333
|
+
of your program/event loop:
|
|
334
|
+
|
|
335
|
+
```python
|
|
336
|
+
import openai
|
|
337
|
+
from aiohttp import ClientSession
|
|
338
|
+
|
|
339
|
+
openai.aiosession.set(ClientSession())
|
|
340
|
+
# At the end of your program, close the http session
|
|
341
|
+
await openai.aiosession.get().close()
|
|
342
|
+
```
|
|
343
|
+
|
|
344
|
+
See the [usage guide](https://platform.openai.com/docs/guides/images) for more details.
|
|
345
|
+
|
|
346
|
+
## Requirements
|
|
347
|
+
|
|
348
|
+
- Python 3.7.1+
|
|
349
|
+
|
|
350
|
+
In general, we want to support the versions of Python that our
|
|
351
|
+
customers are using. If you run into problems with any version
|
|
352
|
+
issues, please let us know on our [support page](https://help.openai.com/en/).
|
|
353
|
+
|
|
354
|
+
## Credit
|
|
355
|
+
|
|
356
|
+
This library is forked from the [Stripe Python Library](https://github.com/stripe/stripe-python).
|
|
@@ -0,0 +1,311 @@
|
|
|
1
|
+
# OpenAI Python Library
|
|
2
|
+
|
|
3
|
+
The OpenAI Python library provides convenient access to the OpenAI API
|
|
4
|
+
from applications written in the Python language. It includes a
|
|
5
|
+
pre-defined set of classes for API resources that initialize
|
|
6
|
+
themselves dynamically from API responses which makes it compatible
|
|
7
|
+
with a wide range of versions of the OpenAI API.
|
|
8
|
+
|
|
9
|
+
You can find usage examples for the OpenAI Python library in our [API reference](https://beta.openai.com/docs/api-reference?lang=python) and the [OpenAI Cookbook](https://github.com/openai/openai-cookbook/).
|
|
10
|
+
|
|
11
|
+
## Installation
|
|
12
|
+
|
|
13
|
+
You don't need this source code unless you want to modify the package. If you just
|
|
14
|
+
want to use the package, just run:
|
|
15
|
+
|
|
16
|
+
```sh
|
|
17
|
+
pip install --upgrade deepbuilder
|
|
18
|
+
```
|
|
19
|
+
|
|
20
|
+
Install from source with:
|
|
21
|
+
|
|
22
|
+
```sh
|
|
23
|
+
python setup.py install
|
|
24
|
+
```
|
|
25
|
+
|
|
26
|
+
### Optional dependencies
|
|
27
|
+
|
|
28
|
+
Install dependencies for [`openai.embeddings_utils`](openai/embeddings_utils.py):
|
|
29
|
+
|
|
30
|
+
```sh
|
|
31
|
+
pip install openai[embeddings]
|
|
32
|
+
```
|
|
33
|
+
|
|
34
|
+
Install support for [Weights & Biases](https://wandb.me/openai-docs):
|
|
35
|
+
|
|
36
|
+
```
|
|
37
|
+
pip install openai[wandb]
|
|
38
|
+
```
|
|
39
|
+
|
|
40
|
+
Data libraries like `numpy` and `pandas` are not installed by default due to their size. They’re needed for some functionality of this library, but generally not for talking to the API. If you encounter a `MissingDependencyError`, install them with:
|
|
41
|
+
|
|
42
|
+
```sh
|
|
43
|
+
pip install openai[datalib]
|
|
44
|
+
```
|
|
45
|
+
|
|
46
|
+
## Usage
|
|
47
|
+
|
|
48
|
+
The library needs to be configured with your account's secret key which is available on the [website](https://platform.openai.com/account/api-keys). Either set it as the `OPENAI_API_KEY` environment variable before using the library:
|
|
49
|
+
|
|
50
|
+
```bash
|
|
51
|
+
export OPENAI_API_KEY='sk-...'
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
Or set `openai.api_key` to its value:
|
|
55
|
+
|
|
56
|
+
```python
|
|
57
|
+
import openai
|
|
58
|
+
openai.api_key = "sk-..."
|
|
59
|
+
|
|
60
|
+
# list models
|
|
61
|
+
models = openai.Model.list()
|
|
62
|
+
|
|
63
|
+
# print the first model's id
|
|
64
|
+
print(models.data[0].id)
|
|
65
|
+
|
|
66
|
+
# create a chat completion
|
|
67
|
+
chat_completion = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello world"}])
|
|
68
|
+
|
|
69
|
+
# print the chat completion
|
|
70
|
+
print(chat_completion.choices[0].message.content)
|
|
71
|
+
```
|
|
72
|
+
|
|
73
|
+
### Params
|
|
74
|
+
|
|
75
|
+
All endpoints have a `.create` method that supports a `request_timeout` param. This param takes a `Union[float, Tuple[float, float]]` and will raise an `openai.error.Timeout` error if the request exceeds that time in seconds (See: https://requests.readthedocs.io/en/latest/user/quickstart/#timeouts).
|
|
76
|
+
|
|
77
|
+
### Microsoft Azure Endpoints
|
|
78
|
+
|
|
79
|
+
In order to use the library with Microsoft Azure endpoints, you need to set the `api_type`, `api_base` and `api_version` in addition to the `api_key`. The `api_type` must be set to 'azure' and the others correspond to the properties of your endpoint.
|
|
80
|
+
In addition, the deployment name must be passed as the engine parameter.
|
|
81
|
+
|
|
82
|
+
```python
|
|
83
|
+
import openai
|
|
84
|
+
openai.api_type = "azure"
|
|
85
|
+
openai.api_key = "..."
|
|
86
|
+
openai.api_base = "https://example-endpoint.openai.azure.com"
|
|
87
|
+
openai.api_version = "2023-05-15"
|
|
88
|
+
|
|
89
|
+
# create a chat completion
|
|
90
|
+
chat_completion = openai.ChatCompletion.create(deployment_id="deployment-name", model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello world"}])
|
|
91
|
+
|
|
92
|
+
# print the completion
|
|
93
|
+
print(chat_completion.choices[0].message.content)
|
|
94
|
+
```
|
|
95
|
+
|
|
96
|
+
Please note that for the moment, the Microsoft Azure endpoints can only be used for completion, embedding, and fine-tuning operations.
|
|
97
|
+
For a detailed example of how to use fine-tuning and other operations using Azure endpoints, please check out the following Jupyter notebooks:
|
|
98
|
+
|
|
99
|
+
- [Using Azure completions](https://github.com/openai/openai-cookbook/tree/main/examples/azure/completions.ipynb)
|
|
100
|
+
- [Using Azure fine-tuning](https://github.com/openai/openai-cookbook/tree/main/examples/azure/finetuning.ipynb)
|
|
101
|
+
- [Using Azure embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/azure/embeddings.ipynb)
|
|
102
|
+
|
|
103
|
+
### Microsoft Azure Active Directory Authentication
|
|
104
|
+
|
|
105
|
+
In order to use Microsoft Active Directory to authenticate to your Azure endpoint, you need to set the `api_type` to "azure_ad" and pass the acquired credential token to `api_key`. The rest of the parameters need to be set as specified in the previous section.
|
|
106
|
+
|
|
107
|
+
```python
|
|
108
|
+
from azure.identity import DefaultAzureCredential
|
|
109
|
+
import openai
|
|
110
|
+
|
|
111
|
+
# Request credential
|
|
112
|
+
default_credential = DefaultAzureCredential()
|
|
113
|
+
token = default_credential.get_token("https://cognitiveservices.azure.com/.default")
|
|
114
|
+
|
|
115
|
+
# Setup parameters
|
|
116
|
+
openai.api_type = "azure_ad"
|
|
117
|
+
openai.api_key = token.token
|
|
118
|
+
openai.api_base = "https://example-endpoint.openai.azure.com/"
|
|
119
|
+
openai.api_version = "2023-05-15"
|
|
120
|
+
|
|
121
|
+
# ...
|
|
122
|
+
```
|
|
123
|
+
|
|
124
|
+
### Command-line interface
|
|
125
|
+
|
|
126
|
+
This library additionally provides an `openai` command-line utility
|
|
127
|
+
which makes it easy to interact with the API from your terminal. Run
|
|
128
|
+
`openai api -h` for usage.
|
|
129
|
+
|
|
130
|
+
```sh
|
|
131
|
+
# list models
|
|
132
|
+
openai api models.list
|
|
133
|
+
|
|
134
|
+
# create a chat completion (gpt-3.5-turbo, gpt-4, etc.)
|
|
135
|
+
openai api chat_completions.create -m gpt-3.5-turbo -g user "Hello world"
|
|
136
|
+
|
|
137
|
+
# create a completion (text-davinci-003, text-davinci-002, ada, babbage, curie, davinci, etc.)
|
|
138
|
+
openai api completions.create -m ada -p "Hello world"
|
|
139
|
+
|
|
140
|
+
# generate images via DALL·E API
|
|
141
|
+
openai api image.create -p "two dogs playing chess, cartoon" -n 1
|
|
142
|
+
|
|
143
|
+
# using openai through a proxy
|
|
144
|
+
openai --proxy=http://proxy.com api models.list
|
|
145
|
+
```
|
|
146
|
+
|
|
147
|
+
## Example code
|
|
148
|
+
|
|
149
|
+
Examples of how to use this Python library to accomplish various tasks can be found in the [OpenAI Cookbook](https://github.com/openai/openai-cookbook/). It contains code examples for:
|
|
150
|
+
|
|
151
|
+
- Classification using fine-tuning
|
|
152
|
+
- Clustering
|
|
153
|
+
- Code search
|
|
154
|
+
- Customizing embeddings
|
|
155
|
+
- Question answering from a corpus of documents
|
|
156
|
+
- Recommendations
|
|
157
|
+
- Visualization of embeddings
|
|
158
|
+
- And more
|
|
159
|
+
|
|
160
|
+
Prior to July 2022, this OpenAI Python library hosted code examples in its examples folder, but since then all examples have been migrated to the [OpenAI Cookbook](https://github.com/openai/openai-cookbook/).
|
|
161
|
+
|
|
162
|
+
### Chat Completions
|
|
163
|
+
|
|
164
|
+
Conversational models such as `gpt-3.5-turbo` can be called using the chat completions endpoint.
|
|
165
|
+
|
|
166
|
+
```python
|
|
167
|
+
import openai
|
|
168
|
+
openai.api_key = "sk-..." # supply your API key however you choose
|
|
169
|
+
|
|
170
|
+
completion = openai.ChatCompletion.create(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello world"}])
|
|
171
|
+
print(completion.choices[0].message.content)
|
|
172
|
+
```
|
|
173
|
+
|
|
174
|
+
### Completions

Text models such as `text-davinci-003`, `text-davinci-002` and earlier (`ada`, `babbage`, `curie`, `davinci`, etc.) can be called using the completions endpoint.

```python
import openai
openai.api_key = "sk-..."  # supply your API key however you choose

completion = openai.Completion.create(model="text-davinci-003", prompt="Hello world")
print(completion.choices[0].text)
```
### Embeddings

In the OpenAI Python library, an embedding represents a text string as a fixed-length vector of floating point numbers. Embeddings are designed to measure the similarity or relevance between text strings.

To get an embedding for a text string, you can use the embeddings method as follows in Python:

```python
import openai
openai.api_key = "sk-..."  # supply your API key however you choose

# choose text to embed
text_string = "sample text"

# choose an embedding
model_id = "text-similarity-davinci-001"

# compute the embedding of the text
embedding = openai.Embedding.create(input=text_string, model=model_id)['data'][0]['embedding']
```

An example of how to call the embeddings method is shown in this [get embeddings notebook](https://github.com/openai/openai-cookbook/blob/main/examples/Get_embeddings.ipynb).

Examples of how to use embeddings are shared in the following Jupyter notebooks:

- [Classification using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Classification_using_embeddings.ipynb)
- [Clustering using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Clustering.ipynb)
- [Code search using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Code_search.ipynb)
- [Semantic text search using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Semantic_text_search_using_embeddings.ipynb)
- [User and product embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/User_and_product_embeddings.ipynb)
- [Zero-shot classification using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Zero-shot_classification_with_embeddings.ipynb)
- [Recommendation using embeddings](https://github.com/openai/openai-cookbook/blob/main/examples/Recommendation_using_embeddings.ipynb)

For more information on embeddings and the types of embeddings OpenAI offers, read the [embeddings guide](https://beta.openai.com/docs/guides/embeddings) in the OpenAI documentation.
### Fine-tuning

Fine-tuning a model on training data can both improve the results (by giving the model more examples to learn from) and reduce the cost/latency of API calls (chiefly through reducing the need to include training examples in prompts).

Examples of fine-tuning are shared in the following Jupyter notebooks:

- [Classification with fine-tuning](https://github.com/openai/openai-cookbook/blob/main/examples/Fine-tuned_classification.ipynb) (a simple notebook that shows the steps required for fine-tuning)
- Fine-tuning a model that answers questions about the 2020 Olympics
  - [Step 1: Collecting data](https://github.com/openai/openai-cookbook/blob/main/examples/fine-tuned_qa/olympics-1-collect-data.ipynb)
  - [Step 2: Creating a synthetic Q&A dataset](https://github.com/openai/openai-cookbook/blob/main/examples/fine-tuned_qa/olympics-2-create-qa.ipynb)
  - [Step 3: Train a fine-tuning model specialized for Q&A](https://github.com/openai/openai-cookbook/blob/main/examples/fine-tuned_qa/olympics-3-train-qa.ipynb)

Sync your fine-tunes to [Weights & Biases](https://wandb.me/openai-docs) to track experiments, models, and datasets in your central dashboard with:

```bash
openai wandb sync
```

For more information on fine-tuning, read the [fine-tuning guide](https://beta.openai.com/docs/guides/fine-tuning) in the OpenAI documentation.
### Moderation

OpenAI provides a Moderation endpoint that can be used to check whether content complies with the OpenAI [content policy](https://platform.openai.com/docs/usage-policies).

```python
import openai
openai.api_key = "sk-..."  # supply your API key however you choose

moderation_resp = openai.Moderation.create(input="Here is some perfectly innocuous text that follows all OpenAI content policies.")
```

See the [moderation guide](https://platform.openai.com/docs/guides/moderation) for more details.
## Image generation (DALL·E)

```python
import openai
openai.api_key = "sk-..."  # supply your API key however you choose

image_resp = openai.Image.create(prompt="two dogs playing chess, oil painting", n=4, size="512x512")
```
## Audio transcription (Whisper)

```python
import openai
openai.api_key = "sk-..."  # supply your API key however you choose
f = open("path/to/file.mp3", "rb")
transcript = openai.Audio.transcribe("whisper-1", f)
```
## Async API

Async support is available in the API by prepending `a` to a network-bound method:

```python
import openai
openai.api_key = "sk-..."  # supply your API key however you choose

async def create_chat_completion():
    chat_completion_resp = await openai.ChatCompletion.acreate(model="gpt-3.5-turbo", messages=[{"role": "user", "content": "Hello world"}])
```

To make async requests more efficient, you can pass in your own
`aiohttp.ClientSession`, but you must manually close the client session at the end
of your program/event loop:

```python
import openai
from aiohttp import ClientSession

openai.aiosession.set(ClientSession())
# At the end of your program, close the http session
await openai.aiosession.get().close()
```

See the [usage guide](https://platform.openai.com/docs/guides/images) for more details.
## Requirements

- Python 3.7.1+

In general, we want to support the versions of Python that our
customers are using. If you run into problems with any version
issues, please let us know on our [support page](https://help.openai.com/en/).

## Credit

This library is forked from the [Stripe Python Library](https://github.com/stripe/stripe-python).