freeplay 0.2.27__py3-none-any.whl → 0.2.30__py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
freeplay/completions.py CHANGED
@@ -1,17 +1,22 @@
  from dataclasses import dataclass
- from typing import Any, Dict, List, Optional
+ from typing import Any, Dict, List, Optional, TypedDict
+
+ from openai.types.chat.chat_completion_chunk import ChoiceDeltaFunctionCall
+ from openai.types.chat.chat_completion_message import FunctionCall

  from .llm_parameters import LLMParameters

- ChatMessage = Dict[str, str]
- OpenAIFunctionCall = Dict[str, str]
+
+ class ChatMessage(TypedDict):
+     role: str
+     content: str


  @dataclass
  class CompletionResponse:
      content: str
      is_complete: bool
-     openai_function_call: Optional[OpenAIFunctionCall] = None
+     openai_function_call: Optional[FunctionCall] = None


  @dataclass
@@ -44,4 +49,4 @@ class PromptTemplates:
  class CompletionChunk:
      text: str
      is_complete: bool
-     openai_function_call: Optional[OpenAIFunctionCall] = None
+     openai_function_call: Optional[ChoiceDeltaFunctionCall] = None
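
The typing changes above replace the loose Dict aliases with concrete types: ChatMessage is now a TypedDict with required role and content keys, and the function-call fields use the openai package's own types. A minimal usage sketch, not part of the diff (the message values are illustrative):

    # Sketch: the new ChatMessage TypedDict lets type checkers verify message keys.
    from freeplay.completions import ChatMessage

    message: ChatMessage = {"role": "user", "content": "Hello!"}   # OK
    incomplete: ChatMessage = {"role": "user"}                     # flagged by mypy: "content" is missing
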
freeplay/flavors.py CHANGED
@@ -1,17 +1,18 @@
  import json
  from abc import abstractmethod, ABC
  from copy import copy
- from typing import Any, Dict, Generator, List, Optional
+ from typing import cast, Any, Dict, Generator, List, Optional, Union

- import anthropic  # type: ignore
+ import anthropic
  import openai
- from openai.error import AuthenticationError, InvalidRequestError
+ from openai import AuthenticationError, BadRequestError, Stream
+ from openai.types.chat import ChatCompletion, ChatCompletionChunk, ChatCompletionMessageParam

  from .completions import CompletionChunk, PromptTemplateWithMetadata, CompletionResponse, ChatCompletionResponse, \
      ChatMessage
  from .errors import FreeplayConfigurationError, LLMClientError, LLMServerError, FreeplayError
  from .llm_parameters import LLMParameters
- from .provider_config import ProviderConfig, OpenAIConfig, AnthropicConfig
+ from .provider_config import AnthropicConfig, AzureConfig, OpenAIConfig, ProviderConfig
  from .utils import format_template_variables


@@ -89,45 +90,17 @@ class ChatFlavor(Flavor, ABC):
          pass


- class OpenAI(Flavor, ABC):
-     def configure_openai(
-             self,
-             openai_config: Optional[OpenAIConfig],
-             api_base: Optional[str] = None,
-             api_version: Optional[str] = None,
-             api_type: Optional[str] = None,
-     ) -> None:
-         super().__init__()
-         if not openai_config:
-             raise FreeplayConfigurationError(
-                 "Missing OpenAI key. Use a ProviderConfig to specify keys prior to getting completion.")
-
-         if api_base:
-             openai.api_base = api_base
-         elif openai_config.api_base:
-             openai.api_base = openai_config.api_base
-
-         if api_type:
-             openai.api_type = api_type
-
-         if api_version:
-             openai.api_version = api_version
-
-         if not openai_config.api_key or not openai_config.api_key.strip():
-             raise FreeplayConfigurationError("OpenAI API key is not set. It must be set to make calls to the service.")
-
-         openai.api_key = openai_config.api_key
+ class OpenAIChatFlavor(ChatFlavor, ABC):

-     @property
-     def provider(self) -> str:
-         return "openai"
-
-
- class OpenAIChat(OpenAI, ChatFlavor):
-     record_format_type = "openai_chat"
-     _model_params_with_defaults = LLMParameters({
-         "model": "gpt-3.5-turbo"
-     })
+     @abstractmethod
+     def _call_openai(
+             self,
+             messages: List[ChatMessage],
+             provider_config: ProviderConfig,
+             llm_parameters: LLMParameters,
+             stream: bool
+     ) -> Union[ChatCompletion, openai.Stream[ChatCompletionChunk]]:
+         pass

      def format(self, prompt_template: PromptTemplateWithMetadata, variables: Dict[str, str]) -> str:
          # Extract messages JSON to enable formatting of individual content fields of each message. If we do not
@@ -146,11 +119,12 @@ class OpenAIChat(OpenAI, ChatFlavor):
              llm_parameters: LLMParameters
      ) -> CompletionResponse:
          messages = json.loads(formatted_prompt)
-         completion = self._call_openai(messages, provider_config, llm_parameters, stream=False)
+         completion = cast(ChatCompletion, self._call_openai(messages, provider_config, llm_parameters, stream=False))
+
          return CompletionResponse(
              content=completion.choices[0].message.content or '',
              is_complete=completion.choices[0].finish_reason == 'stop',
-             openai_function_call=completion.choices[0].message.get('function_call')
+             openai_function_call=completion.choices[0].message.function_call,
          )

      def call_service_stream(
@@ -160,12 +134,13 @@ class OpenAIChat(OpenAI, ChatFlavor):
              llm_parameters: LLMParameters
      ) -> Generator[CompletionChunk, None, None]:
          messages = json.loads(formatted_prompt)
-         completion_stream = self._call_openai(messages, provider_config, llm_parameters, stream=True)
+         completion_stream = cast(Stream[ChatCompletionChunk],
+                                  self._call_openai(messages, provider_config, llm_parameters, stream=True))
          for chunk in completion_stream:
              yield CompletionChunk(
-                 text=chunk.choices[0].delta.get('content') or '',
+                 text=chunk.choices[0].delta.content or '',
                  is_complete=chunk.choices[0].finish_reason == 'stop',
-                 openai_function_call=chunk.choices[0].delta.get('function_call')
+                 openai_function_call=chunk.choices[0].delta.function_call
              )

      def continue_chat(
@@ -174,12 +149,16 @@ class OpenAIChat(OpenAI, ChatFlavor):
              provider_config: ProviderConfig,
              llm_parameters: LLMParameters
      ) -> ChatCompletionResponse:
-         completion = self._call_openai(messages, provider_config, llm_parameters, stream=False)
+         completion = cast(ChatCompletion, self._call_openai(messages, provider_config, llm_parameters, stream=False))

          message_history = copy(messages)
-         message_history.append(completion.choices[0].message.to_dict())
+         message = completion.choices[0].message
+         message_history.append({
+             "role": message.role or '',
+             "content": message.content or ''
+         })
          return ChatCompletionResponse(
-             content=completion.choices[0].message.content,
+             content=message.content or '',
              message_history=message_history,
              is_complete=completion.choices[0].finish_reason == "stop"
          )
@@ -190,37 +169,91 @@ class OpenAIChat(OpenAI, ChatFlavor):
              provider_config: ProviderConfig,
              llm_parameters: LLMParameters
      ) -> Generator[CompletionChunk, None, None]:
-         completion_stream = self._call_openai(messages, provider_config, llm_parameters, stream=True)
+         completion_stream = cast(Stream[ChatCompletionChunk],
+                                  self._call_openai(messages, provider_config, llm_parameters, stream=True))
          for chunk in completion_stream:
              yield CompletionChunk(
-                 text=chunk.choices[0].delta.get('content', ''),
+                 text=chunk.choices[0].delta.content or '',
                  is_complete=chunk.choices[0].finish_reason == "stop"
              )

+
+ class OpenAIChat(OpenAIChatFlavor):
+     record_format_type = "openai_chat"
+     _model_params_with_defaults = LLMParameters({
+         "model": "gpt-3.5-turbo"
+     })
+
+     def __init__(self) -> None:
+         self.client: Optional[openai.OpenAI] = None
+
+     @property
+     def provider(self) -> str:
+         return "openai"
+
+     def get_openai_client(self, openai_config: Optional[OpenAIConfig]) -> openai.OpenAI:
+         if self.client:
+             return self.client
+
+         if not openai_config:
+             raise FreeplayConfigurationError(
+                 "Missing OpenAI key. Use a ProviderConfig to specify keys prior to getting completion.")
+
+         self.client = openai.OpenAI(api_key=openai_config.api_key, base_url=openai_config.base_url)
+         return self.client
+
      def _call_openai(
              self,
              messages: List[ChatMessage],
              provider_config: ProviderConfig,
              llm_parameters: LLMParameters,
              stream: bool
-     ) -> Any:
-         self.configure_openai(provider_config.openai)
-         llm_parameters.pop('endpoint')
+     ) -> Union[ChatCompletion, openai.Stream[ChatCompletionChunk]]:
+         client = self.get_openai_client(provider_config.openai)
          try:
-             return openai.ChatCompletion.create(
-                 messages=messages,
+             return client.chat.completions.create(
+                 messages=cast(List[ChatCompletionMessageParam], messages),
                  **self.get_model_params(llm_parameters),
                  stream=stream,
-             )  # type: ignore
-         except (InvalidRequestError, AuthenticationError) as e:
+             )
+         except (BadRequestError, AuthenticationError) as e:
              raise LLMClientError("Unable to call OpenAI") from e
          except Exception as e:
              raise LLMServerError("Unable to call OpenAI") from e


- class AzureOpenAIChat(OpenAIChat):
+ class AzureOpenAIChat(OpenAIChatFlavor):
      record_format_type = "azure_openai_chat"

+     def __init__(self) -> None:
+         self.client: Optional[openai.AzureOpenAI] = None
+
+     @property
+     def provider(self) -> str:
+         return "azure"
+
+     def get_azure_client(
+             self,
+             azure_config: Optional[AzureConfig],
+             api_version: Optional[str] = None,
+             endpoint: Optional[str] = None,
+             deployment: Optional[str] = None,
+     ) -> openai.AzureOpenAI:
+         if self.client:
+             return self.client
+
+         if not azure_config:
+             raise FreeplayConfigurationError(
+                 "Missing Azure key. Use a ProviderConfig to specify keys prior to getting completion.")
+
+         self.client = openai.AzureOpenAI(
+             api_key=azure_config.api_key,
+             api_version=api_version,
+             azure_endpoint=endpoint or '',
+             azure_deployment=deployment,
+         )
+         return self.client
+
      def _call_openai(
              self,
              messages: List[ChatMessage],
@@ -232,28 +265,25 @@ class AzureOpenAIChat(OpenAIChat):
          deployment_id = llm_parameters.get('deployment_id')
          resource_name = llm_parameters.get('resource_name')
          endpoint = f'https://{resource_name}.openai.azure.com'
-         self.configure_openai(
-             provider_config.azure,
-             api_base=endpoint,
-             api_type='azure',
-             api_version=api_version
-         )
          llm_parameters.pop('resource_name')
+
+         client = self.get_azure_client(
+             azure_config=provider_config.azure,
+             api_version=api_version,
+             endpoint=endpoint,
+             deployment=deployment_id,
+         )
+
          try:
-             return openai.ChatCompletion.create(
-                 messages=messages,
+             return client.chat.completions.create(
+                 messages=cast(List[ChatCompletionMessageParam], messages),
                  **self.get_model_params(llm_parameters),
-                 engine=deployment_id,
                  stream=stream,
-             )  # type: ignore
-         except (InvalidRequestError, AuthenticationError) as e:
-             raise LLMClientError("Unable to call OpenAI") from e
+             )
+         except (BadRequestError, AuthenticationError) as e:
+             raise LLMClientError("Unable to call Azure") from e
          except Exception as e:
-             raise LLMServerError("Unable to call OpenAI") from e
-
-     @property
-     def provider(self) -> str:
-         return "azure"
+             raise LLMServerError("Unable to call Azure") from e


  class AnthropicClaudeText(Flavor):
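
The flavors.py changes above migrate from the openai<1 module-level API (openai.api_key, openai.ChatCompletion.create, openai.error) to openai>=1 client objects that each flavor lazily builds and caches on self.client, with BadRequestError replacing InvalidRequestError. A minimal sketch of the underlying client call the new code wraps, assuming an API key in the OPENAI_API_KEY environment variable (the model and prompt are illustrative, not taken from the package):

    import os

    import openai

    # openai>=1 style: configuration lives on a client instance, not module globals.
    client = openai.OpenAI(api_key=os.environ["OPENAI_API_KEY"])
    completion = client.chat.completions.create(
        model="gpt-3.5-turbo",
        messages=[{"role": "user", "content": "Hello!"}],
    )
    print(completion.choices[0].message.content)

The Azure flavor follows the same pattern with openai.AzureOpenAI, passing api_version, azure_endpoint, and azure_deployment in place of the old api_type/api_base module settings.
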
freeplay/freeplay.py CHANGED
@@ -7,8 +7,14 @@ from typing import cast, Any, Dict, Generator, List, Optional, Tuple, Union

  from . import api_support
  from .api_support import try_decode
- from .completions import PromptTemplates, CompletionResponse, CompletionChunk, PromptTemplateWithMetadata, \
-     ChatCompletionResponse, ChatMessage
+ from .completions import (
+     PromptTemplates,
+     CompletionResponse,
+     CompletionChunk,
+     PromptTemplateWithMetadata,
+     ChatCompletionResponse,
+     ChatMessage
+ )
  from .errors import FreeplayConfigurationError, freeplay_response_error, FreeplayServerError
  from .flavors import Flavor, ChatFlavor
  from .llm_parameters import LLMParameters
@@ -700,6 +706,7 @@ def require_chat_flavor(flavor: Flavor) -> ChatFlavor:

      return flavor

+
  def check_all_values_string_or_number(metadata: Optional[Dict[str, Union[str,int,float]]]) -> None:
      if metadata:
          for key, value in metadata.items():
freeplay/provider_config.py CHANGED
@@ -7,7 +7,7 @@ from .errors import FreeplayConfigurationError
  @dataclass
  class OpenAIConfig:
      api_key: str
-     api_base: Optional[str] = None
+     base_url: Optional[str] = None

      def validate(self) -> None:
          if not self.api_key or not self.api_key.strip():
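
The api_base field on OpenAIConfig is renamed to base_url, matching the parameter name of the openai>=1 client it is passed to in flavors.py. A minimal construction sketch, not part of the diff (the key and URL are placeholders):

    from freeplay.provider_config import OpenAIConfig

    # base_url is optional; leave it unset to use the default OpenAI endpoint.
    config = OpenAIConfig(api_key="sk-placeholder", base_url="https://example.invalid/v1")
    config.validate()  # checks that api_key is non-empty
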
freeplay-0.2.30.dist-info/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2023 228Labs
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
freeplay-0.2.27.dist-info/METADATA → freeplay-0.2.30.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: freeplay
- Version: 0.2.27
+ Version: 0.2.30
  Summary:
  License: MIT
  Author: FreePlay Engineering
@@ -15,7 +15,7 @@ Classifier: Programming Language :: Python :: 3.11
  Requires-Dist: anthropic (>=0.7.7,<0.8.0)
  Requires-Dist: click (==8.1.7)
  Requires-Dist: dacite (>=1.8.0,<2.0.0)
- Requires-Dist: openai (>=0.27.8,<0.28.0)
+ Requires-Dist: openai (>=1,<2)
  Requires-Dist: pystache (>=0.6.5,<0.7.0)
  Requires-Dist: requests (>=2.20.0,<3.0.0dev)
  Description-Content-Type: text/markdown
freeplay-0.2.30.dist-info/RECORD ADDED
@@ -0,0 +1,17 @@
+ freeplay/__init__.py,sha256=74A9S9hmLq9BNHsdx0-37yDxlSukudNl9bJ0TE60Z30,61
+ freeplay/api_support.py,sha256=FJCm7yb1JhY0Z1Hn-JIVM7cCfBvWfCrqMQHRzzBzGLE,2012
+ freeplay/completions.py,sha256=JQtwe5THpkWqywUvOLyQFNa9t8kzNhIVkJni67U3HyI,1165
+ freeplay/errors.py,sha256=bPqsw32YX-xSr7O-G49M0sSFF7mq-YF1WGq928UV47s,631
+ freeplay/flavors.py,sha256=AUVAjZaP8VtK5F3NtQOa7_LD9YIs84gUafbUIqt6U84,18492
+ freeplay/freeplay.py,sha256=ObfW0QnSzuagfF5aMC44NPAZIokED3BUALuhqUY5-SA,28760
+ freeplay/freeplay_cli.py,sha256=nasbc_ckSr5-YtUKfg_w-6X1geQZ9s5u79VzRULGsbs,3868
+ freeplay/freeplay_thin.py,sha256=2YFfqFCYimUboRg_umasvHqi2-tzWTvpuf5b9-mQK3I,827
+ freeplay/llm_parameters.py,sha256=bQbfuC8EICF0XMZQa5pwI3FkQqxmCUVqHO3gYHy3Tg8,898
+ freeplay/provider_config.py,sha256=hruf3Khusrwb76_-hv7ouuxmvJuaRyC1UxIw7XlJx8A,1416
+ freeplay/record.py,sha256=pNLu5vPw6HyyPz5u-32pDfElY3hxeujaM1h7Xzah-VE,2961
+ freeplay/utils.py,sha256=SxZYKF5H6uotQa0m2B92CTk3ADr5N5SsUS_a6HQ0fnk,1026
+ freeplay-0.2.30.dist-info/LICENSE,sha256=_jzIw45hB1XHGxiQ8leZ0GH_X7bR_a8qgxaqnHbCUOo,1064
+ freeplay-0.2.30.dist-info/METADATA,sha256=XE8Y8IZibHed27Gi74EUrlfacpqVxWGxF7eyjE5mQZs,1633
+ freeplay-0.2.30.dist-info/WHEEL,sha256=d2fvjOD7sXsVzChCqf0Ty0JbHKBaLYwDbGQDwQTnJ50,88
+ freeplay-0.2.30.dist-info/entry_points.txt,sha256=32s3rf2UUCqiJT4jnClEXZhdXlvl30uwpcxz-Gsy4UU,54
+ freeplay-0.2.30.dist-info/RECORD,,
freeplay-0.2.27.dist-info/WHEEL → freeplay-0.2.30.dist-info/WHEEL RENAMED
@@ -1,4 +1,4 @@
  Wheel-Version: 1.0
- Generator: poetry-core 1.6.1
+ Generator: poetry-core 1.7.0
  Root-Is-Purelib: true
  Tag: py3-none-any
freeplay-0.2.27.dist-info/RECORD DELETED
@@ -1,16 +0,0 @@
- freeplay/__init__.py,sha256=74A9S9hmLq9BNHsdx0-37yDxlSukudNl9bJ0TE60Z30,61
- freeplay/api_support.py,sha256=FJCm7yb1JhY0Z1Hn-JIVM7cCfBvWfCrqMQHRzzBzGLE,2012
- freeplay/completions.py,sha256=THmAfRGnGGvfJzC2iBHmUWzh8YK9VN82G3c2w58U3vI,1014
- freeplay/errors.py,sha256=bPqsw32YX-xSr7O-G49M0sSFF7mq-YF1WGq928UV47s,631
- freeplay/flavors.py,sha256=J60TxT_KRAS_FN_stTyRtEHaoJ7x4VLBupFBZWs2r0o,17380
- freeplay/freeplay.py,sha256=k2dbwcyjvZOELxgwZRQincBYNkF5uBbTA_I-VS439FI,28737
- freeplay/freeplay_cli.py,sha256=nasbc_ckSr5-YtUKfg_w-6X1geQZ9s5u79VzRULGsbs,3868
- freeplay/freeplay_thin.py,sha256=2YFfqFCYimUboRg_umasvHqi2-tzWTvpuf5b9-mQK3I,827
- freeplay/llm_parameters.py,sha256=bQbfuC8EICF0XMZQa5pwI3FkQqxmCUVqHO3gYHy3Tg8,898
- freeplay/provider_config.py,sha256=vcFSRErjDF3uNMN0J-QBhzetjD-CywtDTdF3XZn35es,1416
- freeplay/record.py,sha256=pNLu5vPw6HyyPz5u-32pDfElY3hxeujaM1h7Xzah-VE,2961
- freeplay/utils.py,sha256=SxZYKF5H6uotQa0m2B92CTk3ADr5N5SsUS_a6HQ0fnk,1026
- freeplay-0.2.27.dist-info/METADATA,sha256=EKpk0fLkIsNc9CqaI57m1BMjOrFppWQ5GfCNzEX4a84,1643
- freeplay-0.2.27.dist-info/WHEEL,sha256=Zb28QaM1gQi8f4VCBhsUklF61CTlNYfs9YAZn-TOGFk,88
- freeplay-0.2.27.dist-info/entry_points.txt,sha256=32s3rf2UUCqiJT4jnClEXZhdXlvl30uwpcxz-Gsy4UU,54
- freeplay-0.2.27.dist-info/RECORD,,
- freeplay-0.2.27.dist-info/RECORD,,