freeplay 0.2.14__tar.gz → 0.2.15__tar.gz

This diff shows the changes between two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: freeplay
- Version: 0.2.14
+ Version: 0.2.15
  Summary:
  License: MIT
  Author: FreePlay Engineering
@@ -2,6 +2,18 @@ class FreeplayError(Exception):
      pass


+ class APITypeMissingError(Exception):
+     pass
+
+
+ class APIVersionMissingError(Exception):
+     pass
+
+
+ class APIEngineMissingError(Exception):
+     pass
+
+
  class APIKeyMissingError(Exception):
      pass

@@ -1,7 +1,8 @@
  import json
  from abc import abstractmethod, ABC
  from copy import copy
- from typing import Optional, Generator, Any
+ from typing import Optional, Generator, Any, Union
+ from dataclasses import dataclass, asdict

  import anthropic  # type: ignore
  import openai
@@ -24,6 +25,8 @@ class Flavor(ABC):
                  return OpenAIChat()
              case AnthropicClaudeText.record_format_type:
                  return AnthropicClaudeText()
+             case AzureOpenAIChat.record_format_type:
+                 return AzureOpenAIChat()
              case _:
                  raise FreeplayError(
                      'Configured flavor not found in SDK. Please update your SDK version or configure a different model in the Freeplay UI.')
@@ -89,15 +92,28 @@ class ChatFlavor(Flavor, ABC):


  class OpenAI(Flavor, ABC):
-     def configure_openai(self, openai_config: Optional[OpenAIConfig]) -> None:
+     def configure_openai(self,
+                          openai_config: Optional[OpenAIConfig],
+                          api_base: Optional[str] = None,
+                          api_version: Optional[str] = None,
+                          api_type: Optional[str] = None,
+                          ) -> None:
          super().__init__()
          if not openai_config:
              raise APIKeyMissingError(
                  "Missing OpenAI key. Use a ProviderConfig to specify keys prior to getting completion.")

-         if openai_config.api_base:
+         if api_base:
+             openai.api_base = api_base
+         elif openai_config.api_base:
              openai.api_base = openai_config.api_base

+         if api_type:
+             openai.api_type = api_type
+
+         if api_version:
+             openai.api_version = api_version
+
          if not openai_config.api_key or not openai_config.api_key.strip():
              raise APIKeyMissingError("OpenAI API key is not set. It must be set to make calls to the service.")

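A minimal sketch of how the widened configure_openai signature interacts with the pre-1.0 openai client used in this module: an explicit api_base argument takes precedence over the value carried on OpenAIConfig, while api_type and api_version are only assigned when supplied. All values below are placeholders, not real endpoints or credentials.

import openai  # pre-1.0 client whose module-level settings are assigned above

# Placeholder values for illustration only.
config_api_base = "https://proxy.example.com/v1"            # what OpenAIConfig.api_base might hold
explicit_api_base = "https://my-resource.openai.azure.com"  # the new api_base= argument

# Same precedence as configure_openai: an explicit api_base wins over the config's value.
openai.api_base = explicit_api_base if explicit_api_base else config_api_base

# api_type and api_version are only assigned when passed in; AzureOpenAIChat
# (added further down in this diff) passes 'azure' and its Azure API version.
openai.api_type = "azure"
openai.api_version = "2023-05-15"  # placeholder version string
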
@@ -123,7 +139,7 @@ class OpenAIText(OpenAI):
              provider_config: ProviderConfig,
              llm_parameters: LLMParameters
      ) -> CompletionResponse:
-         completion = self.__call_openai(formatted_prompt, provider_config, llm_parameters, stream=False)
+         completion = self._call_openai(formatted_prompt, provider_config, llm_parameters, stream=False)
          return CompletionResponse(
              content=completion.choices[0].text,
              is_complete=completion.choices[0].finish_reason == "stop"
@@ -135,7 +151,7 @@ class OpenAIText(OpenAI):
              provider_config: ProviderConfig,
              llm_parameters: LLMParameters
      ) -> Generator[CompletionChunk, None, None]:
-         completion = self.__call_openai(formatted_prompt, provider_config, llm_parameters, stream=True)
+         completion = self._call_openai(formatted_prompt, provider_config, llm_parameters, stream=True)

          for chunk in completion:
              yield CompletionChunk(
@@ -143,7 +159,7 @@ class OpenAIText(OpenAI):
                  is_complete=chunk.choices[0].finish_reason == "stop"
              )

-     def __call_openai(
+     def _call_openai(
              self,
              prompt: str,
              provider_config: ProviderConfig,
@@ -181,7 +197,7 @@ class OpenAIChat(OpenAI, ChatFlavor):
              llm_parameters: LLMParameters
      ) -> CompletionResponse:
          messages = json.loads(formatted_prompt)
-         completion = self.__call_openai(messages, provider_config, llm_parameters, stream=False)
+         completion = self._call_openai(messages, provider_config, llm_parameters, stream=False)
          return CompletionResponse(
              content=completion.choices[0].message.content or '',
              is_complete=completion.choices[0].finish_reason == 'stop',
@@ -195,7 +211,7 @@ class OpenAIChat(OpenAI, ChatFlavor):
              llm_parameters: LLMParameters
      ) -> Generator[CompletionChunk, None, None]:
          messages = json.loads(formatted_prompt)
-         completion_stream = self.__call_openai(messages, provider_config, llm_parameters, stream=True)
+         completion_stream = self._call_openai(messages, provider_config, llm_parameters, stream=True)
          for chunk in completion_stream:
              yield CompletionChunk(
                  text=chunk.choices[0].delta.get('content') or '',
@@ -209,7 +225,7 @@ class OpenAIChat(OpenAI, ChatFlavor):
              provider_config: ProviderConfig,
              llm_parameters: LLMParameters
      ) -> ChatCompletionResponse:
-         completion = self.__call_openai(messages, provider_config, llm_parameters, stream=False)
+         completion = self._call_openai(messages, provider_config, llm_parameters, stream=False)

          message_history = copy(messages)
          message_history.append(completion.choices[0].message.to_dict())
@@ -225,14 +241,14 @@ class OpenAIChat(OpenAI, ChatFlavor):
              provider_config: ProviderConfig,
              llm_parameters: LLMParameters
      ) -> Generator[CompletionChunk, None, None]:
-         completion_stream = self.__call_openai(messages, provider_config, llm_parameters, stream=True)
+         completion_stream = self._call_openai(messages, provider_config, llm_parameters, stream=True)
          for chunk in completion_stream:
              yield CompletionChunk(
                  text=chunk.choices[0].delta.get('content', ''),
                  is_complete=chunk.choices[0].finish_reason == "stop"
              )

-     def __call_openai(
+     def _call_openai(
              self,
              messages: list[ChatMessage],
              provider_config: ProviderConfig,
@@ -240,13 +256,47 @@ class OpenAIChat(OpenAI, ChatFlavor):
              stream: bool
      ) -> Any:
          self.configure_openai(provider_config.openai)
+         llm_parameters.pop('endpoint')
          return openai.ChatCompletion.create(
              messages=messages,
              **self.get_model_params(llm_parameters),
-             stream=stream
+             stream=stream,
          )  # type: ignore


+ class AzureOpenAIChat(OpenAIChat):
+     record_format_type = "azure_openai_chat"
+
+     def _call_openai(
+             self,
+             messages: list[ChatMessage],
+             provider_config: ProviderConfig,
+             llm_parameters: LLMParameters,
+             stream: bool
+     ) -> Any:
+         api_version = llm_parameters.get('api_version')
+         deployment_id = llm_parameters.get('deployment_id')
+         resource_name = llm_parameters.get('resource_name')
+         endpoint = f'https://{resource_name}.openai.azure.com'
+         self.configure_openai(
+             provider_config.azure,
+             api_base=endpoint,
+             api_type='azure',
+             api_version=api_version
+         )
+         llm_parameters.pop('resource_name')
+         return openai.ChatCompletion.create(
+             messages=messages,
+             **self.get_model_params(llm_parameters),
+             engine=deployment_id,
+             stream=stream,
+         )  # type: ignore
+
+     @property
+     def provider(self) -> str:
+         return "azure"
+
+
  class AnthropicClaudeText(Flavor):
      record_format_type = "anthropic_text"
      _model_params_with_defaults = LLMParameters({
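A hedged sketch of the llm_parameters the new AzureOpenAIChat flavor reads before delegating to openai.ChatCompletion.create; the resource and deployment names are placeholders, and the surrounding Freeplay call path is omitted.

# Placeholder Azure parameters, for illustration only.
llm_parameters = {
    "api_version": "2023-05-15",           # forwarded to configure_openai(api_version=...)
    "deployment_id": "gpt-35-turbo",       # passed to openai.ChatCompletion.create(engine=...)
    "resource_name": "my-azure-resource",  # used to build the endpoint, then popped
    "temperature": 0.7,
}

# Endpoint construction mirrors the flavor above.
endpoint = f"https://{llm_parameters['resource_name']}.openai.azure.com"
assert endpoint == "https://my-azure-resource.openai.azure.com"

# configure_openai(provider_config.azure, api_base=endpoint, api_type='azure',
# api_version=...) then sets the module-level openai settings before the create call.
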
@@ -24,3 +24,6 @@ class LLMParameters(dict[str, Any]):
              updated_params[model_param_key] = value

          return updated_params
+
+     def pop(self, key: str, default: Optional[Any] = None) -> Any:
+         return super().pop(key, default)
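The pop override simply delegates to dict.pop with an explicit default, which is what lets the flavors above call pop('endpoint') and pop('resource_name') without a KeyError when those keys were never supplied. A quick illustration with a plain dict:

# dict.pop with a default never raises, so stripping provider-only keys is safe.
params = {"model": "gpt-3.5-turbo", "temperature": 0.7}
assert params.pop("endpoint", None) is None      # absent key -> returns the default
assert params.pop("temperature", None) == 0.7    # present key -> removed and returned
assert params == {"model": "gpt-3.5-turbo"}
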
@@ -0,0 +1,70 @@
+ from dataclasses import dataclass
+ from typing import Optional
+
+ from .errors import (
+     APIEngineMissingError,
+     APIKeyMissingError,
+     APITypeMissingError,
+     APIVersionMissingError,
+ )
+
+
+ @dataclass
+ class OpenAIConfig:
+     api_key: str
+     api_base: Optional[str]
+
+     def __init__(self,
+                  api_key: str,
+                  api_base: Optional[str] = None,
+                  ) -> None:
+         self.api_key = api_key
+         self.api_base = api_base
+
+     def validate(self) -> None:
+         if not self.api_key or not self.api_key.strip():
+             raise APIKeyMissingError("OpenAI API key not set. It must be set to make calls to the service.")
+
+ @dataclass
+ class AzureConfig(OpenAIConfig):
+     engine: Optional[str]
+     api_version: Optional[str]
+
+     def __init__(self,
+                  api_key: str,
+                  api_base: Optional[str] = None,
+                  engine: Optional[str] = None,
+                  api_version: Optional[str] = None
+                  ):
+         super().__init__(api_key, api_base)
+         self.api_version = api_version
+         self.engine = engine
+
+     def validate(self) -> None:
+         super().validate()
+
+         if self.api_version is None:
+             raise APIVersionMissingError(
+                 "OpenAI API version not set. It must be set to make calls to the service.")
+
+         if self.engine is None:
+             raise APIEngineMissingError("Azure engine is not set. It must be set to make calls to the service.")
+
+
+ @dataclass
+ class AnthropicConfig:
+     api_key: str
+
+
+ @dataclass
+ class ProviderConfig:
+     anthropic: Optional[AnthropicConfig] = None
+     openai: Optional[OpenAIConfig] = None
+     azure: Optional[AzureConfig] = None
+
+     def validate(self) -> None:
+         if self.anthropic is None and self.openai is None:
+             APIKeyMissingError("At least one provider key must be set in ProviderConfig.")
+
+         if self.openai is not None:
+             self.openai.validate()
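A hedged usage sketch of the new configuration classes added in this file. The import path is an assumption (the diff does not show the module's file name), and the key and deployment values are placeholders.

from freeplay.provider_config import AzureConfig, ProviderConfig  # hypothetical import path

azure = AzureConfig(
    api_key="<azure-openai-key>",  # placeholder
    engine="gpt-35-turbo",         # Azure deployment name
    api_version="2023-05-15",
)
azure.validate()  # raises APIVersionMissingError / APIEngineMissingError if those fields are None

provider_config = ProviderConfig(azure=azure)
provider_config.validate()
# Note: when both anthropic and openai are None, validate() constructs an
# APIKeyMissingError but does not raise it, so this call succeeds as written.
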
@@ -1,6 +1,6 @@
  [tool.poetry]
  name = "freeplay"
- version = "0.2.14"
+ version = "0.2.15"
  description = ""
  authors = ["FreePlay Engineering <engineering@freeplay.ai>"]
  license = "MIT"
@@ -1,36 +0,0 @@
- from dataclasses import dataclass
- from typing import Optional
-
- from .errors import APIKeyMissingError
-
-
- @dataclass
- class OpenAIConfig:
-     api_key: str
-     api_base: Optional[str]
-
-     def __init__(self, api_key: str, api_base: Optional[str] = None) -> None:
-         self.api_key = api_key
-         self.api_base = api_base
-
-     def validate(self) -> None:
-         if not self.api_key or not self.api_key.strip():
-             raise APIKeyMissingError("OpenAI API key not set. It must be set to make calls to the service.")
-
-
- @dataclass
- class AnthropicConfig:
-     api_key: str
-
-
- @dataclass
- class ProviderConfig:
-     anthropic: Optional[AnthropicConfig] = None
-     openai: Optional[OpenAIConfig] = None
-
-     def validate(self) -> None:
-         if self.anthropic is None and self.openai is None:
-             APIKeyMissingError("At least one provider key must be set in ProviderConfig.")
-         if self.openai is not None:
-             self.openai.validate()
-
File without changes
File without changes