freeplay 0.2.42__py3-none-any.whl → 0.3.0a2__py3-none-any.whl
This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
- freeplay/__init__.py +10 -1
- freeplay/freeplay.py +26 -412
- freeplay/freeplay_cli.py +14 -28
- freeplay/model.py +6 -9
- freeplay/{thin/resources → resources}/prompts.py +97 -41
- freeplay/{thin/resources → resources}/recordings.py +5 -15
- freeplay/{thin/resources → resources}/test_runs.py +1 -1
- freeplay/support.py +57 -296
- freeplay/utils.py +15 -7
- {freeplay-0.2.42.dist-info → freeplay-0.3.0a2.dist-info}/METADATA +1 -3
- freeplay-0.3.0a2.dist-info/RECORD +20 -0
- {freeplay-0.2.42.dist-info → freeplay-0.3.0a2.dist-info}/WHEEL +1 -1
- freeplay/completions.py +0 -56
- freeplay/flavors.py +0 -459
- freeplay/provider_config.py +0 -49
- freeplay/py.typed +0 -0
- freeplay/record.py +0 -113
- freeplay/thin/__init__.py +0 -14
- freeplay/thin/freeplay_thin.py +0 -42
- freeplay-0.2.42.dist-info/RECORD +0 -27
- /freeplay/{thin/resources → resources}/__init__.py +0 -0
- /freeplay/{thin/resources → resources}/customer_feedback.py +0 -0
- /freeplay/{thin/resources → resources}/sessions.py +0 -0
- {freeplay-0.2.42.dist-info → freeplay-0.3.0a2.dist-info}/LICENSE +0 -0
- {freeplay-0.2.42.dist-info → freeplay-0.3.0a2.dist-info}/entry_points.txt +0 -0
freeplay/__init__.py
CHANGED
@@ -1,5 +1,14 @@
 from .freeplay import Freeplay
+from .resources.prompts import PromptInfo
+from .resources.recordings import CallInfo, ResponseInfo, RecordPayload, TestRunInfo
+from .resources.sessions import SessionInfo
 
 __all__ = [
-    '
+    'CallInfo',
+    'Freeplay',
+    'PromptInfo',
+    'RecordPayload',
+    'ResponseInfo',
+    'SessionInfo',
+    'TestRunInfo',
 ]
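With the thin-client types promoted to the package root, the record and response types become part of the public surface. A minimal import sketch (only these names are confirmed by this diff; their fields and constructors are not shown here):

    # Sketch: importing the 0.3.0a2 public API from the package root.
    # The names come from the new __all__; nothing else is assumed.
    from freeplay import (
        CallInfo,
        Freeplay,
        PromptInfo,
        RecordPayload,
        ResponseInfo,
        SessionInfo,
        TestRunInfo,
    )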
freeplay/freeplay.py
CHANGED
@@ -1,240 +1,12 @@
-import
-import logging
-from dataclasses import dataclass
-from typing import Any, Dict, Generator, List, Optional, Tuple, Union
+from typing import Optional
 
-from .
-
-
-
-
-
-
-from .errors import FreeplayConfigurationError
-from .flavors import Flavor, ChatFlavor, require_chat_flavor, get_chat_flavor_from_config
-from .llm_parameters import LLMParameters
-from .model import InputVariables
-from .provider_config import ProviderConfig
-from .record import (
-    RecordProcessor,
-    DefaultRecordProcessor
-)
-from .support import CallSupport
-
-logger = logging.getLogger(__name__)
-default_tag = 'latest'
-
-
-class Session:
-    def __init__(
-            self,
-            call_support: CallSupport,
-            session_id: str,
-            prompts: PromptTemplates,
-            flavor: Optional[Flavor],
-            provider_config: ProviderConfig,
-            tag: str = default_tag,
-            test_run_id: Optional[str] = None
-    ) -> None:
-        self.tag = tag
-        self.call_support = call_support
-        self.session_flavor = flavor
-        self.provider_config = provider_config
-        self.session_id = session_id
-        self.prompts = prompts
-        self.test_run_id = test_run_id
-
-    def get_completion(
-            self,
-            template_name: str,
-            variables: InputVariables,
-            flavor: Optional[Flavor] = None,
-            **kwargs: Any
-    ) -> CompletionResponse:
-        completion_flavor = flavor or self.session_flavor
-        return self.call_support.prepare_and_make_call(self.session_id,
-                                                       self.prompts,
-                                                       template_name,
-                                                       variables,
-                                                       completion_flavor,
-                                                       self.provider_config,
-                                                       self.tag,
-                                                       self.test_run_id,
-                                                       completion_parameters=LLMParameters(kwargs))
-
-    def get_completion_stream(
-            self,
-            template_name: str,
-            variables: InputVariables,
-            flavor: Optional[Flavor] = None,
-            **kwargs: Any
-    ) -> Generator[CompletionChunk, None, None]:
-        completion_flavor = flavor or self.session_flavor
-        return self.call_support.prepare_and_make_call_stream(self.session_id,
-                                                              self.prompts,
-                                                              template_name,
-                                                              variables,
-                                                              completion_flavor,
-                                                              self.provider_config,
-                                                              self.tag,
-                                                              self.test_run_id,
-                                                              completion_parameters=LLMParameters(kwargs))
-
-
-class ChatSession(Session):
-    def __init__(
-            self,
-            call_support: CallSupport,
-            session_id: str,
-            prompts: PromptTemplates,
-            flavor: Optional[ChatFlavor],
-            provider_config: ProviderConfig,
-            template_name: str,
-            variables: InputVariables,
-            tag: str = default_tag,
-            test_run_id: Optional[str] = None,
-            messages: Optional[List[ChatMessage]] = None,
-            metadata: Optional[Dict[str, Union[str, int, float]]] = None,
-    ) -> None:
-        super().__init__(call_support, session_id, prompts, flavor, provider_config, tag, test_run_id)
-        # A Chat Session tracks the template_name and variables for a set of chat completions.
-        # Assumes these will be the same for subsequent chat messages.
-        self.message_history = messages or []
-        self.variables = variables
-        self.metadata = metadata
-        self.target_template = self.call_support.find_template_by_name(self.prompts, template_name)
-        self.flavor = get_chat_flavor_from_config(flavor, self.target_template.flavor_name)
-        self.__initial_messages = json.loads(self.flavor.format(self.target_template, self.variables))
-
-    def last_message(self) -> Optional[ChatMessage]:
-        return self.message_history[len(self.message_history) - 1]
-
-    def store_new_messages(self, new_messages: List[ChatMessage]) -> None:
-        for message in new_messages:
-            self.message_history.append({
-                "role": message["role"],
-                "content": message["content"]
-            })
-
-    def start_chat(self, **kwargs: Any) -> ChatCompletionResponse:
-        response = self.call_support.prepare_and_make_chat_call(
-            self.session_id,
-            flavor=self.flavor,
-            provider_config=self.provider_config,
-            tag=self.tag,
-            test_run_id=self.test_run_id,
-            target_template=self.target_template,
-            variables=self.variables,
-            message_history=self.__initial_messages,
-            new_messages=None,
-            completion_parameters=LLMParameters(kwargs),
-            metadata=self.metadata,
-        )
-
-        self.store_new_messages(response.message_history)
-        return response
-
-    def start_chat_stream(self, **kwargs: Any) -> Generator[CompletionChunk, None, None]:
-        return self.continue_chat_stream(new_messages=None, **kwargs)
-
-    def aggregate_message_from_response(
-            self,
-            response: Generator[CompletionChunk, None, None]
-    ) -> Generator[CompletionChunk, Any, None]:
-        message: ChatMessage = {
-            "role": "assistant",
-            "content": ""
-        }
-
-        for chunk in response:
-            message["content"] += chunk.text
-            yield chunk
-
-        self.message_history.append(message)
-
-    def continue_chat(
-            self,
-            new_messages: Optional[List[ChatMessage]] = None,
-            **kwargs: Any
-    ) -> ChatCompletionResponse:
-
-        response = self.call_support.prepare_and_make_chat_call(
-            self.session_id,
-            flavor=self.flavor,
-            provider_config=self.provider_config,
-            tag=self.tag,
-            test_run_id=self.test_run_id,
-            target_template=self.target_template,
-            variables=self.variables,
-            message_history=self.message_history,
-            new_messages=new_messages,
-            completion_parameters=LLMParameters(kwargs),
-            metadata=self.metadata,
-        )
-
-        if new_messages is not None:
-            self.store_new_messages(new_messages)
-        if response.content:
-            self.message_history.append(response.message_history[-1])
-
-        return response
-
-    def continue_chat_stream(
-            self,
-            new_messages: Optional[List[ChatMessage]] = None,
-            **kwargs: Any
-    ) -> Generator[CompletionChunk, None, None]:
-        new_messages = new_messages or []
-        if len(self.message_history) == 0:
-            self.message_history = self.__initial_messages
-
-        response = self.call_support.prepare_and_make_chat_call_stream(
-            self.session_id,
-            flavor=self.flavor,
-            provider_config=self.provider_config,
-            tag=self.tag,
-            target_template=self.target_template,
-            variables=self.variables,
-            message_history=self.message_history,
-            test_run_id=self.test_run_id,
-            completion_parameters=LLMParameters(kwargs),
-            metadata=self.metadata,
-        )
-
-        self.store_new_messages(new_messages)
-        yield from self.aggregate_message_from_response(response)
-
-
-@dataclass()
-class FreeplayTestRun:
-    def __init__(
-            self,
-            call_support: CallSupport,
-            flavor: Optional[Flavor],
-            provider_config: ProviderConfig,
-            test_run_id: str,
-            inputs: List[InputVariables]
-    ):
-        self.call_support = call_support
-        self.flavor = flavor
-        self.provider_config = provider_config
-        self.test_run_id = test_run_id
-        self.inputs = inputs
-
-    def get_inputs(self) -> List[InputVariables]:
-        return self.inputs
-
-    def create_session(self, project_id: str, tag: str = default_tag) -> Session:
-        session_id = self.call_support.create_session_id()
-        prompts = self.call_support.get_prompts(project_id, tag)
-        return Session(self.call_support, session_id, prompts, self.flavor, self.provider_config,
-                       tag, self.test_run_id)
-
-
-# This SDK prototype does not support full functionality of either OpenAI's API or Freeplay's
-# The simplifications are:
-# - Always assumes there is a single choice returned, does not support multiple
-# - Does not support an "escape hatch" to allow use of features we don't explicitly expose
+from freeplay.errors import FreeplayConfigurationError
+from freeplay.resources.customer_feedback import CustomerFeedback
+from freeplay.resources.prompts import Prompts, APITemplateResolver, TemplateResolver
+from freeplay.resources.recordings import Recordings
+from freeplay.resources.sessions import Sessions
+from freeplay.resources.test_runs import TestRuns
+from freeplay.support import CallSupport
 
 
 class Freeplay:
@@ -242,185 +14,27 @@ class Freeplay:
             self,
             freeplay_api_key: str,
             api_base: str,
-
-            flavor: Optional[Flavor] = None,
-            record_processor: Optional[RecordProcessor] = None,
-            **kwargs: Any
+            template_resolver: Optional[TemplateResolver] = None
     ) -> None:
         if not freeplay_api_key or not freeplay_api_key.strip():
-            raise FreeplayConfigurationError("Freeplay API key not set. It must be set to
-        provider_config.validate()
+            raise FreeplayConfigurationError("Freeplay API key not set. It must be set to the Freeplay API.")
 
-        self.
-
-
-
+        self.call_support = CallSupport(
+            freeplay_api_key,
+            api_base
+        )
         self.freeplay_api_key = freeplay_api_key
         self.api_base = api_base
 
-
-
-
-
-
-
-
-
-
-
-
-
-            self,
-            project_id: str,
-            session_id: str,
-            template_name: str,
-            variables: InputVariables,
-            tag: str = default_tag,
-            flavor: Optional[Flavor] = None,
-            **kwargs: Any
-    ) -> CompletionResponse:
-        prompts = self.call_support.get_prompts(project_id, tag)
-        completion_flavor = flavor or self.client_flavor
-        return self.call_support.prepare_and_make_call(
-            session_id=session_id,
-            prompts=prompts,
-            template_name=template_name,
-            variables=variables,
-            flavor=completion_flavor,
-            provider_config=self.provider_config,
-            tag=tag,
-            completion_parameters=LLMParameters(kwargs),
-        )
-
-    def get_completion(
-            self,
-            project_id: str,
-            template_name: str,
-            variables: InputVariables,
-            tag: str = default_tag,
-            flavor: Optional[Flavor] = None,
-            metadata: Optional[Dict[str, Union[str, int, float]]] = None,
-            **kwargs: Any
-    ) -> CompletionResponse:
-        self.call_support.check_all_values_string_or_number(metadata)
-        session_id = self.call_support.create_session_id()
-        prompts = self.call_support.get_prompts(project_id, tag)
-        completion_flavor = flavor or self.client_flavor
-
-        return self.call_support.prepare_and_make_call(session_id,
-                                                       prompts,
-                                                       template_name,
-                                                       variables,
-                                                       completion_flavor,
-                                                       self.provider_config,
-                                                       tag,
-                                                       completion_parameters=LLMParameters(kwargs),
-                                                       metadata=metadata)
-
-    def get_completion_stream(
-            self,
-            project_id: str,
-            template_name: str,
-            variables: InputVariables,
-            tag: str = default_tag,
-            flavor: Optional[Flavor] = None,
-            metadata: Optional[Dict[str, Union[str, int, float]]] = None,
-            **kwargs: Any
-    ) -> Generator[CompletionChunk, None, None]:
-        self.call_support.check_all_values_string_or_number(metadata)
-        session_id = self.call_support.create_session_id()
-        prompts = self.call_support.get_prompts(project_id, tag)
-        completion_flavor = flavor or self.client_flavor
-
-        return self.call_support.prepare_and_make_call_stream(session_id,
-                                                              prompts,
-                                                              template_name,
-                                                              variables,
-                                                              completion_flavor,
-                                                              self.provider_config,
-                                                              tag,
-                                                              completion_parameters=LLMParameters(kwargs),
-                                                              metadata=metadata)
-
-    def start_chat(
-            self,
-            project_id: str,
-            template_name: str,
-            variables: InputVariables,
-            tag: str = default_tag,
-            metadata: Optional[Dict[str, Union[str, int, float]]] = None,
-            **kwargs: Any
-    ) -> Tuple[ChatSession, ChatCompletionResponse]:
-        session = self.__create_chat_session(project_id, tag, template_name, variables, metadata)
-        completion_response = session.start_chat(**kwargs)
-        return session, completion_response
-
-    def restore_chat_session(
-            self,
-            project_id: str,
-            template_name: str,
-            session_id: str,
-            variables: InputVariables,
-            tag: str = default_tag,
-            messages: Optional[List[ChatMessage]] = None,
-            flavor: Optional[ChatFlavor] = None) -> ChatSession:
-        prompts = self.call_support.get_prompts(project_id, tag)
-        chat_flavor = flavor or require_chat_flavor(self.client_flavor) if self.client_flavor else None
-        return ChatSession(
-            call_support=self.call_support,
-            session_id=session_id,
-            prompts=prompts,
-            flavor=chat_flavor,
-            provider_config=self.provider_config,
-            template_name=template_name,
-            variables=variables,
-            tag=tag,
-            messages=messages or []
-        )
-
-    def start_chat_stream(
-            self,
-            project_id: str,
-            template_name: str,
-            variables: InputVariables,
-            tag: str = default_tag,
-            metadata: Optional[Dict[str, Union[str, int, float]]] = None,
-            **kwargs: Any
-    ) -> Tuple[ChatSession, Generator[CompletionChunk, None, None]]:
-        """Returns a chat session, the base prompt template messages, and a streamed response from the LLM."""
-        session = self.__create_chat_session(project_id, tag, template_name, variables, metadata)
-        completion_response = session.start_chat_stream(**kwargs)
-        return session, completion_response
-
-    def create_test_run(self, project_id: str, testlist: str) -> FreeplayTestRun:
-        test_run_response = self.call_support.create_test_run(project_id=project_id, testlist=testlist)
-
-        return FreeplayTestRun(
-            self.call_support,
-            self.client_flavor,
-            self.provider_config,
-            test_run_response.test_run_id,
-            [test_case.variables for test_case in test_run_response.test_cases]
-        )
-
-    def __create_chat_session(
-            self,
-            project_id: str,
-            tag: str,
-            template_name: str,
-            variables: InputVariables,
-            metadata: Optional[Dict[str, Union[str, int, float]]] = None) -> ChatSession:
-        chat_flavor = require_chat_flavor(self.client_flavor) if self.client_flavor else None
-
-        session_id = self.call_support.create_session_id()
-        prompts = self.call_support.get_prompts(project_id, tag)
-        return ChatSession(
-            self.call_support,
-            session_id,
-            prompts,
-            chat_flavor,
-            self.provider_config,
-            template_name,
-            variables,
-            tag,
-            metadata=metadata)
+        resolver: TemplateResolver
+        if template_resolver is None:
+            resolver = APITemplateResolver(self.call_support)
+        else:
+            resolver = template_resolver
+
+        # Resources ========
+        self.customer_feedback = CustomerFeedback(self.call_support)
+        self.prompts = Prompts(self.call_support, resolver)
+        self.recordings = Recordings(self.call_support)
+        self.sessions = Sessions()
+        self.test_runs = TestRuns(self.call_support)
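The net effect of this rewrite: the flavor/provider-config machinery, `Session`, `ChatSession`, and `FreeplayTestRun` are removed, and the client becomes a thin holder of resource objects. A hedged sketch of constructing the new client (the constructor arguments and resource attributes are confirmed by the diff above; the key and URL values are placeholders, and the `get_all` call is taken from the CLI below):

    from freeplay import Freeplay

    fp = Freeplay(
        freeplay_api_key="fp-...",                    # placeholder; any non-blank key passes validation
        api_base="https://example.freeplay.ai/api",   # placeholder base URL
        # template_resolver defaults to APITemplateResolver(call_support) when omitted
    )

    # Resource attributes set in __init__: customer_feedback, prompts,
    # recordings, sessions, test_runs.
    prompts = fp.prompts.get_all("my-project-id", environment="latest")  # placeholder project id and environment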
freeplay/freeplay_cli.py
CHANGED
@@ -1,4 +1,3 @@
-import dataclasses
 import json
 import os
 import sys
@@ -7,9 +6,9 @@ from stat import S_IREAD, S_IRGRP, S_IROTH, S_IWUSR
 
 import click
 
-from .
-from
-from .
+from freeplay.errors import FreeplayClientError, FreeplayServerError
+from freeplay import Freeplay
+from freeplay.support import PromptTemplates, PromptTemplate, PromptTemplateEncoder
 
 
 @click.group()
@@ -37,8 +36,9 @@ def download(project_id: str, environment: str, output_dir: str) -> None:
         freeplay_api_url = f'{os.environ["FREEPLAY_API_URL"]}/api'
         click.echo("Using URL override for Freeplay specified in the FREEPLAY_API_URL environment variable")
 
-    click.echo(
-
+    click.echo(
+        f"Downloading prompts for project {project_id}, environment {environment}, "
+        f"to directory {output_dir} from {freeplay_api_url}")
 
     fp_client = Freeplay(
         freeplay_api_key=FREEPLAY_API_KEY,
@@ -47,19 +47,19 @@ def download(project_id: str, environment: str, output_dir: str) -> None:
 
     try:
         prompts: PromptTemplates = fp_client.prompts.get_all(project_id, environment=environment)
-        click.echo("Found
+        click.echo(f"Found {len(prompts.prompt_templates)} prompt templates")
 
-        for prompt in prompts.
+        for prompt in prompts.prompt_templates:
             __write_single_file(environment, output_dir, project_id, prompt)
     except FreeplayClientError as e:
-        print("Error downloading templates:
+        print(f"Error downloading templates: {e}.\nIs your project ID correct?", file=sys.stderr)
         exit(1)
     except FreeplayServerError as e:
-        print("Error on Freeplay's servers downloading templates:
+        print(f"Error on Freeplay's servers downloading templates: {e}.\nTry again after a short wait.",
               file=sys.stderr)
         exit(2)
     except Exception as e:
-        print("Error downloading templates:
+        print(f"Error downloading templates: {e}", file=sys.stderr)
         exit(3)
 
 
@@ -67,33 +67,19 @@ def __write_single_file(
         environment: str,
         output_dir: str,
         project_id: str,
-        prompt:
+        prompt: PromptTemplate
 ) -> None:
     directory = __root_dir(environment, output_dir, project_id)
-    basename = f'{prompt.
+    basename = f'{prompt.prompt_template_name}'
     prompt_path = directory / f'{basename}.json'
    click.echo("Writing prompt file: %s" % prompt_path)
 
-    full_dict = dataclasses.asdict(prompt)
-    del full_dict['prompt_template_id']
-    del full_dict['prompt_template_version_id']
-    del full_dict['name']
-    del full_dict['content']
-
-    output_dict = {
-        'prompt_template_id': prompt.prompt_template_id,
-        'prompt_template_version_id': prompt.prompt_template_version_id,
-        'name': prompt.name,
-        'content': prompt.content,
-        'metadata': full_dict
-    }
-
     # Make sure it's owner writable if it already exists
     if prompt_path.is_file():
         os.chmod(prompt_path, S_IWUSR | S_IREAD)
 
     with prompt_path.open(mode='w') as f:
-        f.write(json.dumps(
+        f.write(json.dumps(prompt, sort_keys=True, indent=4, cls=PromptTemplateEncoder))
         f.write('\n')
 
     # Make the file read-only to discourage local changes
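The CLI no longer hand-builds an `output_dict` via `dataclasses.asdict`; serialization is delegated to `PromptTemplateEncoder`. A sketch of that call in isolation (the encoder import and the exact `json.dumps` invocation are confirmed by the diff; `dump_template` is a hypothetical helper):

    import json

    from freeplay.support import PromptTemplate, PromptTemplateEncoder

    def dump_template(prompt: PromptTemplate) -> str:
        # Same invocation the CLI now uses: the encoder, not the CLI,
        # decides which PromptTemplate fields end up in the JSON file.
        return json.dumps(prompt, sort_keys=True, indent=4, cls=PromptTemplateEncoder)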
freeplay/model.py
CHANGED
@@ -1,16 +1,8 @@
 from dataclasses import dataclass
-from typing import List, Union, Any, Dict, Mapping
-
-from pydantic import RootModel
+from typing import List, Union, Any, Dict, Mapping, TypedDict
 
 InputValue = Union[str, int, bool, Dict[str, Any], List[Any]]
-InputVariable = RootModel[Union[Dict[str, "InputVariable"], List["InputVariable"], str, int, bool, float]]
-InputVariable.model_rebuild()
-
 InputVariables = Mapping[str, InputValue]
-
-PydanticInputVariables = RootModel[Dict[str, InputVariable]]
-
 TestRunInput = Mapping[str, InputValue]
 
 
@@ -18,3 +10,8 @@ TestRunInput = Mapping[str, InputValue]
 class TestRun:
     id: str
     inputs: List[TestRunInput]
+
+
+class OpenAIFunctionCall(TypedDict):
+    name: str
+    arguments: str
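The new `OpenAIFunctionCall` TypedDict mirrors OpenAI's function-call payload, where `arguments` arrives as a JSON-encoded string rather than a parsed object. A small usage sketch (the two fields are confirmed by the diff; the values are invented):

    import json

    from freeplay.model import OpenAIFunctionCall

    call: OpenAIFunctionCall = {
        "name": "get_weather",                           # example function name
        "arguments": '{"city": "Denver", "unit": "F"}',  # JSON string, per OpenAI convention
    }

    args = json.loads(call["arguments"])  # decode before use
    assert args["city"] == "Denver"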