simile 0.2.14.tar.gz → 0.3.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: simile
- Version: 0.2.14
+ Version: 0.3.1
  Summary: Package for interfacing with Simile AI agents for simulation
  Author-email: Simile AI <cqz@simile.ai>
  License: MIT
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"

  [project]
  name = "simile"
- version = "0.2.14"
+ version = "0.3.1"
  authors = [
  { name="Simile AI", email="cqz@simile.ai" },
  ]
@@ -4,10 +4,10 @@ from .models import (
  Population, Agent, DataItem,
  CreatePopulationPayload, CreateAgentPayload, CreateDataItemPayload, UpdateDataItemPayload,
  DeletionResponse,
- QualGenerationRequest,
- QualGenerationResponse,
- MCGenerationRequest,
- MCGenerationResponse
+ OpenGenerationRequest,
+ OpenGenerationResponse,
+ ClosedGenerationRequest,
+ ClosedGenerationResponse
  )
  from .exceptions import (
  SimileAPIError,
@@ -22,9 +22,9 @@ __all__ = [
  "Population", "Agent", "DataItem",
  "CreatePopulationPayload", "CreateAgentPayload", "CreateDataItemPayload", "UpdateDataItemPayload",
  "DeletionResponse",
- "QualGenerationRequest", "QualGenerationResponse",
- "MCGenerationRequest", "MCGenerationResponse",
+ "OpenGenerationRequest", "OpenGenerationResponse",
+ "ClosedGenerationRequest", "ClosedGenerationResponse",
  "SimileAPIError", "SimileAuthenticationError", "SimileNotFoundError", "SimileBadRequestError"
  ]

- __version__ = "0.2.13"
+ __version__ = "0.2.15"
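
For downstream code, this release is mostly a rename: the Qual* models become Open*, the MC* models become Closed*, and the package root re-exports the new names (the closed-response models also gain fields, as shown in the model hunks below). A minimal migration sketch in Python, assuming the package is installed as simile; only the imported names change here:

    # Before (simile 0.2.x)
    from simile import QualGenerationRequest, MCGenerationResponse

    # After (simile 0.3.x)
    from simile import OpenGenerationRequest, ClosedGenerationResponse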
@@ -9,10 +9,10 @@ from .models import (
  Agent as AgentModel,
  DataItem,
  DeletionResponse,
- QualGenerationRequest,
- QualGenerationResponse,
- MCGenerationRequest,
- MCGenerationResponse,
+ OpenGenerationRequest,
+ OpenGenerationResponse,
+ ClosedGenerationRequest,
+ ClosedGenerationResponse,
  CreatePopulationPayload,
  CreateAgentPayload,
  CreateDataItemPayload,
@@ -279,17 +279,17 @@ class Simile:
  )
  return response_data

- async def generate_qual_response(
+ async def generate_open_response(
  self,
  agent_id: uuid.UUID,
  question: str,
  data_types: Optional[List[str]] = None,
  exclude_data_types: Optional[List[str]] = None,
  images: Optional[Dict[str, str]] = None,
- ) -> QualGenerationResponse:
- """Generates a qualitative response from an agent based on a question."""
- endpoint = f"/generation/qual/{str(agent_id)}"
- request_payload = QualGenerationRequest(
+ ) -> OpenGenerationResponse:
+ """Generates an open response from an agent based on a question."""
+ endpoint = f"/generation/open/{str(agent_id)}"
+ request_payload = OpenGenerationRequest(
  question=question,
  data_types=data_types,
  exclude_data_types=exclude_data_types,
@@ -299,27 +299,33 @@ class Simile:
  "POST",
  endpoint,
  json=request_payload.model_dump(),
- response_model=QualGenerationResponse,
+ response_model=OpenGenerationResponse,
  )
  return response_data

- async def generate_mc_response(
+ async def generate_closed_response(
  self,
  agent_id: uuid.UUID,
  question: str,
  options: List[str],
- image_url: Optional[str] = None,
- ) -> MCGenerationResponse:
- """Generates a multiple-choice response from an agent."""
- endpoint = f"generation/mc/{str(agent_id)}"
- request_payload = MCGenerationRequest(
- question=question, options=options, image_url=image_url
+ data_types: Optional[List[str]] = None,
+ exclude_data_types: Optional[List[str]] = None,
+ images: Optional[Dict[str, str]] = None,
+ ) -> ClosedGenerationResponse:
+ """Generates a closed response from an agent."""
+ endpoint = f"generation/closed/{str(agent_id)}"
+ request_payload = ClosedGenerationRequest(
+ question=question,
+ options=options,
+ data_types=data_types,
+ exclude_data_types=exclude_data_types,
+ images=images,
  )
  response_data = await self._request(
  "POST",
  endpoint,
  json=request_payload.model_dump(),
- response_model=MCGenerationResponse,
+ response_model=ClosedGenerationResponse,
  )
  return response_data

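
On the client, generate_qual_response becomes generate_open_response and generate_mc_response becomes generate_closed_response, with the closed variant picking up the same data_types / exclude_data_types / images parameters as the open one. A hedged usage sketch; the Simile constructor arguments and the example agent id are assumptions not shown in this diff:

    import asyncio
    import uuid

    from simile.client import Simile

    async def main() -> None:
        # Constructor arguments are an assumption; they are not part of this diff.
        client = Simile(api_key="...")
        agent_id = uuid.UUID("00000000-0000-0000-0000-000000000000")

        # Formerly generate_qual_response(...)
        open_resp = await client.generate_open_response(
            agent_id=agent_id,
            question="How do you usually commute to work?",
        )
        print(open_resp.answer)

        # Formerly generate_mc_response(...); note the new filter parameters.
        closed_resp = await client.generate_closed_response(
            agent_id=agent_id,
            question="Which option best describes your commute?",
            options=["car", "bike", "public transit", "walk"],
        )
        print(closed_resp.response)  # was chosen_option in 0.2.x

    asyncio.run(main())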
@@ -61,7 +61,7 @@ class DeletionResponse(BaseModel):


  # --- Generation Operation Models ---
- class QualGenerationRequest(BaseModel):
+ class OpenGenerationRequest(BaseModel):
  question: str
  data_types: Optional[List[str]] = None
  exclude_data_types: Optional[List[str]] = None
@@ -70,21 +70,23 @@ class QualGenerationRequest(BaseModel):
  )


- class QualGenerationResponse(BaseModel):
+ class OpenGenerationResponse(BaseModel):
  question: str
  answer: str


- class MCGenerationRequest(BaseModel):
+ class ClosedGenerationRequest(BaseModel):
  question: str
  options: List[str]
+ data_types: Optional[List[str]] = None
+ exclude_data_types: Optional[List[str]] = None
  images: Optional[Dict[str, str]] = None


- class MCGenerationResponse(BaseModel):
+ class ClosedGenerationResponse(BaseModel):
  question: str
  options: List[str]
- chosen_option: str
+ response: str


  class AddContextRequest(BaseModel):
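
Beyond the class renames, the closed request gains the data_types / exclude_data_types filters and the closed response stores its answer in a response field instead of chosen_option. A minimal sketch of parsing a closed answer against the new model; the field values are illustrative:

    from simile.models import ClosedGenerationResponse

    payload = {
        "question": "Which option best describes your commute?",
        "options": ["car", "bike", "public transit", "walk"],
        "response": "bike",  # serialized as "chosen_option" in 0.2.x
    }
    answer = ClosedGenerationResponse(**payload)
    print(answer.response)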
@@ -102,8 +104,8 @@ class TurnType(str, Enum):

  CONTEXT = "context"
  IMAGE = "image"
- QUALITATIVE_QUESTION = "qualitative_question"
- MULTIPLE_CHOICE_QUESTION = "multiple_choice_question"
+ OPEN_QUESTION = "open_question"
+ CLOSED_QUESTION = "closed_question"


  class BaseTurn(BaseModel):
@@ -131,48 +133,48 @@ class ImageTurn(BaseTurn):
  caption: Optional[str] = None


- class QualitativeQuestionTurn(BaseTurn):
- """A qualitative question-answer turn."""
+ class OpenQuestionTurn(BaseTurn):
+ """An open question-answer turn."""

- type: Literal[TurnType.QUALITATIVE_QUESTION] = TurnType.QUALITATIVE_QUESTION
+ type: Literal[TurnType.OPEN_QUESTION] = TurnType.OPEN_QUESTION
  user_question: str
  user_images: Optional[Dict[str, str]] = None
  llm_response: Optional[str] = None


- class MultipleChoiceQuestionTurn(BaseTurn):
- """A multiple choice question-answer turn."""
+ class ClosedQuestionTurn(BaseTurn):
+ """A closed question-answer turn."""

- type: Literal[TurnType.MULTIPLE_CHOICE_QUESTION] = TurnType.MULTIPLE_CHOICE_QUESTION
+ type: Literal[TurnType.CLOSED_QUESTION] = TurnType.CLOSED_QUESTION
  user_question: str
  user_options: List[str]
  user_images: Optional[Dict[str, str]] = None
- llm_chosen_option: Optional[str] = None
+ llm_response: Optional[str] = None

  @validator("user_options")
  def validate_options(cls, v):
  if not v:
- raise ValueError("Multiple choice questions must have at least one option")
+ raise ValueError("Closed questions must have at least one option")
  if len(v) < 2:
  raise ValueError(
- "Multiple choice questions should have at least two options"
+ "Closed questions should have at least two options"
  )
  return v

- @validator("llm_chosen_option")
- def validate_chosen_option(cls, v, values):
+ @validator("llm_response")
+ def validate_response(cls, v, values):
  if (
  v is not None
  and "user_options" in values
  and v not in values["user_options"]
  ):
- raise ValueError(f"Chosen option '{v}' must be one of the provided options")
+ raise ValueError(f"Response '{v}' must be one of the provided options")
  return v


  # Union type for all possible turn types
  SurveySessionTurn = Union[
- ContextTurn, ImageTurn, QualitativeQuestionTurn, MultipleChoiceQuestionTurn
+ ContextTurn, ImageTurn, OpenQuestionTurn, ClosedQuestionTurn
  ]

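
Code that inspects a session's typed history must switch to the renamed turn classes as well; ClosedQuestionTurn keeps the option validators but stores the answer in llm_response rather than llm_chosen_option. A hedged dispatch sketch; the turns are assumed to come from a session's history, since BaseTurn's own fields are not shown in this diff:

    from typing import List

    from simile.models import ClosedQuestionTurn, OpenQuestionTurn, SurveySessionTurn

    def summarize(turns: List[SurveySessionTurn]) -> List[str]:
        lines = []
        for turn in turns:
            if isinstance(turn, OpenQuestionTurn):       # was QualitativeQuestionTurn
                lines.append(f"{turn.user_question} -> {turn.llm_response}")
            elif isinstance(turn, ClosedQuestionTurn):   # was MultipleChoiceQuestionTurn
                options = ", ".join(turn.user_options)
                lines.append(f"{turn.user_question} [{options}] -> {turn.llm_response}")
        return lines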
@@ -0,0 +1,319 @@
+ import uuid
+ from typing import TYPE_CHECKING, List, Optional, Dict
+
+ from .models import (
+ OpenGenerationRequest,
+ OpenGenerationResponse,
+ ClosedGenerationRequest,
+ ClosedGenerationResponse,
+ SurveySessionCloseResponse,
+ AddContextRequest,
+ AddContextResponse,
+ SurveySessionDetailResponse,
+ )
+
+ if TYPE_CHECKING:
+ from .client import Simile
+
+
+ class Agent:
+ """Represents an agent and provides methods for interacting with it directly."""
+
+ def __init__(self, agent_id: uuid.UUID, client: "Simile"):
+ self._agent_id = agent_id
+ self._client = client
+
+ @property
+ def id(self) -> uuid.UUID:
+ return self._agent_id
+
+ async def generate_open_response(
+ self,
+ question: str,
+ data_types: Optional[List[str]] = None,
+ exclude_data_types: Optional[List[str]] = None,
+ images: Optional[Dict[str, str]] = None,
+ ) -> OpenGenerationResponse:
+ """Generates an open response from this agent based on a question."""
+ return await self._client.generate_open_response(
+ agent_id=self._agent_id,
+ question=question,
+ data_types=data_types,
+ exclude_data_types=exclude_data_types,
+ images=images,
+ )
+
+ async def generate_closed_response(
+ self,
+ question: str,
+ options: List[str],
+ data_types: Optional[List[str]] = None,
+ exclude_data_types: Optional[List[str]] = None,
+ images: Optional[Dict[str, str]] = None,
+ ) -> ClosedGenerationResponse:
+ """Generates a closed response from this agent."""
+ return await self._client.generate_closed_response(
+ agent_id=self._agent_id,
+ question=question,
+ options=options,
+ data_types=data_types,
+ exclude_data_types=exclude_data_types,
+ images=images,
+ )
+
+
+ class SurveySession:
+ """Represents an active survey session with an agent, allowing for contextual multi-turn generation."""
+
+ def __init__(
+ self, id: uuid.UUID, agent_id: uuid.UUID, status: str, client: "Simile"
+ ):
+ self._id = id
+ self._agent_id = agent_id
+ self._status = status
+ self._client = client
+
+ @property
+ def id(self) -> uuid.UUID:
+ return self._id
+
+ @property
+ def agent_id(self) -> uuid.UUID:
+ return self._agent_id
+
+ @property
+ def status(self) -> str:
+ return self._status
+
+ async def get_details(self) -> SurveySessionDetailResponse:
+ """Retrieves detailed information about this survey session including typed conversation history."""
+ return await self._client.get_survey_session_details(self._id)
+
+ async def view(self) -> SurveySessionDetailResponse:
+ """Alias for get_details() - retrieves all turns in this session."""
+ return await self.get_details()
+
+ async def generate_open_response(
+ self,
+ question: str,
+ data_types: Optional[List[str]] = None,
+ exclude_data_types: Optional[List[str]] = None,
+ images: Optional[Dict[str, str]] = None,
+ ) -> OpenGenerationResponse:
+ """Generates an open response within this survey session."""
+ endpoint = f"sessions/{str(self._id)}/open"
+ payload = OpenGenerationRequest(
+ question=question,
+ data_types=data_types,
+ exclude_data_types=exclude_data_types,
+ images=images,
+ )
+ return await self._client._request(
+ "POST",
+ endpoint,
+ json=payload.model_dump(),
+ response_model=OpenGenerationResponse,
+ )
+
+ async def generate_closed_response(
+ self,
+ question: str,
+ options: List[str],
+ data_types: Optional[List[str]] = None,
+ exclude_data_types: Optional[List[str]] = None,
+ images: Optional[Dict[str, str]] = None,
+ ) -> ClosedGenerationResponse:
+ """Generates a closed response within this survey session."""
+ endpoint = f"sessions/{str(self._id)}/closed"
+ payload = ClosedGenerationRequest(
+ question=question,
+ options=options,
+ data_types=data_types,
+ exclude_data_types=exclude_data_types,
+ images=images,
+ )
+ return await self._client._request(
+ "POST",
+ endpoint,
+ json=payload.model_dump(),
+ response_model=ClosedGenerationResponse,
+ )
+
+ async def add_context(self, ctx: str) -> AddContextResponse:
+ """Adds text to the SurveySession without requesting a response."""
+ endpoint = f"sessions/{str(self._id)}/context"
+ payload = AddContextRequest(context=ctx)
+ return await self._client._request(
+ "POST",
+ endpoint,
+ json=payload.model_dump(),
+ response_model=AddContextResponse,
+ )
+
+ async def add_context_with_timestamp(
+ self,
+ context_text: str,
+ timestamp: str,
+ ) -> Dict:
+ """Adds context to this session with a specific timestamp.
+
+ This is a lower-level method that allows specifying when the context was added.
+ For normal use, prefer the add_context() method.
+
+ Args:
+ context_text: The context text to add
+ timestamp: ISO timestamp of when this interaction occurred
+
+ Returns:
+ Dictionary with success status and the added turn details
+ """
+ endpoint = f"sessions/{str(self._id)}/add-turn"
+ payload = {
+ "turn_type": "context",
+ "context_text": context_text,
+ "timestamp": timestamp,
+ }
+
+ return await self._client._request(
+ "POST",
+ endpoint,
+ json=payload,
+ response_model=None, # Return raw dict since we don't have a specific model
+ )
+
+ async def add_images(
+ self,
+ images: Dict[str, str],
+ timestamp: Optional[str] = None,
+ ) -> Dict:
+ """Adds images to the session's conversation history.
+
+ Args:
+ images: Dictionary mapping image descriptions to URLs
+ timestamp: Optional ISO timestamp of when this interaction occurred
+
+ Returns:
+ Dictionary with success status and the added turn details
+ """
+ endpoint = f"sessions/{str(self._id)}/add-turn"
+ payload = {
+ "turn_type": "image",
+ "images": images,
+ }
+ if timestamp:
+ payload["timestamp"] = timestamp
+
+ return await self._client._request(
+ "POST",
+ endpoint,
+ json=payload,
+ response_model=None, # Return raw dict since we don't have a specific model
+ )
+
+ async def add_open_response(
+ self,
+ question: str,
+ response: str,
+ timestamp: Optional[str] = None,
+ ) -> Dict:
+ """Adds an open question-answer pair to the session's history.
+
+ Args:
+ question: The open question text
+ response: The response that was given
+ timestamp: Optional ISO timestamp of when this interaction occurred
+
+ Returns:
+ Dictionary with success status and the added turn details
+ """
+ endpoint = f"sessions/{str(self._id)}/add-turn"
+ payload = {
+ "turn_type": "open",
+ "question": question,
+ "response": response,
+ }
+ if timestamp:
+ payload["timestamp"] = timestamp
+
+ return await self._client._request(
+ "POST",
+ endpoint,
+ json=payload,
+ response_model=None, # Return raw dict since we don't have a specific model
+ )
+
+ async def close(self) -> SurveySessionCloseResponse:
+ """Closes this survey session on the server."""
+ endpoint = f"sessions/{str(self._id)}/close"
+ return await self._client._request(
+ "POST", endpoint, response_model=SurveySessionCloseResponse
+ )
+
+ async def add_closed_response(
+ self,
+ question: str,
+ options: List[str],
+ response: str,
+ timestamp: Optional[str] = None,
+ ) -> Dict:
+ """Adds a closed question-answer pair to the session's history.
+
+ Args:
+ question: The closed question text
+ options: List of answer options
+ response: The option that was selected
+ timestamp: Optional ISO timestamp of when this interaction occurred
+
+ Returns:
+ Dictionary with success status and the added turn details
+ """
+ endpoint = f"sessions/{str(self._id)}/add-turn"
+ payload = {
+ "turn_type": "closed",
+ "question": question,
+ "options": options,
+ "response": response,
+ }
+ if timestamp:
+ payload["timestamp"] = timestamp
+
+ return await self._client._request(
+ "POST",
+ endpoint,
+ json=payload,
+ response_model=None, # Return raw dict since we don't have a specific model
+ )
+
+
+ async def fork(self, turn_index: int) -> "SurveySession":
+ """Fork this session at a specific turn.
+
+ Creates a new session with the same agent and copies turns from this session
+ up to and including the specified turn index.
+
+ Args:
+ turn_index: The 0-based index of the last turn to include in the fork
+
+ Returns:
+ A new SurveySession object representing the forked session
+
+ Raises:
+ Simile.APIError: If the API request fails
+ """
+ endpoint = f"sessions/{str(self._id)}/fork"
+ params = {"turn_index": turn_index}
+
+ response = await self._client._request(
+ "POST",
+ endpoint,
+ params=params,
+ response_model=None, # Raw response
+ )
+
+ # Create a new SurveySession instance from the response
+ return SurveySession(
+ id=uuid.UUID(response["id"]),
+ agent_id=uuid.UUID(response["agent_id"]),
+ status=response.get("status", "active"),
+ client=self._client,
+ )
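
The rewritten module keeps the direct Agent wrapper and expands SurveySession with typed history replay (the add-turn methods) and a fork operation. A hedged usage sketch; how a SurveySession is obtained is not shown in this diff, so a session created elsewhere via the Simile client is assumed:

    async def run_survey(session) -> None:
        # "session" is assumed to be a SurveySession from the new module above.
        await session.add_context("You are answering a short commuting survey.")

        open_resp = await session.generate_open_response(
            question="Describe your typical weekday morning.",
        )
        closed_resp = await session.generate_closed_response(
            question="Which option best describes your commute?",
            options=["car", "bike", "public transit", "walk"],
        )
        print(open_resp.answer, closed_resp.response)

        # Replay a pre-recorded answer into the history without generating.
        await session.add_closed_response(
            question="Do you own a car?",
            options=["yes", "no"],
            response="no",
        )

        # Branch the conversation after its first three turns, then close the original.
        branch = await session.fork(turn_index=2)
        print(branch.id, branch.status)
        await session.close()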
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: simile
- Version: 0.2.14
+ Version: 0.3.1
  Summary: Package for interfacing with Simile AI agents for simulation
  Author-email: Simile AI <cqz@simile.ai>
  License: MIT
@@ -1,167 +0,0 @@
- import uuid
- from typing import TYPE_CHECKING, List, Optional, Dict
-
- from .models import (
- QualGenerationRequest,
- QualGenerationResponse,
- MCGenerationRequest,
- MCGenerationResponse,
- SurveySessionCloseResponse,
- AddContextRequest,
- AddContextResponse,
- SurveySessionDetailResponse,
- )
-
- if TYPE_CHECKING:
- from .client import Simile
-
-
- class Agent:
- """Represents an agent and provides methods for interacting with it directly."""
-
- def __init__(self, agent_id: uuid.UUID, client: "Simile"):
- self._agent_id = agent_id
- self._client = client
-
- @property
- def id(self) -> uuid.UUID:
- return self._agent_id
-
- async def generate_qual_response(
- self, question: str, images: Optional[Dict[str, str]] = None
- ) -> QualGenerationResponse:
- """Generates a qualitative response from this agent based on a question."""
- return await self._client.generate_qual_response(
- agent_id=self._agent_id, question=question, images=images
- )
-
- async def generate_mc_response(
- self, question: str, options: List[str], images: Optional[Dict[str, str]] = None
- ) -> MCGenerationResponse:
- """Generates a multiple-choice response from this agent."""
- return await self._client.generate_mc_response(
- agent_id=self._agent_id,
- question=question,
- options=options,
- images=images,
- )
-
-
- class SurveySession:
- """Represents an active survey session with an agent, allowing for contextual multi-turn generation."""
-
- def __init__(
- self, id: uuid.UUID, agent_id: uuid.UUID, status: str, client: "Simile"
- ):
- self._id = id
- self._agent_id = agent_id
- self._status = status
- self._client = client
-
- @property
- def id(self) -> uuid.UUID:
- return self._id
-
- @property
- def agent_id(self) -> uuid.UUID:
- return self._agent_id
-
- @property
- def status(self) -> str:
- return self._status
-
- async def get_details(self) -> SurveySessionDetailResponse:
- """Retrieves detailed information about this survey session including typed conversation history."""
- return await self._client.get_survey_session_details(self._id)
-
- async def generate_qual_response(
- self,
- question: str,
- images: Optional[Dict[str, str]] = None,
- ) -> QualGenerationResponse:
- """Generates a qualitative response within this survey session."""
- endpoint = f"sessions/{str(self._id)}/qual"
- payload = QualGenerationRequest(
- question=question,
- data_types=None,
- exclude_data_types=None,
- images=images,
- )
- return await self._client._request(
- "POST",
- endpoint,
- json=payload.model_dump(),
- response_model=QualGenerationResponse,
- )
-
- async def generate_mc_response(
- self, question: str, options: List[str], images: Optional[Dict[str, str]] = None
- ) -> MCGenerationResponse:
- """Generates a multiple-choice response within this survey session."""
- endpoint = f"sessions/{str(self._id)}/mc"
- payload = MCGenerationRequest(question=question, options=options, images=images)
- return await self._client._request(
- "POST",
- endpoint,
- json=payload.model_dump(),
- response_model=MCGenerationResponse,
- )
-
- async def add_context(self, ctx: str) -> AddContextResponse:
- """Adds text to the SurveySession without requesting a response."""
- endpoint = f"sessions/{str(self._id)}/context"
- payload = AddContextRequest(context=ctx)
- return await self._client._request(
- "POST",
- endpoint,
- json=payload.model_dump(),
- response_model=AddContextResponse,
- )
-
- async def close(self) -> SurveySessionCloseResponse:
- """Closes this survey session on the server."""
- endpoint = f"sessions/{str(self._id)}/close"
- return await self._client._request(
- "POST", endpoint, response_model=SurveySessionCloseResponse
- )
-
- async def add_historical_mc_turn(
- self,
- question: str,
- options: List[str],
- chosen_option: str,
- timestamp: Optional[str] = None,
- ) -> Dict:
- """Adds a historical multiple choice turn to this session with a pre-specified answer.
-
- This method allows you to add a multiple choice question-answer pair to the session's
- conversation history without generating a new response. This is useful for recreating
- conversation history or adding context from previous interactions.
-
- Args:
- question: The multiple choice question text
- options: List of answer options
- chosen_option: The option that was selected
- timestamp: Optional ISO timestamp of when this interaction occurred
-
- Returns:
- Dictionary with success status and the added turn details
-
- Raises:
- Simile.APIError: If the API request fails
- """
- endpoint = f"sessions/{str(self._id)}/add-turn"
- payload = {
- "question": question,
- "options": options,
- "chosen_option": chosen_option,
- }
- if timestamp:
- payload["timestamp"] = timestamp
-
- return await self._client._request(
- "POST",
- endpoint,
- json=payload,
- response_model=None, # Return raw dict since we don't have a specific model
- )