simile 0.3.12__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This release has been flagged as potentially problematic.


This version of simile might be problematic; see the registry's advisory page for details.

simile/__init__.py CHANGED
@@ -14,6 +14,13 @@ from .models import (
14
14
  OpenGenerationResponse,
15
15
  ClosedGenerationRequest,
16
16
  ClosedGenerationResponse,
17
+ MemoryStream,
18
+ MemoryTurn,
19
+ MemoryTurnType,
20
+ ContextMemoryTurn,
21
+ ImageMemoryTurn,
22
+ OpenQuestionMemoryTurn,
23
+ ClosedQuestionMemoryTurn,
17
24
  )
18
25
  from .exceptions import (
19
26
  SimileAPIError,
@@ -38,6 +45,13 @@ __all__ = [
38
45
  "OpenGenerationResponse",
39
46
  "ClosedGenerationRequest",
40
47
  "ClosedGenerationResponse",
48
+ "MemoryStream",
49
+ "MemoryTurn",
50
+ "MemoryTurnType",
51
+ "ContextMemoryTurn",
52
+ "ImageMemoryTurn",
53
+ "OpenQuestionMemoryTurn",
54
+ "ClosedQuestionMemoryTurn",
41
55
  "SimileAPIError",
42
56
  "SimileAuthenticationError",
43
57
  "SimileNotFoundError",
simile/client.py CHANGED
@@ -21,6 +21,7 @@ from .models import (
21
21
  InitialDataItemPayload,
22
22
  SurveySessionCreateResponse,
23
23
  SurveySessionDetailResponse,
24
+ MemoryStream,
24
25
  )
25
26
  from .resources import Agent, SurveySession
26
27
  from .exceptions import (
@@ -254,6 +255,18 @@ class Simile:
254
255
  "DELETE", f"agents/{str(agent_id)}/populations/{str(population_id)}"
255
256
  )
256
257
  return raw_response.json()
258
+
259
+ async def batch_add_agents_to_population(
260
+ self, agent_ids: List[Union[str, uuid.UUID]], population_id: Union[str, uuid.UUID]
261
+ ) -> Dict[str, Any]:
262
+ """Add multiple agents to a population in a single batch operation."""
263
+ agent_id_strs = [str(aid) for aid in agent_ids]
264
+ raw_response = await self._request(
265
+ "POST",
266
+ f"populations/{str(population_id)}/agents/batch",
267
+ json=agent_id_strs
268
+ )
269
+ return raw_response.json()
257
270
 
258
271
  async def get_populations_for_agent(
259
272
  self, agent_id: Union[str, uuid.UUID]
@@ -333,6 +346,7 @@ class Simile:
333
346
  exclude_data_types: Optional[List[str]] = None,
334
347
  images: Optional[Dict[str, str]] = None,
335
348
  reasoning: bool = False,
349
+ memory_stream: Optional[MemoryStream] = None,
336
350
  ) -> AsyncGenerator[str, None]:
337
351
  """Streams an open response from an agent."""
338
352
  endpoint = f"/generation/open-stream/{str(agent_id)}"
@@ -398,22 +412,75 @@ class Simile:
398
412
  exclude_data_types: Optional[List[str]] = None,
399
413
  images: Optional[Dict[str, str]] = None,
400
414
  reasoning: bool = False,
415
+ memory_stream: Optional[MemoryStream] = None,
416
+ use_memory: Optional[Union[str, uuid.UUID]] = None, # Session ID to load memory from
417
+ exclude_memory_ids: Optional[List[str]] = None, # Study/question IDs to exclude
418
+ save_memory: Optional[Union[str, uuid.UUID]] = None, # Session ID to save memory to
401
419
  ) -> OpenGenerationResponse:
402
- """Generates an open response from an agent based on a question."""
420
+ """Generates an open response from an agent based on a question.
421
+
422
+ Args:
423
+ agent_id: The agent to query
424
+ question: The question to ask
425
+ data_types: Optional data types to include
426
+ exclude_data_types: Optional data types to exclude
427
+ images: Optional images dict
428
+ reasoning: Whether to include reasoning
429
+ memory_stream: Explicit memory stream to use (overrides use_memory)
430
+ use_memory: Session ID to automatically load memory from
431
+ exclude_memory_ids: Study/question IDs to exclude from loaded memory
432
+ save_memory: Session ID to automatically save response to memory
433
+ """
434
+ # If use_memory is provided and no explicit memory_stream, load it
435
+ if use_memory and not memory_stream:
436
+ memory_stream = await self.get_memory(
437
+ session_id=use_memory,
438
+ agent_id=agent_id,
439
+ exclude_study_ids=exclude_memory_ids,
440
+ use_memory=True
441
+ )
442
+
403
443
  endpoint = f"/generation/open/{str(agent_id)}"
404
- request_payload = OpenGenerationRequest(
405
- question=question,
406
- data_types=data_types,
407
- exclude_data_types=exclude_data_types,
408
- images=images,
409
- reasoning=reasoning,
410
- )
444
+ # Build request payload directly as dict to avoid serialization issues
445
+ request_payload = {
446
+ "question": question,
447
+ "data_types": data_types,
448
+ "exclude_data_types": exclude_data_types,
449
+ "images": images,
450
+ "reasoning": reasoning,
451
+ }
452
+
453
+ if memory_stream:
454
+ request_payload["memory_stream"] = memory_stream.to_dict()
455
+
411
456
  response_data = await self._request(
412
457
  "POST",
413
458
  endpoint,
414
- json=request_payload.model_dump(),
459
+ json=request_payload,
415
460
  response_model=OpenGenerationResponse,
416
461
  )
462
+
463
+ # If save_memory is provided, save the response
464
+ if save_memory and response_data:
465
+ from .models import OpenQuestionMemoryTurn
466
+
467
+ memory_turn = OpenQuestionMemoryTurn(
468
+ user_question=question,
469
+ user_images=images,
470
+ llm_response=response_data.answer,
471
+ llm_reasoning=response_data.reasoning if reasoning else None
472
+ )
473
+
474
+ await self.save_memory(
475
+ agent_id=agent_id,
476
+ response=response_data.answer,
477
+ session_id=save_memory,
478
+ memory_turn=memory_turn.to_dict(),
479
+ memory_stream_used=memory_stream.to_dict() if memory_stream else None,
480
+ reasoning=response_data.reasoning if reasoning else None,
481
+ metadata={"question_type": "open"}
482
+ )
483
+
417
484
  return response_data
418
485
 
419
486
  async def generate_closed_response(
@@ -425,25 +492,260 @@ class Simile:
425
492
  exclude_data_types: Optional[List[str]] = None,
426
493
  images: Optional[Dict[str, str]] = None,
427
494
  reasoning: bool = False,
495
+ memory_stream: Optional[MemoryStream] = None,
496
+ use_memory: Optional[Union[str, uuid.UUID]] = None, # Session ID to load memory from
497
+ exclude_memory_ids: Optional[List[str]] = None, # Study/question IDs to exclude
498
+ save_memory: Optional[Union[str, uuid.UUID]] = None, # Session ID to save memory to
428
499
  ) -> ClosedGenerationResponse:
429
- """Generates a closed response from an agent."""
500
+ """Generates a closed response from an agent.
501
+
502
+ Args:
503
+ agent_id: The agent to query
504
+ question: The question to ask
505
+ options: The options to choose from
506
+ data_types: Optional data types to include
507
+ exclude_data_types: Optional data types to exclude
508
+ images: Optional images dict
509
+ reasoning: Whether to include reasoning
510
+ memory_stream: Explicit memory stream to use (overrides use_memory)
511
+ use_memory: Session ID to automatically load memory from
512
+ exclude_memory_ids: Study/question IDs to exclude from loaded memory
513
+ save_memory: Session ID to automatically save response to memory
514
+ """
515
+ # If use_memory is provided and no explicit memory_stream, load it
516
+ if use_memory and not memory_stream:
517
+ memory_stream = await self.get_memory(
518
+ session_id=use_memory,
519
+ agent_id=agent_id,
520
+ exclude_study_ids=exclude_memory_ids,
521
+ use_memory=True
522
+ )
523
+
430
524
  endpoint = f"generation/closed/{str(agent_id)}"
431
- request_payload = ClosedGenerationRequest(
432
- question=question,
433
- options=options,
434
- data_types=data_types,
435
- exclude_data_types=exclude_data_types,
436
- images=images,
437
- reasoning=reasoning,
438
- )
525
+ # Build request payload directly as dict to avoid serialization issues
526
+ request_payload = {
527
+ "question": question,
528
+ "options": options,
529
+ "data_types": data_types,
530
+ "exclude_data_types": exclude_data_types,
531
+ "images": images,
532
+ "reasoning": reasoning,
533
+ }
534
+
535
+ if memory_stream:
536
+ request_payload["memory_stream"] = memory_stream.to_dict()
537
+
439
538
  response_data = await self._request(
440
539
  "POST",
441
540
  endpoint,
442
- json=request_payload.model_dump(),
541
+ json=request_payload,
443
542
  response_model=ClosedGenerationResponse,
444
543
  )
544
+
545
+ # If save_memory is provided, save the response
546
+ if save_memory and response_data:
547
+ from .models import ClosedQuestionMemoryTurn
548
+
549
+ memory_turn = ClosedQuestionMemoryTurn(
550
+ user_question=question,
551
+ user_options=options,
552
+ user_images=images,
553
+ llm_response=response_data.response,
554
+ llm_reasoning=response_data.reasoning if reasoning else None
555
+ )
556
+
557
+ await self.save_memory(
558
+ agent_id=agent_id,
559
+ response=response_data.response,
560
+ session_id=save_memory,
561
+ memory_turn=memory_turn.to_dict(),
562
+ memory_stream_used=memory_stream.to_dict() if memory_stream else None,
563
+ reasoning=response_data.reasoning if reasoning else None,
564
+ metadata={"question_type": "closed", "options": options}
565
+ )
566
+
445
567
  return response_data
446
568
 
569
+ # Memory Management Methods
570
+
571
+ async def save_memory(
572
+ self,
573
+ agent_id: Union[str, uuid.UUID],
574
+ response: str,
575
+ session_id: Optional[Union[str, uuid.UUID]] = None,
576
+ question_id: Optional[Union[str, uuid.UUID]] = None,
577
+ study_id: Optional[Union[str, uuid.UUID]] = None,
578
+ memory_turn: Optional[Dict[str, Any]] = None,
579
+ memory_stream_used: Optional[Dict[str, Any]] = None,
580
+ reasoning: Optional[str] = None,
581
+ metadata: Optional[Dict[str, Any]] = None,
582
+ ) -> str:
583
+ """
584
+ Save a response with associated memory information.
585
+
586
+ Args:
587
+ agent_id: The agent ID
588
+ response: The agent's response text
589
+ session_id: Session ID for memory continuity
590
+ question_id: The question ID (optional)
591
+ study_id: The study ID (optional)
592
+ memory_turn: The memory turn to save
593
+ memory_stream_used: The memory stream that was used
594
+ reasoning: Optional reasoning
595
+ metadata: Additional metadata
596
+
597
+ Returns:
598
+ Response ID if saved successfully
599
+ """
600
+ payload = {
601
+ "agent_id": str(agent_id),
602
+ "response": response,
603
+ }
604
+
605
+ if session_id:
606
+ payload["session_id"] = str(session_id)
607
+ if question_id:
608
+ payload["question_id"] = str(question_id)
609
+ if study_id:
610
+ payload["study_id"] = str(study_id)
611
+ if memory_turn:
612
+ payload["memory_turn"] = memory_turn
613
+ if memory_stream_used:
614
+ payload["memory_stream_used"] = memory_stream_used
615
+ if reasoning:
616
+ payload["reasoning"] = reasoning
617
+ if metadata:
618
+ payload["metadata"] = metadata
619
+
620
+ response = await self._request("POST", "memory/save", json=payload)
621
+ data = response.json()
622
+ if data.get("success"):
623
+ return data.get("response_id")
624
+ raise SimileAPIError("Failed to save memory")
625
+
626
+ async def get_memory(
627
+ self,
628
+ session_id: Union[str, uuid.UUID],
629
+ agent_id: Union[str, uuid.UUID],
630
+ exclude_study_ids: Optional[List[Union[str, uuid.UUID]]] = None,
631
+ exclude_question_ids: Optional[List[Union[str, uuid.UUID]]] = None,
632
+ limit: Optional[int] = None,
633
+ use_memory: bool = True,
634
+ ) -> Optional[MemoryStream]:
635
+ """
636
+ Retrieve the memory stream for an agent in a session.
637
+
638
+ Args:
639
+ session_id: Session ID to filter by
640
+ agent_id: The agent ID
641
+ exclude_study_ids: List of study IDs to exclude
642
+ exclude_question_ids: List of question IDs to exclude
643
+ limit: Maximum number of turns to include
644
+ use_memory: Whether to use memory at all
645
+
646
+ Returns:
647
+ MemoryStream object or None
648
+ """
649
+ payload = {
650
+ "session_id": str(session_id),
651
+ "agent_id": str(agent_id),
652
+ "use_memory": use_memory,
653
+ }
654
+
655
+ if exclude_study_ids:
656
+ payload["exclude_study_ids"] = [str(id) for id in exclude_study_ids]
657
+ if exclude_question_ids:
658
+ payload["exclude_question_ids"] = [str(id) for id in exclude_question_ids]
659
+ if limit:
660
+ payload["limit"] = limit
661
+
662
+ response = await self._request("POST", "memory/get", json=payload)
663
+ data = response.json()
664
+
665
+ if data.get("success") and data.get("memory_stream"):
666
+ return MemoryStream.from_dict(data["memory_stream"])
667
+ return None
668
+
669
+ async def get_memory_summary(
670
+ self,
671
+ session_id: Union[str, uuid.UUID],
672
+ ) -> Dict[str, Any]:
673
+ """
674
+ Get a summary of memory usage for a session.
675
+
676
+ Args:
677
+ session_id: Session ID to analyze
678
+
679
+ Returns:
680
+ Dictionary with memory statistics
681
+ """
682
+ response = await self._request("GET", f"memory/summary/{session_id}")
683
+ data = response.json()
684
+ if data.get("success"):
685
+ return data.get("summary", {})
686
+ return {}
687
+
688
+ async def clear_memory(
689
+ self,
690
+ session_id: Union[str, uuid.UUID],
691
+ agent_id: Optional[Union[str, uuid.UUID]] = None,
692
+ study_id: Optional[Union[str, uuid.UUID]] = None,
693
+ ) -> bool:
694
+ """
695
+ Clear memory for a session, optionally filtered by agent or study.
696
+
697
+ Args:
698
+ session_id: Session ID to clear memory for
699
+ agent_id: Optional agent ID to filter by
700
+ study_id: Optional study ID to filter by
701
+
702
+ Returns:
703
+ True if cleared successfully, False otherwise
704
+ """
705
+ payload = {
706
+ "session_id": str(session_id),
707
+ }
708
+
709
+ if agent_id:
710
+ payload["agent_id"] = str(agent_id)
711
+ if study_id:
712
+ payload["study_id"] = str(study_id)
713
+
714
+ response = await self._request("POST", "memory/clear", json=payload)
715
+ data = response.json()
716
+ return data.get("success", False)
717
+
718
+ async def copy_memory(
719
+ self,
720
+ from_session_id: Union[str, uuid.UUID],
721
+ to_session_id: Union[str, uuid.UUID],
722
+ agent_id: Optional[Union[str, uuid.UUID]] = None,
723
+ ) -> int:
724
+ """
725
+ Copy memory from one session to another.
726
+
727
+ Args:
728
+ from_session_id: Source session ID
729
+ to_session_id: Destination session ID
730
+ agent_id: Optional agent ID to filter by
731
+
732
+ Returns:
733
+ Number of memory turns copied
734
+ """
735
+ payload = {
736
+ "from_session_id": str(from_session_id),
737
+ "to_session_id": str(to_session_id),
738
+ }
739
+
740
+ if agent_id:
741
+ payload["agent_id"] = str(agent_id)
742
+
743
+ response = await self._request("POST", "memory/copy", json=payload)
744
+ data = response.json()
745
+ if data.get("success"):
746
+ return data.get("copied_turns", 0)
747
+ return 0
748
+
447
749
  async def aclose(self):
448
750
  await self._client.aclose()
449
751
 
simile/models.py CHANGED
@@ -80,6 +80,7 @@ class OpenGenerationRequest(BaseModel):
80
80
  None # Dict of {description: url} for multiple images
81
81
  )
82
82
  reasoning: bool = False
83
+ memory_stream: Optional["MemoryStream"] = None
83
84
 
84
85
 
85
86
  class OpenGenerationResponse(BaseModel):
@@ -95,6 +96,7 @@ class ClosedGenerationRequest(BaseModel):
95
96
  exclude_data_types: Optional[List[str]] = None
96
97
  images: Optional[Dict[str, str]] = None
97
98
  reasoning: bool = False
99
+ memory_stream: Optional["MemoryStream"] = None
98
100
 
99
101
 
100
102
  class ClosedGenerationResponse(BaseModel):
@@ -229,3 +231,170 @@ class SurveySessionCloseResponse(BaseModel):
229
231
  status: str
230
232
  updated_at: datetime
231
233
  message: Optional[str] = None
234
+
235
+
236
+ # --- Memory Stream Models (to replace Survey Sessions) ---
237
+ class MemoryTurnType(str, Enum):
238
+ """Enum for different types of memory turns."""
239
+
240
+ CONTEXT = "context"
241
+ IMAGE = "image"
242
+ OPEN_QUESTION = "open_question"
243
+ CLOSED_QUESTION = "closed_question"
244
+
245
+
246
+ class BaseMemoryTurn(BaseModel):
247
+ """Base model for all memory turns."""
248
+
249
+ timestamp: datetime = Field(default_factory=lambda: datetime.now())
250
+ type: MemoryTurnType
251
+
252
+ class Config:
253
+ use_enum_values = True
254
+
255
+ def to_dict(self) -> Dict[str, Any]:
256
+ """Convert to dictionary for serialization."""
257
+ data = self.model_dump()
258
+ # Remove timestamp - let API handle it
259
+ data.pop("timestamp", None)
260
+ # Ensure enum is serialized as string
261
+ if "type" in data:
262
+ if hasattr(data["type"], "value"):
263
+ data["type"] = data["type"].value
264
+ return data
265
+
266
+
267
+ class ContextMemoryTurn(BaseMemoryTurn):
268
+ """A context turn that provides background information."""
269
+
270
+ type: MemoryTurnType = Field(default=MemoryTurnType.CONTEXT)
271
+ user_context: str
272
+
273
+
274
+ class ImageMemoryTurn(BaseMemoryTurn):
275
+ """A standalone image turn (e.g., for context or reference)."""
276
+
277
+ type: MemoryTurnType = Field(default=MemoryTurnType.IMAGE)
278
+ images: Dict[str, str]
279
+ caption: Optional[str] = None
280
+
281
+
282
+ class OpenQuestionMemoryTurn(BaseMemoryTurn):
283
+ """An open question-answer turn."""
284
+
285
+ type: MemoryTurnType = Field(default=MemoryTurnType.OPEN_QUESTION)
286
+ user_question: str
287
+ user_images: Optional[Dict[str, str]] = None
288
+ llm_response: Optional[str] = None
289
+ llm_reasoning: Optional[str] = None
290
+
291
+
292
+ class ClosedQuestionMemoryTurn(BaseMemoryTurn):
293
+ """A closed question-answer turn."""
294
+
295
+ type: MemoryTurnType = Field(default=MemoryTurnType.CLOSED_QUESTION)
296
+ user_question: str
297
+ user_options: List[str]
298
+ user_images: Optional[Dict[str, str]] = None
299
+ llm_response: Optional[str] = None
300
+ llm_reasoning: Optional[str] = None
301
+
302
+
303
+ # Discriminated union of all memory turn types
304
+ MemoryTurn = Union[
305
+ ContextMemoryTurn, ImageMemoryTurn, OpenQuestionMemoryTurn, ClosedQuestionMemoryTurn
306
+ ]
307
+
308
+
309
+ class MemoryStream(BaseModel):
310
+ """
311
+ A flexible memory stream that can be passed to generation functions.
312
+ This replaces the session-based approach with a more flexible paradigm.
313
+ """
314
+
315
+ turns: List[MemoryTurn] = Field(default_factory=list)
316
+
317
+ def add_turn(self, turn: MemoryTurn) -> None:
318
+ """Add a turn to the memory stream."""
319
+ self.turns.append(turn)
320
+
321
+ def remove_turn(self, index: int) -> Optional[MemoryTurn]:
322
+ """Remove a turn at the specified index."""
323
+ if 0 <= index < len(self.turns):
324
+ return self.turns.pop(index)
325
+ return None
326
+
327
+ def get_turns_by_type(self, turn_type: MemoryTurnType) -> List[MemoryTurn]:
328
+ """Get all turns of a specific type."""
329
+ return [turn for turn in self.turns if turn.type == turn_type]
330
+
331
+ def get_last_turn(self) -> Optional[MemoryTurn]:
332
+ """Get the most recent turn."""
333
+ return self.turns[-1] if self.turns else None
334
+
335
+ def clear(self) -> None:
336
+ """Clear all turns from the memory stream."""
337
+ self.turns = []
338
+
339
+ def __len__(self) -> int:
340
+ """Return the number of turns in the memory stream."""
341
+ return len(self.turns)
342
+
343
+ def __bool__(self) -> bool:
344
+ """Return True if the memory stream has any turns."""
345
+ return bool(self.turns)
346
+
347
+ def to_dict(self) -> Dict[str, Any]:
348
+ """Convert memory stream to a dictionary for serialization."""
349
+ return {
350
+ "turns": [turn.to_dict() for turn in self.turns]
351
+ }
352
+
353
+ @classmethod
354
+ def from_dict(cls, data: Dict[str, Any]) -> "MemoryStream":
355
+ """Create a MemoryStream from a dictionary."""
356
+ memory = cls()
357
+ for turn_data in data.get("turns", []):
358
+ turn_type = turn_data.get("type")
359
+ if turn_type == MemoryTurnType.CONTEXT:
360
+ memory.add_turn(ContextMemoryTurn(**turn_data))
361
+ elif turn_type == MemoryTurnType.IMAGE:
362
+ memory.add_turn(ImageMemoryTurn(**turn_data))
363
+ elif turn_type == MemoryTurnType.OPEN_QUESTION:
364
+ memory.add_turn(OpenQuestionMemoryTurn(**turn_data))
365
+ elif turn_type == MemoryTurnType.CLOSED_QUESTION:
366
+ memory.add_turn(ClosedQuestionMemoryTurn(**turn_data))
367
+ return memory
368
+
369
+ def fork(self, up_to_index: Optional[int] = None) -> "MemoryStream":
370
+ """Create a copy of this memory stream, optionally up to a specific index."""
371
+ new_memory = MemoryStream()
372
+ turns_to_copy = self.turns[:up_to_index] if up_to_index is not None else self.turns
373
+ for turn in turns_to_copy:
374
+ new_memory.add_turn(turn.model_copy())
375
+ return new_memory
376
+
377
+ def filter_by_type(self, turn_type: MemoryTurnType) -> "MemoryStream":
378
+ """Create a new memory stream with only turns of a specific type."""
379
+ new_memory = MemoryStream()
380
+ for turn in self.get_turns_by_type(turn_type):
381
+ new_memory.add_turn(turn.model_copy())
382
+ return new_memory
383
+
384
+ def get_question_answer_pairs(self) -> List[tuple]:
385
+ """Extract question-answer pairs from the memory."""
386
+ pairs = []
387
+ for turn in self.turns:
388
+ if isinstance(turn, (OpenQuestionMemoryTurn, ClosedQuestionMemoryTurn)):
389
+ if turn.llm_response:
390
+ pairs.append((turn.user_question, turn.llm_response))
391
+ return pairs
392
+
393
+ def truncate(self, max_turns: int) -> None:
394
+ """Keep only the most recent N turns."""
395
+ if len(self.turns) > max_turns:
396
+ self.turns = self.turns[-max_turns:]
397
+
398
+ def insert_turn(self, index: int, turn: MemoryTurn) -> None:
399
+ """Insert a turn at a specific position."""
400
+ self.turns.insert(index, turn)
simile/resources.py CHANGED
@@ -11,6 +11,7 @@ from .models import (
11
11
  AddContextResponse,
12
12
  SurveySessionDetailResponse,
13
13
  SurveySessionCreateResponse,
14
+ MemoryStream,
14
15
  )
15
16
 
16
17
  if TYPE_CHECKING:
@@ -34,6 +35,7 @@ class Agent:
34
35
  data_types: Optional[List[str]] = None,
35
36
  exclude_data_types: Optional[List[str]] = None,
36
37
  images: Optional[Dict[str, str]] = None,
38
+ memory_stream: Optional[MemoryStream] = None,
37
39
  ) -> OpenGenerationResponse:
38
40
  """Generates an open response from this agent based on a question."""
39
41
  return await self._client.generate_open_response(
@@ -42,6 +44,7 @@ class Agent:
42
44
  data_types=data_types,
43
45
  exclude_data_types=exclude_data_types,
44
46
  images=images,
47
+ memory_stream=memory_stream,
45
48
  )
46
49
 
47
50
  async def generate_closed_response(
@@ -51,6 +54,7 @@ class Agent:
51
54
  data_types: Optional[List[str]] = None,
52
55
  exclude_data_types: Optional[List[str]] = None,
53
56
  images: Optional[Dict[str, str]] = None,
57
+ memory_stream: Optional[MemoryStream] = None,
54
58
  ) -> ClosedGenerationResponse:
55
59
  """Generates a closed response from this agent."""
56
60
  return await self._client.generate_closed_response(
@@ -60,6 +64,7 @@ class Agent:
60
64
  data_types=data_types,
61
65
  exclude_data_types=exclude_data_types,
62
66
  images=images,
67
+ memory_stream=memory_stream,
63
68
  )
64
69
 
65
70
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: simile
3
- Version: 0.3.12
3
+ Version: 0.4.1
4
4
  Summary: Package for interfacing with Simile AI agents for simulation
5
5
  Author-email: Simile AI <cqz@simile.ai>
6
6
  License: MIT
@@ -0,0 +1,11 @@
1
+ simile/__init__.py,sha256=JAYtieyGg6YYiCackJ6YNlGJkFWmhryzbwwVt4K67uI,1360
2
+ simile/auth_client.py,sha256=ICImmaA5fZX9ADbIPIUh4RED3hBZvLf3XSiaqELDAME,7923
3
+ simile/client.py,sha256=bou2NijZT8XF5cBzy5zrqnwE2bmLOYqagmpXHgZYwNk,28288
4
+ simile/exceptions.py,sha256=Q1lbfwR7mEn_LYmwjAnsMc8BW79JNPvmCmVoPibYisU,1502
5
+ simile/models.py,sha256=7-meixtd7oyMAc_k24SbKPjMD05tTI0npHVKAeGFcDo,11693
6
+ simile/resources.py,sha256=LSYZSzx1YO69xvShGxxLFVPjQHw1WukHXhdYc1EyuOs,10555
7
+ simile-0.4.1.dist-info/licenses/LICENSE,sha256=tpxX3bpODfyOQVyEM6kCMvPHFCpkjFDj0AICRqKqOFA,1066
8
+ simile-0.4.1.dist-info/METADATA,sha256=YGu7eL55DR7TVW5Uon8MQfJq-sxOr1blFjG0vmQDuqY,1598
9
+ simile-0.4.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
10
+ simile-0.4.1.dist-info/top_level.txt,sha256=41lJneubAG4-ZOAs5qn7iDtDb-MDxa6DdvgBKwNX84M,7
11
+ simile-0.4.1.dist-info/RECORD,,
@@ -1,11 +0,0 @@
1
- simile/__init__.py,sha256=D88zktKWLzF2EgbNm743Ype_p4s7xXWvNEQBr6mRsCI,1034
2
- simile/auth_client.py,sha256=ICImmaA5fZX9ADbIPIUh4RED3hBZvLf3XSiaqELDAME,7923
3
- simile/client.py,sha256=EdOUXgkvIhGL1p1FXO8XiYn-lHyDJGuMoDC1K_Ll74A,16732
4
- simile/exceptions.py,sha256=Q1lbfwR7mEn_LYmwjAnsMc8BW79JNPvmCmVoPibYisU,1502
5
- simile/models.py,sha256=pT5jDIwVPlPIOK4qQ1CkyrYp0dLZqFCQTWmGX07dGug,5713
6
- simile/resources.py,sha256=D6K3crR4tb3CAFH7mF-y536pJwJ3rmNDCtm7Y2vMtsc,10347
7
- simile-0.3.12.dist-info/licenses/LICENSE,sha256=tpxX3bpODfyOQVyEM6kCMvPHFCpkjFDj0AICRqKqOFA,1066
8
- simile-0.3.12.dist-info/METADATA,sha256=tHwqTkgi4GKNJsLuBefJJ-zzVpOlZEmKt_T-c-Zp12U,1599
9
- simile-0.3.12.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
10
- simile-0.3.12.dist-info/top_level.txt,sha256=41lJneubAG4-ZOAs5qn7iDtDb-MDxa6DdvgBKwNX84M,7
11
- simile-0.3.12.dist-info/RECORD,,