simile 0.4.3.tar.gz → 0.4.4.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of simile might be problematic.
- {simile-0.4.3 → simile-0.4.4}/PKG-INFO +1 -1
- {simile-0.4.3 → simile-0.4.4}/pyproject.toml +1 -1
- {simile-0.4.3 → simile-0.4.4}/simile/client.py +82 -47
- {simile-0.4.3 → simile-0.4.4}/simile/models.py +12 -5
- {simile-0.4.3 → simile-0.4.4}/simile.egg-info/PKG-INFO +1 -1
- {simile-0.4.3 → simile-0.4.4}/LICENSE +0 -0
- {simile-0.4.3 → simile-0.4.4}/README.md +0 -0
- {simile-0.4.3 → simile-0.4.4}/setup.cfg +0 -0
- {simile-0.4.3 → simile-0.4.4}/setup.py +0 -0
- {simile-0.4.3 → simile-0.4.4}/simile/__init__.py +0 -0
- {simile-0.4.3 → simile-0.4.4}/simile/auth_client.py +0 -0
- {simile-0.4.3 → simile-0.4.4}/simile/exceptions.py +0 -0
- {simile-0.4.3 → simile-0.4.4}/simile/resources.py +0 -0
- {simile-0.4.3 → simile-0.4.4}/simile.egg-info/SOURCES.txt +0 -0
- {simile-0.4.3 → simile-0.4.4}/simile.egg-info/dependency_links.txt +0 -0
- {simile-0.4.3 → simile-0.4.4}/simile.egg-info/requires.txt +0 -0
- {simile-0.4.3 → simile-0.4.4}/simile.egg-info/top_level.txt +0 -0
simile/client.py

```diff
@@ -7,6 +7,7 @@ from pydantic import BaseModel
 from .models import (
     Population,
     PopulationInfo,
+    UpdatePopulationMetadataPayload,
     Agent as AgentModel,
     DataItem,
     DeletionResponse,
@@ -164,6 +165,32 @@ class Simile:
         )
         return response_data
 
+    async def update_population_metadata(
+        self,
+        population_id: Union[str, uuid.UUID],
+        metadata: Dict[str, Any],
+        mode: str = "merge",
+    ) -> Population:
+        """
+        Update a population's metadata (jsonb).
+
+        Args:
+            population_id: The ID of the population
+            metadata: A dictionary of metadata to merge or replace
+            mode: Either "merge" (default) or "replace"
+
+        Returns:
+            Updated Population object
+        """
+        payload = UpdatePopulationMetadataPayload(metadata=metadata, mode=mode)
+        response_data = await self._request(
+            "PATCH",
+            f"populations/{str(population_id)}/metadata",
+            json=payload,
+            response_model=Population,
+        )
+        return response_data
+
     async def get_population(self, population_id: Union[str, uuid.UUID]) -> Population:
         response_data = await self._request(
             "GET", f"populations/get/{str(population_id)}", response_model=Population
```
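The new `update_population_metadata` method issues a PATCH to `populations/{population_id}/metadata` and returns the updated `Population`. A minimal usage sketch follows; the `Simile(api_key=...)` constructor call, the import path, and the IDs are assumptions for illustration, since only the method body appears in this diff.

```python
import asyncio

from simile import Simile  # assumed import path for the client class

async def main() -> None:
    # Assumed constructor; the diff shows only the new method, not client setup.
    client = Simile(api_key="YOUR_API_KEY")

    # Merge new keys into the population's existing jsonb metadata (default mode).
    population = await client.update_population_metadata(
        population_id="00000000-0000-0000-0000-000000000000",  # hypothetical ID
        metadata={"cohort": "pilot", "region": "EU"},
        mode="merge",  # or "replace" to overwrite the stored metadata entirely
    )
    print(population.metadata)

asyncio.run(main())
```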
```diff
@@ -255,16 +282,16 @@ class Simile:
             "DELETE", f"agents/{str(agent_id)}/populations/{str(population_id)}"
         )
         return raw_response.json()
-
+
     async def batch_add_agents_to_population(
-        self,
+        self,
+        agent_ids: List[Union[str, uuid.UUID]],
+        population_id: Union[str, uuid.UUID],
     ) -> Dict[str, Any]:
         """Add multiple agents to a population in a single batch operation."""
         agent_id_strs = [str(aid) for aid in agent_ids]
         raw_response = await self._request(
-            "POST",
-            f"populations/{str(population_id)}/agents/batch",
-            json=agent_id_strs
+            "POST", f"populations/{str(population_id)}/agents/batch", json=agent_id_strs
         )
         return raw_response.json()
 
```
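The batch hunk is largely a formatting change, but it shows the call shape: the agent IDs are stringified and sent as the raw JSON body of `POST populations/{population_id}/agents/batch`. A hedged sketch, assuming `client` is an already-initialized `Simile` instance and the IDs are placeholders:

```python
# Sketch only: `client` is assumed to be an initialized Simile instance.
async def add_pilot_agents(client) -> None:
    agent_ids = [
        "11111111-1111-1111-1111-111111111111",  # hypothetical agent IDs
        "22222222-2222-2222-2222-222222222222",
    ]
    result = await client.batch_add_agents_to_population(
        agent_ids=agent_ids,
        population_id="00000000-0000-0000-0000-000000000000",  # hypothetical population ID
    )
    print(result)  # parsed JSON response from the batch endpoint
```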
```diff
@@ -419,12 +446,16 @@ class Simile:
         evidence: bool = False,
         confidence: bool = False,
         memory_stream: Optional[MemoryStream] = None,
-        use_memory: Optional[
+        use_memory: Optional[
+            Union[str, uuid.UUID]
+        ] = None, # Session ID to load memory from
         exclude_memory_ids: Optional[List[str]] = None, # Study/question IDs to exclude
-        save_memory: Optional[
+        save_memory: Optional[
+            Union[str, uuid.UUID]
+        ] = None, # Session ID to save memory to
     ) -> OpenGenerationResponse:
         """Generates an open response from an agent based on a question.
-
+
         Args:
             agent_id: The agent to query
             question: The question to ask
@@ -448,30 +479,30 @@ class Simile:
             "evidence": evidence,
             "confidence": confidence,
         }
-
+
         # Pass memory parameters to API for server-side handling
         if use_memory:
             request_payload["use_memory"] = str(use_memory)
         if exclude_memory_ids:
             request_payload["exclude_memory_ids"] = exclude_memory_ids
-
+
         if save_memory:
             request_payload["save_memory"] = str(save_memory)
-
+
         # Only include explicit memory_stream if provided directly
         if memory_stream:
             request_payload["memory_stream"] = memory_stream.to_dict()
-
+
         response_data = await self._request(
             "POST",
             endpoint,
             json=request_payload,
             response_model=OpenGenerationResponse,
         )
-
+
         # Don't save memory here - API should handle it when save_memory is passed
         # Memory saving is now handled server-side for better performance
-
+
         return response_data
 
     async def generate_closed_response(
```
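These two hunks only re-wrap the signature and normalize blank lines, but they document how server-side memory is wired: `use_memory` and `save_memory` are session IDs forwarded as strings, `exclude_memory_ids` filters prior turns out of context, and an explicit `memory_stream` is serialized with `to_dict()`. A hedged sketch of the open-response call, with an assumed initialized `client` and placeholder IDs:

```python
# Sketch only: `client` is assumed to be an initialized Simile instance.
async def ask_with_memory(client) -> None:
    session_id = "33333333-3333-3333-3333-333333333333"  # hypothetical session ID
    response = await client.generate_open_response(
        agent_id="11111111-1111-1111-1111-111111111111",  # hypothetical agent ID
        question="What did you think of the new onboarding flow?",
        use_memory=session_id,            # load prior turns from this session (server-side)
        save_memory=session_id,           # persist this exchange back to the same session
        exclude_memory_ids=["study-42"],  # hypothetical study ID to leave out of context
    )
    print(response)  # OpenGenerationResponse
```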
```diff
@@ -486,12 +517,16 @@ class Simile:
         evidence: bool = False,
         confidence: bool = False,
         memory_stream: Optional[MemoryStream] = None,
-        use_memory: Optional[
+        use_memory: Optional[
+            Union[str, uuid.UUID]
+        ] = None, # Session ID to load memory from
         exclude_memory_ids: Optional[List[str]] = None, # Study/question IDs to exclude
-        save_memory: Optional[
+        save_memory: Optional[
+            Union[str, uuid.UUID]
+        ] = None, # Session ID to save memory to
     ) -> ClosedGenerationResponse:
         """Generates a closed response from an agent.
-
+
         Args:
             agent_id: The agent to query
             question: The question to ask
@@ -517,34 +552,34 @@ class Simile:
             "evidence": evidence,
             "confidence": confidence,
         }
-
+
         # Pass memory parameters to API for server-side handling
         if use_memory:
             request_payload["use_memory"] = str(use_memory)
         if exclude_memory_ids:
             request_payload["exclude_memory_ids"] = exclude_memory_ids
-
+
         if save_memory:
             request_payload["save_memory"] = str(save_memory)
-
+
         # Only include explicit memory_stream if provided directly
         if memory_stream:
             request_payload["memory_stream"] = memory_stream.to_dict()
-
+
         response_data = await self._request(
             "POST",
             endpoint,
             json=request_payload,
             response_model=ClosedGenerationResponse,
         )
-
+
         # Don't save memory here - API should handle it when save_memory is passed
         # Memory saving is now handled server-side for better performance
-
+
         return response_data
 
     # Memory Management Methods
-
+
     async def save_memory(
         self,
         agent_id: Union[str, uuid.UUID],
@@ -559,7 +594,7 @@ class Simile:
     ) -> str:
         """
         Save a response with associated memory information.
-
+
         Args:
             agent_id: The agent ID
             response: The agent's response text
@@ -570,7 +605,7 @@ class Simile:
             memory_stream_used: The memory stream that was used
             reasoning: Optional reasoning
             metadata: Additional metadata
-
+
         Returns:
             Response ID if saved successfully
         """
@@ -578,7 +613,7 @@ class Simile:
             "agent_id": str(agent_id),
             "response": response,
         }
-
+
         if session_id:
             payload["session_id"] = str(session_id)
         if question_id:
@@ -593,13 +628,13 @@ class Simile:
             payload["reasoning"] = reasoning
         if metadata:
             payload["metadata"] = metadata
-
+
         response = await self._request("POST", "memory/save", json=payload)
         data = response.json()
         if data.get("success"):
             return data.get("response_id")
         raise SimileAPIError("Failed to save memory")
-
+
     async def get_memory(
         self,
         session_id: Union[str, uuid.UUID],
@@ -611,7 +646,7 @@ class Simile:
     ) -> Optional[MemoryStream]:
         """
         Retrieve the memory stream for an agent in a session.
-
+
         Args:
             session_id: Session ID to filter by
             agent_id: The agent ID
@@ -619,7 +654,7 @@ class Simile:
             exclude_question_ids: List of question IDs to exclude
             limit: Maximum number of turns to include
             use_memory: Whether to use memory at all
-
+
         Returns:
             MemoryStream object or None
         """
```
```diff
@@ -628,31 +663,31 @@ class Simile:
             "agent_id": str(agent_id),
             "use_memory": use_memory,
         }
-
+
         if exclude_study_ids:
             payload["exclude_study_ids"] = [str(id) for id in exclude_study_ids]
         if exclude_question_ids:
             payload["exclude_question_ids"] = [str(id) for id in exclude_question_ids]
         if limit:
             payload["limit"] = limit
-
+
         response = await self._request("POST", "memory/get", json=payload)
         data = response.json()
-
+
         if data.get("success") and data.get("memory_stream"):
             return MemoryStream.from_dict(data["memory_stream"])
         return None
-
+
     async def get_memory_summary(
         self,
         session_id: Union[str, uuid.UUID],
     ) -> Dict[str, Any]:
         """
         Get a summary of memory usage for a session.
-
+
         Args:
             session_id: Session ID to analyze
-
+
         Returns:
             Dictionary with memory statistics
         """
```
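Although this hunk only normalizes blank lines, it spells out the retrieval flow: `get_memory` POSTs the filters to `memory/get` and rebuilds a `MemoryStream` from the response, returning `None` when there is nothing to load. A hedged sketch, with placeholder IDs and an assumed initialized `client`:

```python
# Sketch only: `client` is assumed to be an initialized Simile instance.
async def load_agent_memory(client) -> None:
    memory = await client.get_memory(
        session_id="33333333-3333-3333-3333-333333333333",  # hypothetical session ID
        agent_id="11111111-1111-1111-1111-111111111111",    # hypothetical agent ID
        exclude_study_ids=["study-42"],  # hypothetical study IDs to filter out
        limit=20,                        # cap the number of memory turns returned
    )
    if memory is not None:
        print(f"loaded {len(memory.turns)} turns")
```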
```diff
@@ -661,7 +696,7 @@ class Simile:
         if data.get("success"):
             return data.get("summary", {})
         return {}
-
+
     async def clear_memory(
         self,
         session_id: Union[str, uuid.UUID],
@@ -670,28 +705,28 @@ class Simile:
     ) -> bool:
         """
         Clear memory for a session, optionally filtered by agent or study.
-
+
         Args:
             session_id: Session ID to clear memory for
             agent_id: Optional agent ID to filter by
             study_id: Optional study ID to filter by
-
+
         Returns:
             True if cleared successfully, False otherwise
         """
         payload = {
             "session_id": str(session_id),
         }
-
+
         if agent_id:
             payload["agent_id"] = str(agent_id)
         if study_id:
             payload["study_id"] = str(study_id)
-
+
         response = await self._request("POST", "memory/clear", json=payload)
         data = response.json()
         return data.get("success", False)
-
+
     async def copy_memory(
         self,
         from_session_id: Union[str, uuid.UUID],
@@ -700,12 +735,12 @@ class Simile:
     ) -> int:
         """
         Copy memory from one session to another.
-
+
         Args:
             from_session_id: Source session ID
             to_session_id: Destination session ID
             agent_id: Optional agent ID to filter by
-
+
         Returns:
             Number of memory turns copied
         """
@@ -713,10 +748,10 @@ class Simile:
             "from_session_id": str(from_session_id),
             "to_session_id": str(to_session_id),
         }
-
+
         if agent_id:
             payload["agent_id"] = str(agent_id)
-
+
         response = await self._request("POST", "memory/copy", json=payload)
         data = response.json()
         if data.get("success"):
```
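The remaining client hunks are whitespace-only, but they show the session-scoped shape of the memory management API: `clear_memory` and `copy_memory` POST string IDs to `memory/clear` and `memory/copy`, returning a success flag and a copied-turn count respectively. A hedged sketch with placeholder IDs and an assumed initialized `client`:

```python
# Sketch only: `client` is assumed to be an initialized Simile instance.
async def migrate_session_memory(client) -> None:
    src = "33333333-3333-3333-3333-333333333333"  # hypothetical source session ID
    dst = "44444444-4444-4444-4444-444444444444"  # hypothetical destination session ID

    copied = await client.copy_memory(from_session_id=src, to_session_id=dst)
    print(f"copied {copied} memory turns")

    # Clear only one agent's memory in the source session.
    cleared = await client.clear_memory(
        session_id=src,
        agent_id="11111111-1111-1111-1111-111111111111",  # hypothetical agent ID
    )
    print("cleared" if cleared else "nothing cleared")
```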
simile/models.py

```diff
@@ -11,6 +11,7 @@ class Population(BaseModel):
     description: Optional[str] = None
     created_at: datetime
     updated_at: datetime
+    metadata: Optional[Dict[str, Any]] = None
 
 
 class PopulationInfo(BaseModel):
@@ -18,6 +19,7 @@ class PopulationInfo(BaseModel):
     name: str
     description: Optional[str] = None
     agent_count: int
+    metadata: Optional[Dict[str, Any]] = None
 
 
 class DataItem(BaseModel):
@@ -44,6 +46,11 @@ class CreatePopulationPayload(BaseModel):
     description: Optional[str] = None
 
 
+class UpdatePopulationMetadataPayload(BaseModel):
+    metadata: Dict[str, Any]
+    mode: Optional[Literal["merge", "replace"]] = "merge"
+
+
 class InitialDataItemPayload(BaseModel):
     data_type: str
     content: Any
```
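The new payload model constrains `mode` to `"merge"` or `"replace"` through `Literal`, so an unsupported mode is rejected at validation time, before any request is built. A standalone sketch of that behavior; the model below mirrors the definition added in this release and does not depend on the client:

```python
from typing import Any, Dict, Literal, Optional

from pydantic import BaseModel, ValidationError

# Mirrors the model added to simile/models.py in 0.4.4.
class UpdatePopulationMetadataPayload(BaseModel):
    metadata: Dict[str, Any]
    mode: Optional[Literal["merge", "replace"]] = "merge"

payload = UpdatePopulationMetadataPayload(metadata={"cohort": "pilot"})
print(payload.mode)  # "merge" by default

try:
    UpdatePopulationMetadataPayload(metadata={}, mode="append")
except ValidationError as exc:
    print("rejected:", exc.errors()[0]["loc"])  # ('mode',)
```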
```diff
@@ -259,7 +266,7 @@ class BaseMemoryTurn(BaseModel):
 
     class Config:
         use_enum_values = True
-
+
     def to_dict(self) -> Dict[str, Any]:
         """Convert to dictionary for serialization."""
         data = self.model_dump()
@@ -354,9 +361,7 @@ class MemoryStream(BaseModel):
 
     def to_dict(self) -> Dict[str, Any]:
         """Convert memory stream to a dictionary for serialization."""
-        return {
-            "turns": [turn.to_dict() for turn in self.turns]
-        }
+        return {"turns": [turn.to_dict() for turn in self.turns]}
 
     @classmethod
     def from_dict(cls, data: Dict[str, Any]) -> "MemoryStream":
@@ -377,7 +382,9 @@ class MemoryStream(BaseModel):
     def fork(self, up_to_index: Optional[int] = None) -> "MemoryStream":
         """Create a copy of this memory stream, optionally up to a specific index."""
         new_memory = MemoryStream()
-        turns_to_copy =
+        turns_to_copy = (
+            self.turns[:up_to_index] if up_to_index is not None else self.turns
+        )
         for turn in turns_to_copy:
             new_memory.add_turn(turn.model_copy())
         return new_memory
```
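The `fork` hunk is a pure re-wrap, but it documents the semantics: each turn is copied with `model_copy()`, and `up_to_index` optionally truncates the copy. A hedged sketch, assuming `stream` is a populated `MemoryStream` obtained elsewhere (for example from `get_memory`):

```python
# Sketch only: `stream` is assumed to be a populated MemoryStream.
def snapshot(stream):
    first_three = stream.fork(up_to_index=3)  # independent copy of the first three turns
    full_copy = stream.fork()                 # independent copy of every turn
    print(len(first_three.turns), len(full_copy.turns))
    return full_copy.to_dict()                # {"turns": [...]} form sent in request payloads
```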
The remaining files listed above are unchanged between 0.4.3 and 0.4.4.