simile 0.3.6__tar.gz → 0.3.7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of simile might be problematic.

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: simile
-Version: 0.3.6
+Version: 0.3.7
 Summary: Package for interfacing with Simile AI agents for simulation
 Author-email: Simile AI <cqz@simile.ai>
 License: MIT
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "simile"
-version = "0.3.6"
+version = "0.3.7"
 authors = [
   { name="Simile AI", email="cqz@simile.ai" },
 ]
@@ -1,6 +1,6 @@
 import httpx
 from httpx import AsyncClient
-from typing import List, Dict, Any, Optional, Union, Type
+from typing import List, Dict, Any, Optional, Union, Type, AsyncGenerator
 import uuid
 from pydantic import BaseModel
 
@@ -312,6 +312,68 @@ class Simile:
         )
         return response_data
 
+    async def stream_open_response(
+        self,
+        agent_id: uuid.UUID,
+        question: str,
+        data_types: Optional[List[str]] = None,
+        exclude_data_types: Optional[List[str]] = None,
+        images: Optional[Dict[str, str]] = None,
+    ) -> AsyncGenerator[str, None]:
+        """Streams an open response from an agent."""
+        endpoint = f"/generation/open-stream/{str(agent_id)}"
+        request_payload = OpenGenerationRequest(
+            question=question,
+            data_types=data_types,
+            exclude_data_types=exclude_data_types,
+            images=images,
+        )
+
+        url = self.base_url + endpoint  # assuming self.base_url is defined
+
+        async with httpx.AsyncClient(timeout=None) as client:
+            async with client.stream("POST", url, json=request_payload.model_dump()) as response:
+                response.raise_for_status()
+                async for line in response.aiter_lines():
+                    if line.strip():  # skip empty lines
+                        if line.startswith("data: "):  # optional, if using SSE format
+                            yield line.removeprefix("data: ").strip()
+                        else:
+                            yield line.strip()
+
+    async def stream_closed_response(
+        self,
+        agent_id: uuid.UUID,
+        question: str,
+        options: List[str],
+        data_types: Optional[List[str]] = None,
+        exclude_data_types: Optional[List[str]] = None,
+        images: Optional[Dict[str, str]] = None,
+    ) -> AsyncGenerator[str, None]:
+        """Streams a closed response from an agent."""
+        endpoint = f"/generation/closed-stream/{str(agent_id)}"
+
+        request_payload = {
+            "question": question,
+            "options": options,
+            "data_types": data_types,
+            "exclude_data_types": exclude_data_types,
+            "images": images,
+        }
+
+        url = self.base_url + endpoint  # assuming self.base_url is defined
+
+        async with httpx.AsyncClient(timeout=None) as client:
+            async with client.stream("POST", url, json=request_payload) as response:
+                response.raise_for_status()
+                async for line in response.aiter_lines():
+                    if line.strip():  # skip empty lines
+                        if line.startswith("data: "):  # optional, if using SSE format
+                            yield line.removeprefix("data: ").strip()
+                        else:
+                            yield line.strip()
+
+
     async def generate_open_response(
         self,
         agent_id: uuid.UUID,
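For orientation, here is a minimal sketch of how a caller might consume the two new streaming methods added in 0.3.7. The top-level import, the Simile(...) constructor arguments, and the agent id are assumptions for illustration only; they are not shown in this diff.

import asyncio
import uuid

from simile import Simile  # assumed top-level export; not shown in this diff


async def main() -> None:
    # Hypothetical constructor arguments; the real signature is not part of this diff.
    client = Simile(api_key="YOUR_API_KEY", base_url="https://api.simile.ai")
    agent_id = uuid.UUID("00000000-0000-0000-0000-000000000000")  # placeholder agent id

    # stream_open_response is an async generator yielding decoded text chunks
    # as the server sends them.
    async for chunk in client.stream_open_response(agent_id, "How was your day?"):
        print(chunk)

    # stream_closed_response additionally takes a fixed list of answer options.
    async for chunk in client.stream_closed_response(
        agent_id, "Pick one:", options=["yes", "no"]
    ):
        print(chunk)


asyncio.run(main())

Both methods open their own httpx.AsyncClient per call and strip an optional "data: " SSE prefix from each non-empty line before yielding it.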
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: simile
-Version: 0.3.6
+Version: 0.3.7
 Summary: Package for interfacing with Simile AI agents for simulation
 Author-email: Simile AI <cqz@simile.ai>
 License: MIT
9 files without changes