inferencesh 0.4.17__py3-none-any.whl → 0.4.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



inferencesh/models/llm.py CHANGED
@@ -21,7 +21,6 @@ class Message(BaseAppInput):
     role: ContextMessageRole
     content: str
 
-
 class ContextMessage(BaseAppInput):
     role: ContextMessageRole = Field(
         description="the role of the message. user, assistant, or system",
@@ -33,6 +32,10 @@ class ContextMessage(BaseAppInput):
         description="the image file of the message",
         default=None
     )
+    tool_calls: Optional[List[Dict[str, Any]]] = Field(
+        description="the tool calls of the message",
+        default=None
+    )
 
 class BaseLLMInput(BaseAppInput):
     """Base class with common LLM fields."""
@@ -53,8 +56,12 @@ class BaseLLMInput(BaseAppInput):
             ]
         ]
     )
+    role: ContextMessageRole = Field(
+        description="the role of the input text",
+        default=ContextMessageRole.USER
+    )
     text: str = Field(
-        description="the user prompt to use for the model",
+        description="the input text to use for the model",
         examples=[
             "write a haiku about artificial general intelligence"
         ]
@@ -217,6 +224,8 @@ def build_messages(
             parts.append({"type": "image_url", "image_url": {"url": image_data_uri}})
         elif msg.image.uri:
             parts.append({"type": "image_url", "image_url": {"url": msg.image.uri}})
+        if msg.tool_calls:
+            parts.append({"type": "tool_call", "tool_calls": msg.tool_calls})
         if allow_multipart:
            return parts
         if len(parts) == 1 and parts[0]["type"] == "text":
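
The new tool_calls field on ContextMessage is forwarded by build_messages as an extra part whenever it is set. A minimal sketch of the resulting multipart payload, assuming OpenAI-style tool-call dictionaries (the SDK forwards whatever dicts the caller stored and does not pin a schema; the get_weather entry below is purely illustrative):

from typing import Any, Dict, List

# Hypothetical tool-call entries as a caller might store them on a
# ContextMessage; the new branch above passes these dicts through unchanged.
tool_calls: List[Dict[str, Any]] = [
    {
        "id": "call_0",
        "type": "function",
        "function": {"name": "get_weather", "arguments": '{"city": "Berlin"}'},
    },
]

# Shape of the message parts emitted for such a message: the existing
# text/image parts are followed by a single "tool_call" part.
parts = [
    {"type": "text", "text": "Looking that up for you."},
    {"type": "tool_call", "tool_calls": tool_calls},
]
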
@@ -650,6 +659,7 @@ def stream_generate(
     last_activity = time.time()
     init_timeout = 30.0  # 30 seconds for initial response
     chunk_timeout = 10.0  # 10 seconds between chunks
+    chunks_begun = False
 
     try:
         # Wait for initial setup
@@ -680,7 +690,7 @@
                 pass
 
             # Check for timeout
-            if time.time() - last_activity > chunk_timeout:
+            if chunks_begun and time.time() - last_activity > chunk_timeout:
                 raise RuntimeError(f"No response from model for {chunk_timeout} seconds")
 
             # Get next chunk
@@ -705,6 +715,8 @@
             if not timing.first_token_time:
                 timing.mark_first_token()
 
+            chunks_begun = True
+
             # Update response state from chunk
             response.update_from_chunk(chunk, timing)
 
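
The new chunks_begun flag narrows when the 10-second chunk_timeout applies: it is only enforced after the first chunk has arrived, so a slow first token is governed by the 30-second init_timeout alone. A standalone sketch of that guard, assuming a generic chunk queue (drain_chunks and the sentinel handling are illustrative, not the SDK's actual stream_generate):

import queue
import time

def drain_chunks(chunk_queue, init_timeout=30.0, chunk_timeout=10.0):
    """Illustrative restatement of the timeout guard introduced in this diff."""
    last_activity = time.time()
    chunks_begun = False
    while True:
        # Before the first chunk only the longer init timeout applies; the
        # per-chunk timeout starts mattering once streaming has begun.
        if chunks_begun and time.time() - last_activity > chunk_timeout:
            raise RuntimeError(f"No response from model for {chunk_timeout} seconds")
        if not chunks_begun and time.time() - last_activity > init_timeout:
            raise RuntimeError(f"No initial response after {init_timeout} seconds")
        try:
            chunk = chunk_queue.get(timeout=0.1)
        except queue.Empty:
            continue
        if chunk is None:  # sentinel marking end of stream
            return
        chunks_begun = True
        last_activity = time.time()
        yield chunk
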

inferencesh-0.4.17.dist-info/METADATA → inferencesh-0.4.19.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: inferencesh
-Version: 0.4.17
+Version: 0.4.19
 Summary: inference.sh Python SDK
 Author-email: "Inference Shell Inc." <hello@inference.sh>
 Project-URL: Homepage, https://github.com/inference-sh/sdk

inferencesh-0.4.17.dist-info/RECORD → inferencesh-0.4.19.dist-info/RECORD CHANGED
@@ -3,13 +3,13 @@ inferencesh/client.py,sha256=sMgr6vVPD84P3LpQfjmuKqlRpbgOUmWLFQ9IJM5kWzc,39410
 inferencesh/models/__init__.py,sha256=FDwcdtT6c4hbRitymjmN-hZMlQa8RbKSftkZZyjtUXA,536
 inferencesh/models/base.py,sha256=eTwRvXAjMGh6b8AUXWKSGpRyeScAkPs6bNYY8AXKSz8,5505
 inferencesh/models/file.py,sha256=H4s-A2RWfNTMCcLqSLpqNHvGfSvYEnbZaJj-rWftrL0,11743
-inferencesh/models/llm.py,sha256=C7FzaQrv_tfhMrkhbHaaprskQPPmagW41jByaC0-buQ,27966
+inferencesh/models/llm.py,sha256=mFktrSFDEZIVruvh8qNY4H6u7H9mlJDhtTPjiL4lXRs,28438
 inferencesh/utils/__init__.py,sha256=-xiD6uo2XzcrPAWFb_fUbaimmnW4KFKc-8IvBzaxNd4,148
 inferencesh/utils/download.py,sha256=DRGBudiPVa5bDS35KfR-DYeGRk7gO03WOelnisecwMo,1815
 inferencesh/utils/storage.py,sha256=E4J8emd4eFKdmdDgAqzz3TpaaDd3n0l8gYlMHuY8yIU,519
-inferencesh-0.4.17.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
-inferencesh-0.4.17.dist-info/METADATA,sha256=h8uT3xyrUok1dMVjJJz_Ao54v5RhePQoKlrqHPeULjk,5406
-inferencesh-0.4.17.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-inferencesh-0.4.17.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
-inferencesh-0.4.17.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
-inferencesh-0.4.17.dist-info/RECORD,,
+inferencesh-0.4.19.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
+inferencesh-0.4.19.dist-info/METADATA,sha256=pgDRGh7twCQj6x7vK1zD221SDCQa8sZvw7CCZW_QFOc,5406
+inferencesh-0.4.19.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+inferencesh-0.4.19.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
+inferencesh-0.4.19.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
+inferencesh-0.4.19.dist-info/RECORD,,