inferencesh-0.4.16-py3-none-any.whl → inferencesh-0.4.18-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of inferencesh has been flagged as potentially problematic; see the registry's report for details.

inferencesh/models/llm.py CHANGED
@@ -14,6 +14,7 @@ class ContextMessageRole(str, Enum):
     USER = "user"
     ASSISTANT = "assistant"
     SYSTEM = "system"
+    TOOL = "tool"
 
 
 class Message(BaseAppInput):
@@ -649,6 +650,7 @@ def stream_generate(
     last_activity = time.time()
     init_timeout = 30.0  # 30 seconds for initial response
     chunk_timeout = 10.0  # 10 seconds between chunks
+    chunks_begun = False
 
     try:
         # Wait for initial setup
@@ -679,7 +681,7 @@ def stream_generate(
             pass
 
         # Check for timeout
-        if time.time() - last_activity > chunk_timeout:
+        if chunks_begun and time.time() - last_activity > chunk_timeout:
             raise RuntimeError(f"No response from model for {chunk_timeout} seconds")
 
         # Get next chunk
@@ -704,6 +706,8 @@ def stream_generate(
         if not timing.first_token_time:
             timing.mark_first_token()
 
+        chunks_begun = True
+
         # Update response state from chunk
         response.update_from_chunk(chunk, timing)
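Taken together, the llm.py hunks above are the only code changes in this diff. ContextMessageRole gains a TOOL member (role string "tool"), presumably so that tool-call results can be carried in the conversation context alongside user, assistant, and system messages. In stream_generate, the 10-second chunk_timeout is now enforced only once chunks_begun is set, i.e. after the first chunk has actually arrived; until then the slower 30-second init_timeout is the only limit. Below is a minimal sketch of that gating pattern; the queue, sentinel value, and helper names are illustrative assumptions, not the SDK's actual stream_generate internals.

import queue
import time
from typing import Iterator, Optional

def stream_chunks(chunk_queue: "queue.Queue[Optional[str]]",
                  init_timeout: float = 30.0,
                  chunk_timeout: float = 10.0) -> Iterator[str]:
    """Yield chunks from a producer queue.

    chunk_timeout is only enforced after the first chunk has arrived;
    before that, only the more generous init_timeout applies.
    """
    last_activity = time.time()
    chunks_begun = False  # mirrors the flag added in this diff

    while True:
        # Stall detection: the strict inter-chunk timeout applies only once streaming has begun.
        if chunks_begun and time.time() - last_activity > chunk_timeout:
            raise RuntimeError(f"No response from model for {chunk_timeout} seconds")
        if not chunks_begun and time.time() - last_activity > init_timeout:
            raise RuntimeError(f"No initial response within {init_timeout} seconds")

        try:
            chunk = chunk_queue.get(timeout=0.1)
        except queue.Empty:
            continue  # nothing yet; loop back and re-check the timeouts

        if chunk is None:  # sentinel pushed by the producer when generation ends
            return

        chunks_begun = True
        last_activity = time.time()
        yield chunk

Before this change, the same 10-second check ran from the moment the stream was set up, so a model that needed more than chunk_timeout to produce its first chunk tripped the "No response from model" error even though init_timeout had not expired; gating the check on chunks_begun appears to be the fix this release ships.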
{inferencesh-0.4.16.dist-info → inferencesh-0.4.18.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: inferencesh
-Version: 0.4.16
+Version: 0.4.18
 Summary: inference.sh Python SDK
 Author-email: "Inference Shell Inc." <hello@inference.sh>
 Project-URL: Homepage, https://github.com/inference-sh/sdk
{inferencesh-0.4.16.dist-info → inferencesh-0.4.18.dist-info}/RECORD RENAMED
@@ -3,13 +3,13 @@ inferencesh/client.py,sha256=sMgr6vVPD84P3LpQfjmuKqlRpbgOUmWLFQ9IJM5kWzc,39410
 inferencesh/models/__init__.py,sha256=FDwcdtT6c4hbRitymjmN-hZMlQa8RbKSftkZZyjtUXA,536
 inferencesh/models/base.py,sha256=eTwRvXAjMGh6b8AUXWKSGpRyeScAkPs6bNYY8AXKSz8,5505
 inferencesh/models/file.py,sha256=H4s-A2RWfNTMCcLqSLpqNHvGfSvYEnbZaJj-rWftrL0,11743
-inferencesh/models/llm.py,sha256=N5eP549tWYh5qlbAwMbMTcHwx-bET7OCvxJ2SlSLfVk,27948
+inferencesh/models/llm.py,sha256=ytgSG-qTdEDmq_vrf7JfyCnevMMTimPiDUad4X-F2-o,28065
 inferencesh/utils/__init__.py,sha256=-xiD6uo2XzcrPAWFb_fUbaimmnW4KFKc-8IvBzaxNd4,148
 inferencesh/utils/download.py,sha256=DRGBudiPVa5bDS35KfR-DYeGRk7gO03WOelnisecwMo,1815
 inferencesh/utils/storage.py,sha256=E4J8emd4eFKdmdDgAqzz3TpaaDd3n0l8gYlMHuY8yIU,519
-inferencesh-0.4.16.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
-inferencesh-0.4.16.dist-info/METADATA,sha256=uGxSDfljhrZy_IQG83Y9E09gbuu-cbNSikBP7TjI9rI,5406
-inferencesh-0.4.16.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-inferencesh-0.4.16.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
-inferencesh-0.4.16.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
-inferencesh-0.4.16.dist-info/RECORD,,
+inferencesh-0.4.18.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
+inferencesh-0.4.18.dist-info/METADATA,sha256=RuUr8wSL3N_vIt5zahGI3ADvNdbDKxlyilQ6n3eS5Aw,5406
+inferencesh-0.4.18.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+inferencesh-0.4.18.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
+inferencesh-0.4.18.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
+inferencesh-0.4.18.dist-info/RECORD,,