inferencesh 0.2.32-py3-none-any.whl → 0.2.34-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. The information is provided for informational purposes only.

Potentially problematic release: this version of inferencesh might be problematic.

inferencesh/models/llm.py CHANGED
@@ -65,9 +65,8 @@ class BaseLLMInput(BaseAppInput):
             "Explain quantum computing in simple terms"
         ]
     )
-    temperature: float = Field(default=0.7)
-    top_p: float = Field(default=0.95)
-    max_tokens: int = Field(default=4096)
+    temperature: float = Field(default=0.7, ge=0.0, le=1.0)
+    top_p: float = Field(default=0.95, ge=0.0, le=1.0)
     context_size: int = Field(default=4096)
 
 class ImageCapabilityMixin(BaseModel):
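Besides dropping the `max_tokens` field, this hunk adds `ge`/`le` bounds to `temperature` and `top_p`, so out-of-range sampling parameters are rejected at validation time instead of being passed through. A minimal sketch of the effect, assuming Pydantic v2 and using a hypothetical `SamplingParams` stand-in rather than the SDK's `BaseLLMInput`:

    from pydantic import BaseModel, Field, ValidationError

    class SamplingParams(BaseModel):  # hypothetical stand-in for BaseLLMInput
        temperature: float = Field(default=0.7, ge=0.0, le=1.0)
        top_p: float = Field(default=0.95, ge=0.0, le=1.0)

    SamplingParams(temperature=0.2, top_p=0.9)   # accepted: both within [0.0, 1.0]
    try:
        SamplingParams(temperature=1.5)          # rejected: exceeds le=1.0
    except ValidationError as exc:
        print(exc.errors()[0]["type"])           # "less_than_equal" under Pydantic v2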
@@ -229,7 +228,6 @@ def build_messages(
            return parts[0]["text"]
        raise ValueError("Image content requires multipart support")
 
-    multipart = any(m.image for m in input_data.context) or input_data.image is not None
     messages = [{"role": "system", "content": input_data.system_prompt}] if input_data.system_prompt is not None and input_data.system_prompt != "" else []
 
     def merge_messages(messages: List[ContextMessage]) -> ContextMessage:
@@ -241,9 +239,13 @@ def build_messages(
     user_input_text = ""
     if hasattr(input_data, "text"):
         user_input_text = transform_user_message(input_data.text) if transform_user_message else input_data.text
+
     user_input_image = None
+    multipart = any(m.image for m in input_data.context)
     if hasattr(input_data, "image"):
         user_input_image = input_data.image
+        multipart = multipart or input_data.image is not None
+
     user_msg = ContextMessage(role=ContextMessageRole.USER, text=user_input_text, image=user_input_image)
 
     input_data.context.append(user_msg)
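Across these two `build_messages` hunks, the `multipart` computation moves below the user-input handling and picks up a `hasattr` guard, presumably so that inputs without an `image` field no longer touch `input_data.image` directly. An illustrative before/after sketch, using a hypothetical text-only input object rather than the SDK's real input classes:

    class TextOnlyInput:                 # hypothetical: has context but no image field
        def __init__(self):
            self.context = []

    data = TextOnlyInput()

    # 0.2.32-style check (would raise AttributeError for this input):
    # multipart = any(m.image for m in data.context) or data.image is not None

    # 0.2.34-style check, as in the hunk above:
    multipart = any(m.image for m in data.context)
    if hasattr(data, "image"):
        multipart = multipart or data.image is not None
    print(multipart)                     # False: no images in context, no image field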
@@ -585,7 +587,6 @@ def stream_generate(
     tool_choice: Optional[Dict[str, Any]] = None,
     temperature: float = 0.7,
     top_p: float = 0.95,
-    max_tokens: int = 4096,
     stop: Optional[List[str]] = None,
     verbose: bool = False,
     output_cls: type[BaseLLMOutput] = LLMOutput,
@@ -609,7 +610,6 @@ def stream_generate(
         "stream": True,
         "temperature": temperature,
         "top_p": top_p,
-        "max_tokens": max_tokens,
         "stop": stop
     }
     if tools is not None:
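These two hunks remove `max_tokens` from both the `stream_generate` signature and the request payload it assembles, matching the field removed from `BaseLLMInput` above. A hedged way to check the call-site impact, assuming the 0.2.34 wheel is installed and that `stream_generate` is a module-level function (the hunk context suggests so but does not prove it; the module path comes from the RECORD below):

    import inspect
    from inferencesh.models.llm import stream_generate

    params = inspect.signature(stream_generate).parameters
    assert "max_tokens" not in params                          # removed in 0.2.34
    assert {"temperature", "top_p", "stop"} <= params.keys()   # still present per the diff

Unless the full signature accepts **kwargs further down (not visible in this hunk), callers that previously passed `max_tokens=...` will now get a `TypeError`; the diff itself does not show what, if anything, replaces that limit.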
{inferencesh-0.2.32.dist-info → inferencesh-0.2.34.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: inferencesh
-Version: 0.2.32
+Version: 0.2.34
 Summary: inference.sh Python SDK
 Author: Inference Shell Inc.
 Author-email: "Inference Shell Inc." <hello@inference.sh>
{inferencesh-0.2.32.dist-info → inferencesh-0.2.34.dist-info}/RECORD RENAMED
@@ -2,13 +2,13 @@ inferencesh/__init__.py,sha256=WdADtOhfa3HDOunoE9HLFCTFlXRykYstBIH1FpyWvj8,613
 inferencesh/models/__init__.py,sha256=FDwcdtT6c4hbRitymjmN-hZMlQa8RbKSftkZZyjtUXA,536
 inferencesh/models/base.py,sha256=4gZQRi8J7y9U6PrGD9pRIehd1MJVJAqGakPQDs2AKFM,3251
 inferencesh/models/file.py,sha256=5xnpypcRahM1YcEjj64rv9g2gTimxrZb41YT4r440hU,7393
-inferencesh/models/llm.py,sha256=nN0gGcVRB0YS3yQcKi-rPy1Fx3B_blLhS-obWxFbhCE,28264
+inferencesh/models/llm.py,sha256=SHDOHeGd78FKE9MPnUBEnFkDnO6QeZTE-A5NhCxi3rA,28224
 inferencesh/utils/__init__.py,sha256=-xiD6uo2XzcrPAWFb_fUbaimmnW4KFKc-8IvBzaxNd4,148
 inferencesh/utils/download.py,sha256=7n5twvoNYDcFnKJyefImaj2YfzRI7vddQw4usZbj38c,1521
 inferencesh/utils/storage.py,sha256=E4J8emd4eFKdmdDgAqzz3TpaaDd3n0l8gYlMHuY8yIU,519
-inferencesh-0.2.32.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
-inferencesh-0.2.32.dist-info/METADATA,sha256=YQmwDhvu8aMtp-QNoka9aAjUqIcltH8pWpz-LIz_uT4,2757
-inferencesh-0.2.32.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-inferencesh-0.2.32.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
-inferencesh-0.2.32.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
-inferencesh-0.2.32.dist-info/RECORD,,
+inferencesh-0.2.34.dist-info/licenses/LICENSE,sha256=OsgqEWIh2el_QMj0y8O1A5Q5Dl-dxqqYbFE6fszuR4s,1086
+inferencesh-0.2.34.dist-info/METADATA,sha256=phcaj3JElykRBTDXylJ26H1yKSInGchs3IhNKWrebyI,2757
+inferencesh-0.2.34.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+inferencesh-0.2.34.dist-info/entry_points.txt,sha256=6IC-fyozAqW3ljsMLGCXxJ0_ui2Jb-2fLHtoH1RTnEE,45
+inferencesh-0.2.34.dist-info/top_level.txt,sha256=TSMHg3T1ThMl1HGAWmzBClwOYH1ump5neof9BfHIwaA,12
+inferencesh-0.2.34.dist-info/RECORD,,