inferencesh 0.4.19__tar.gz → 0.4.22__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of inferencesh might be problematic.
Files changed (22)
  1. {inferencesh-0.4.19/src/inferencesh.egg-info → inferencesh-0.4.22}/PKG-INFO +1 -1
  2. {inferencesh-0.4.19 → inferencesh-0.4.22}/pyproject.toml +1 -1
  3. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh/models/llm.py +13 -5
  4. {inferencesh-0.4.19 → inferencesh-0.4.22/src/inferencesh.egg-info}/PKG-INFO +1 -1
  5. {inferencesh-0.4.19 → inferencesh-0.4.22}/LICENSE +0 -0
  6. {inferencesh-0.4.19 → inferencesh-0.4.22}/README.md +0 -0
  7. {inferencesh-0.4.19 → inferencesh-0.4.22}/setup.cfg +0 -0
  8. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh/__init__.py +0 -0
  9. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh/client.py +0 -0
  10. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh/models/__init__.py +0 -0
  11. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh/models/base.py +0 -0
  12. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh/models/file.py +0 -0
  13. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh/utils/__init__.py +0 -0
  14. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh/utils/download.py +0 -0
  15. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh/utils/storage.py +0 -0
  16. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh.egg-info/SOURCES.txt +0 -0
  17. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh.egg-info/dependency_links.txt +0 -0
  18. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh.egg-info/entry_points.txt +0 -0
  19. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh.egg-info/requires.txt +0 -0
  20. {inferencesh-0.4.19 → inferencesh-0.4.22}/src/inferencesh.egg-info/top_level.txt +0 -0
  21. {inferencesh-0.4.19 → inferencesh-0.4.22}/tests/test_client.py +0 -0
  22. {inferencesh-0.4.19 → inferencesh-0.4.22}/tests/test_sdk.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: inferencesh
-Version: 0.4.19
+Version: 0.4.22
 Summary: inference.sh Python SDK
 Author-email: "Inference Shell Inc." <hello@inference.sh>
 Project-URL: Homepage, https://github.com/inference-sh/sdk
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "inferencesh"
-version = "0.4.19"
+version = "0.4.22"
 description = "inference.sh Python SDK"
 authors = [
     {name = "Inference Shell Inc.", email = "hello@inference.sh"},
@@ -224,8 +224,6 @@ def build_messages(
                 parts.append({"type": "image_url", "image_url": {"url": image_data_uri}})
             elif msg.image.uri:
                 parts.append({"type": "image_url", "image_url": {"url": msg.image.uri}})
-        if msg.tool_calls:
-            parts.append({"type": "tool_call", "tool_calls": msg.tool_calls})
         if allow_multipart:
             return parts
         if len(parts) == 1 and parts[0]["type"] == "text":
@@ -239,6 +237,13 @@ def build_messages(
         images = [msg.image for msg in messages if msg.image]
         image = images[0] if images else None  # TODO: handle multiple images
         return ContextMessage(role=messages[0].role, text=text, image=image)
+
+    def merge_tool_calls(messages: List[ContextMessage]) -> List[Dict[str, Any]]:
+        tool_calls = []
+        for msg in messages:
+            if msg.tool_calls:
+                tool_calls.extend(msg.tool_calls)
+        return tool_calls
 
     user_input_text = ""
     if hasattr(input_data, "text"):
@@ -250,7 +255,8 @@ def build_messages(
         user_input_image = input_data.image
         multipart = multipart or input_data.image is not None
 
-    user_msg = ContextMessage(role=ContextMessageRole.USER, text=user_input_text, image=user_input_image)
+    input_role = input_data.role if hasattr(input_data, "role") else ContextMessageRole.USER
+    user_msg = ContextMessage(role=input_role, text=user_input_text, image=user_input_image)
 
     input_data.context.append(user_msg)
 
@@ -264,14 +270,16 @@ def build_messages(
         else:
            messages.append({
                "role": current_role,
-                "content": render_message(merge_messages(current_messages), allow_multipart=multipart)
+                "content": render_message(merge_messages(current_messages), allow_multipart=multipart),
+                "tool_calls": merge_tool_calls(current_messages)
            })
            current_messages = [msg]
            current_role = msg.role
    if len(current_messages) > 0:
        messages.append({
            "role": current_role,
-            "content": render_message(merge_messages(current_messages), allow_multipart=multipart)
+            "content": render_message(merge_messages(current_messages), allow_multipart=multipart),
+            "tool_calls": merge_tool_calls(current_messages)
        })
 
    return messages
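
Taken together, the llm.py changes stop embedding tool calls as {"type": "tool_call", ...} parts inside message content and instead attach a message-level "tool_calls" list built by the new merge_tool_calls helper; the role change also lets the appended input message use input_data.role when present instead of always defaulting to USER. Below is a minimal sketch of the merging behavior only, using a hypothetical stand-in dataclass rather than the SDK's real ContextMessage class and an assumed OpenAI-style tool-call dict shape; it is inferred from the diff above, not from SDK documentation.

# Hypothetical stand-in for ContextMessage; field names follow the diff above,
# not necessarily the SDK's actual class definition.
from dataclasses import dataclass, field
from typing import Any, Dict, List, Optional

@dataclass
class StubMessage:
    role: str
    text: str = ""
    image: Optional[Any] = None
    tool_calls: List[Dict[str, Any]] = field(default_factory=list)

def merge_tool_calls(messages: List[StubMessage]) -> List[Dict[str, Any]]:
    # Mirrors the helper added in 0.4.22: flatten tool_calls from every
    # message in a same-role run into a single list.
    tool_calls: List[Dict[str, Any]] = []
    for msg in messages:
        if msg.tool_calls:
            tool_calls.extend(msg.tool_calls)
    return tool_calls

# Two consecutive assistant messages, each carrying one (assumed OpenAI-style) tool call.
run = [
    StubMessage(role="assistant", text="Searching.",
                tool_calls=[{"id": "call_1", "type": "function",
                             "function": {"name": "search", "arguments": "{}"}}]),
    StubMessage(role="assistant", text="Checking weather.",
                tool_calls=[{"id": "call_2", "type": "function",
                             "function": {"name": "weather", "arguments": "{}"}}]),
]

# In 0.4.22 the merged chat message carries both calls under "tool_calls"
# rather than as "tool_call" parts inside "content".
merged = {"role": "assistant", "tool_calls": merge_tool_calls(run)}
print([call["id"] for call in merged["tool_calls"]])  # ['call_1', 'call_2']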
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: inferencesh
-Version: 0.4.19
+Version: 0.4.22
 Summary: inference.sh Python SDK
 Author-email: "Inference Shell Inc." <hello@inference.sh>
 Project-URL: Homepage, https://github.com/inference-sh/sdk