inferencesh 0.4.14__tar.gz → 0.4.15__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of inferencesh might be problematic; see the registry's advisory page for more details.

Files changed (22)
  1. {inferencesh-0.4.14/src/inferencesh.egg-info → inferencesh-0.4.15}/PKG-INFO +1 -1
  2. {inferencesh-0.4.14 → inferencesh-0.4.15}/pyproject.toml +1 -1
  3. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh/models/llm.py +17 -16
  4. {inferencesh-0.4.14 → inferencesh-0.4.15/src/inferencesh.egg-info}/PKG-INFO +1 -1
  5. {inferencesh-0.4.14 → inferencesh-0.4.15}/LICENSE +0 -0
  6. {inferencesh-0.4.14 → inferencesh-0.4.15}/README.md +0 -0
  7. {inferencesh-0.4.14 → inferencesh-0.4.15}/setup.cfg +0 -0
  8. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh/__init__.py +0 -0
  9. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh/client.py +0 -0
  10. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh/models/__init__.py +0 -0
  11. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh/models/base.py +0 -0
  12. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh/models/file.py +0 -0
  13. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh/utils/__init__.py +0 -0
  14. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh/utils/download.py +0 -0
  15. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh/utils/storage.py +0 -0
  16. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh.egg-info/SOURCES.txt +0 -0
  17. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh.egg-info/dependency_links.txt +0 -0
  18. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh.egg-info/entry_points.txt +0 -0
  19. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh.egg-info/requires.txt +0 -0
  20. {inferencesh-0.4.14 → inferencesh-0.4.15}/src/inferencesh.egg-info/top_level.txt +0 -0
  21. {inferencesh-0.4.14 → inferencesh-0.4.15}/tests/test_client.py +0 -0
  22. {inferencesh-0.4.14 → inferencesh-0.4.15}/tests/test_sdk.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: inferencesh
3
- Version: 0.4.14
3
+ Version: 0.4.15
4
4
  Summary: inference.sh Python SDK
5
5
  Author-email: "Inference Shell Inc." <hello@inference.sh>
6
6
  Project-URL: Homepage, https://github.com/inference-sh/sdk
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
4
4
 
5
5
  [project]
6
6
  name = "inferencesh"
7
- version = "0.4.14"
7
+ version = "0.4.15"
8
8
  description = "inference.sh Python SDK"
9
9
  authors = [
10
10
  {name = "Inference Shell Inc.", email = "hello@inference.sh"},
@@ -464,26 +464,27 @@ class ResponseTransformer:
464
464
  text: Cleaned text to process for reasoning
465
465
  """
466
466
  # Default implementation for <think> style reasoning
467
- if "<think>" in text and not self.state.state_changes["reasoning_started"]:
467
+ # Check for tags in the complete buffer
468
+ if "<think>" in self.state.buffer and not self.state.state_changes["reasoning_started"]:
468
469
  self.state.state_changes["reasoning_started"] = True
469
470
  if self.timing:
470
471
  self.timing.start_reasoning()
471
472
 
472
- if "</think>" in text and not self.state.state_changes["reasoning_ended"]:
473
- self.state.state_changes["reasoning_ended"] = True
474
- if self.timing:
475
- # Estimate token count from character count (rough approximation)
476
- token_count = len(self.state.buffer.split("<think>")[1].split("</think>")[0]) // 4
477
- self.timing.end_reasoning(token_count)
478
-
479
- if "<think>" in self.state.buffer:
480
- parts = self.state.buffer.split("</think>", 1)
481
- if len(parts) > 1:
482
- self.state.reasoning = parts[0].split("<think>", 1)[1].strip()
483
- self.state.response = parts[1].strip()
484
- else:
485
- self.state.reasoning = self.state.buffer.split("<think>", 1)[1].strip()
486
- self.state.response = ""
473
+ # Extract content and handle end of reasoning
474
+ parts = self.state.buffer.split("<think>", 1)
475
+ if len(parts) > 1:
476
+ reasoning_text = parts[1]
477
+ end_parts = reasoning_text.split("</think>", 1)
478
+ self.state.reasoning = end_parts[0].strip()
479
+ self.state.response = end_parts[1].strip() if len(end_parts) > 1 else ""
480
+
481
+ # Check for end tag in complete buffer
482
+ if "</think>" in self.state.buffer and not self.state.state_changes["reasoning_ended"]:
483
+ self.state.state_changes["reasoning_ended"] = True
484
+ if self.timing:
485
+ # Estimate token count from character count (rough approximation)
486
+ token_count = len(self.state.reasoning) // 4
487
+ self.timing.end_reasoning(token_count)
487
488
  else:
488
489
  self.state.response = self.state.buffer
489
490
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: inferencesh
3
- Version: 0.4.14
3
+ Version: 0.4.15
4
4
  Summary: inference.sh Python SDK
5
5
  Author-email: "Inference Shell Inc." <hello@inference.sh>
6
6
  Project-URL: Homepage, https://github.com/inference-sh/sdk
File without changes
File without changes
File without changes