inferencesh 0.2.33__tar.gz → 0.2.34__tar.gz
This diff shows the changes between two publicly released versions of the package, as they appear in their respective registries. It is provided for informational purposes only.
Potentially problematic release.
This version of inferencesh might be problematic.
- {inferencesh-0.2.33/src/inferencesh.egg-info → inferencesh-0.2.34}/PKG-INFO +1 -1
- {inferencesh-0.2.33 → inferencesh-0.2.34}/pyproject.toml +1 -1
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh/models/llm.py +2 -5
- {inferencesh-0.2.33 → inferencesh-0.2.34/src/inferencesh.egg-info}/PKG-INFO +1 -1
- {inferencesh-0.2.33 → inferencesh-0.2.34}/LICENSE +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/README.md +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/setup.cfg +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/setup.py +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh/__init__.py +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh/models/__init__.py +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh/models/base.py +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh/models/file.py +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh/utils/__init__.py +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh/utils/download.py +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh/utils/storage.py +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh.egg-info/SOURCES.txt +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh.egg-info/dependency_links.txt +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh.egg-info/entry_points.txt +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh.egg-info/requires.txt +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/src/inferencesh.egg-info/top_level.txt +0 -0
- {inferencesh-0.2.33 → inferencesh-0.2.34}/tests/test_sdk.py +0 -0
src/inferencesh/models/llm.py

```diff
@@ -65,9 +65,8 @@ class BaseLLMInput(BaseAppInput):
             "Explain quantum computing in simple terms"
         ]
     )
-    temperature: float = Field(default=0.7)
-    top_p: float = Field(default=0.95)
-    max_tokens: int = Field(default=4096)
+    temperature: float = Field(default=0.7, ge=0.0, le=1.0)
+    top_p: float = Field(default=0.95, ge=0.0, le=1.0)
     context_size: int = Field(default=4096)
 
 class ImageCapabilityMixin(BaseModel):
```
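The functional change here is twofold: `temperature` and `top_p` gain `ge`/`le` validation bounds, and the `max_tokens` field is dropped from `BaseLLMInput`. A minimal sketch of what the new bounds do, using a standalone Pydantic model rather than the package's actual class: out-of-range sampling values now fail at construction time instead of being forwarded to the backend.

```python
# Minimal sketch (standalone model, not the package's BaseLLMInput) of the
# effect of the added ge/le bounds: Pydantic rejects out-of-range values
# at validation time instead of passing them through silently.
from pydantic import BaseModel, Field, ValidationError

class SamplingParams(BaseModel):
    temperature: float = Field(default=0.7, ge=0.0, le=1.0)
    top_p: float = Field(default=0.95, ge=0.0, le=1.0)

print(SamplingParams(temperature=0.3).temperature)  # 0.3 — within bounds

try:
    SamplingParams(temperature=1.5)  # violates le=1.0
except ValidationError as err:
    print(err)  # "Input should be less than or equal to 1"
```

One side effect worth noting: `le=1.0` also rejects temperatures between 1 and 2 that some OpenAI-style backends accept, so the bound narrows the usable range as well as validating it.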
```diff
@@ -588,7 +587,6 @@ def stream_generate(
     tool_choice: Optional[Dict[str, Any]] = None,
     temperature: float = 0.7,
     top_p: float = 0.95,
-    max_tokens: int = 4096,
     stop: Optional[List[str]] = None,
     verbose: bool = False,
     output_cls: type[BaseLLMOutput] = LLMOutput,
```
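Dropping `max_tokens` from the signature (rather than deprecating it) is a breaking change for keyword callers. A hypothetical before/after sketch, with a stub standing in for the real `stream_generate`:

```python
# Hypothetical sketch: a call site written against 0.2.33 that passed
# max_tokens explicitly now raises TypeError, because 0.2.34 removed the
# parameter outright. The stub below stands in for the real stream_generate.
def stream_generate_v034(temperature: float = 0.7, top_p: float = 0.95) -> None:
    """Stub with the 0.2.34-era sampling keywords only."""

try:
    stream_generate_v034(temperature=0.7, max_tokens=4096)  # fine on 0.2.33
except TypeError as err:
    print(err)  # ... got an unexpected keyword argument 'max_tokens'
```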
```diff
@@ -612,7 +610,6 @@ def stream_generate(
         "stream": True,
         "temperature": temperature,
         "top_p": top_p,
-        "max_tokens": max_tokens,
         "stop": stop
     }
     if tools is not None:
```
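The same removal propagates to the request payload, so 0.2.34 sends no explicit completion cap; output length presumably falls to the backend's own default (or is bounded indirectly by `context_size`, which remains). A sketch mirroring only the keys visible in the hunk — the real function builds more state than this:

```python
# Sketch of the request dict as of 0.2.34, mirroring the keys shown in the
# hunk above (assumed shape; the real stream_generate builds more than this).
from typing import Any, Dict, List, Optional

def build_payload(
    temperature: float = 0.7,
    top_p: float = 0.95,
    stop: Optional[List[str]] = None,
) -> Dict[str, Any]:
    # "max_tokens" is intentionally absent post-0.2.34; no cap is sent.
    return {
        "stream": True,
        "temperature": temperature,
        "top_p": top_p,
        "stop": stop,
    }

print(build_payload(stop=["</s>"]))
# {'stream': True, 'temperature': 0.7, 'top_p': 0.95, 'stop': ['</s>']}
```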