sutro 0.1.14__tar.gz → 0.1.16__tar.gz

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.


This version of sutro might be problematic. See the release advisory for more details.

@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: sutro
3
- Version: 0.1.14
3
+ Version: 0.1.16
4
4
  Summary: Sutro Python SDK
5
5
  Project-URL: Homepage, https://sutro.sh
6
6
  Project-URL: Documentation, https://docs.sutro.sh
@@ -9,7 +9,7 @@ installer = "uv"
9
9
 
10
10
  [project]
11
11
  name = "sutro"
12
- version = "0.1.14"
12
+ version = "0.1.16"
13
13
  description = "Sutro Python SDK"
14
14
  readme = "README.md"
15
15
  requires-python = ">=3.10"
@@ -30,6 +30,23 @@ def is_jupyter() -> bool:
30
30
  YASPIN_COLOR = None if is_jupyter() else "blue"
31
31
  SPINNER = Spinners.dots14
32
32
 
33
+ # Models available for inference. Keep in sync with the backend configuration
34
+ # so users get helpful autocompletion when selecting a model.
35
+ ModelOptions = Literal[
36
+ "llama-3.2-3b",
37
+ "llama-3.1-8b",
38
+ "llama-3.3-70b",
39
+ "llama-3.3-70b",
40
+ "qwen-3-4b",
41
+ "qwen-3-32b",
42
+ "qwen-3-4b-thinking",
43
+ "qwen-3-32b-thinking",
44
+ "gemma-3-4b-it",
45
+ "gemma-3-27b-it",
46
+ "multilingual-e5-large-instruct",
47
+ "gte-qwen2-7b-instruct",
48
+ ]
49
+
33
50
 
34
51
  def to_colored_text(
35
52
  text: str, state: Optional[Literal["success", "fail"]] = None
@@ -156,7 +173,7 @@ class Sutro:
156
173
  def infer(
157
174
  self,
158
175
  data: Union[List, pd.DataFrame, pl.DataFrame, str],
159
- model: str = "llama-3.1-8b",
176
+ model: ModelOptions = "llama-3.1-8b",
160
177
  column: str = None,
161
178
  output_column: str = "inference_result",
162
179
  job_priority: int = 0,
@@ -176,7 +193,7 @@ class Sutro:
176
193
 
177
194
  Args:
178
195
  data (Union[List, pd.DataFrame, pl.DataFrame, str]): The data to run inference on.
179
- model (str, optional): The model to use for inference. Defaults to "llama-3.1-8b".
196
+ model (ModelOptions, optional): The model to use for inference. Defaults to "llama-3.1-8b".
180
197
  column (str, optional): The column name to use for inference. Required if data is a DataFrame, file path, or dataset.
181
198
  output_column (str, optional): The column name to store the inference results in if the input is a DataFrame. Defaults to "inference_result".
182
199
  job_priority (int, optional): The priority of the job. Defaults to 0.
File without changes
File without changes
File without changes
File without changes
File without changes