not-again-ai 0.16.0-py3-none-any.whl → 0.16.1-py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the two versions.
@@ -28,6 +28,7 @@ OLLAMA_PARAMETER_MAP = {
     "logit_bias": None,
     "top_logprobs": None,
     "presence_penalty": None,
+    "max_tokens": "num_predict",
 }
 
 
@@ -45,6 +46,10 @@ def validate(request: ChatCompletionRequest) -> None:
         logger.warning("Parameter 'stop' needs to be a string and not a list. It will be ignored.")
         request.stop = None
 
+    # Raise an error if both "max_tokens" and "max_completion_tokens" are provided
+    if request.max_tokens is not None and request.max_completion_tokens is not None:
+        raise ValueError("`max_tokens` and `max_completion_tokens` cannot both be provided.")
+
 
 def ollama_chat_completion(
     request: ChatCompletionRequest,
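
The two hunks above are in the Ollama provider (`not_again_ai/llm/chat_completion/providers/ollama_api.py`, going by the RECORD changes further down): `max_tokens` now maps to Ollama's `num_predict` option instead of having no entry in `OLLAMA_PARAMETER_MAP`. Below is a rough sketch of how such a map can be applied when building Ollama options; the `translate_params` helper and the drop-on-`None` behavior are assumptions for illustration, not code taken from the package.

```python
# Illustrative sketch only; `translate_params` is a hypothetical helper,
# not part of not-again-ai.
OLLAMA_PARAMETER_MAP = {
    "logit_bias": None,           # None: no Ollama equivalent in this sketch, so drop it
    "top_logprobs": None,
    "presence_penalty": None,
    "max_tokens": "num_predict",  # new in 0.16.1: renamed to Ollama's num_predict option
}


def translate_params(params: dict) -> dict:
    """Rename mapped parameters and drop those mapped to None."""
    options: dict = {}
    for key, value in params.items():
        target = OLLAMA_PARAMETER_MAP.get(key, key)  # unmapped keys pass through unchanged
        if target is None:
            continue
        options[target] = value
    return options


print(translate_params({"max_tokens": 256, "presence_penalty": 0.5, "temperature": 0.2}))
# -> {'num_predict': 256, 'temperature': 0.2}
```

Under those assumptions, a request carrying `max_tokens=256` reaches Ollama as `num_predict=256`.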
@@ -31,6 +31,10 @@ def validate(request: ChatCompletionRequest) -> None:
     if request.json_mode and request.structured_outputs is not None:
         raise ValueError("json_schema and json_mode cannot be used together.")
 
+    # Raise an error if both "max_tokens" and "max_completion_tokens" are provided
+    if request.max_tokens is not None and request.max_completion_tokens is not None:
+        raise ValueError("`max_tokens` and `max_completion_tokens` cannot both be provided.")
+
 
 def openai_chat_completion(
     request: ChatCompletionRequest,
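
Both providers' `validate()` functions gain the same guard: a request may set `max_tokens` or `max_completion_tokens`, but not both. Here is a minimal standalone sketch of that check; `Request` below is a stand-in, not the package's `ChatCompletionRequest`, whose other fields are not shown in this diff.

```python
# Minimal standalone sketch of the new guard; `Request` is a stand-in model.
from pydantic import BaseModel


class Request(BaseModel):
    max_tokens: int | None = None
    max_completion_tokens: int | None = None


def validate(request: Request) -> None:
    # Raise an error if both "max_tokens" and "max_completion_tokens" are provided
    if request.max_tokens is not None and request.max_completion_tokens is not None:
        raise ValueError("`max_tokens` and `max_completion_tokens` cannot both be provided.")


validate(Request(max_tokens=256))             # ok
validate(Request(max_completion_tokens=256))  # ok
try:
    validate(Request(max_tokens=256, max_completion_tokens=256))
except ValueError as err:
    print(err)  # `max_tokens` and `max_completion_tokens` cannot both be provided.
```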
@@ -118,6 +118,11 @@ class ChatCompletionRequest(BaseModel):
     top_k: int | None = Field(default=None)
     min_p: float | None = Field(default=None)
 
+    max_tokens: int | None = Field(
+        default=None,
+        description="Sometimes `max_completion_tokens` is not correctly supported so we provide this as a fallback.",
+    )
+
 
 class ChatCompletionChoice(BaseModel):
     message: AssistantMessage
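
The final code hunk adds `max_tokens` to `ChatCompletionRequest` in `types.py` as a documented fallback for backends where `max_completion_tokens` is not correctly honored. As a design note, the same mutual-exclusion rule could also live on the model itself via a pydantic `model_validator`; the following is only an alternative sketch, not what the package does (it checks in each provider's `validate()`).

```python
# Alternative sketch only: enforce the rule on the request model itself.
from pydantic import BaseModel, Field, model_validator


class Request(BaseModel):  # stand-in for ChatCompletionRequest
    max_completion_tokens: int | None = Field(default=None)
    max_tokens: int | None = Field(
        default=None,
        description="Sometimes `max_completion_tokens` is not correctly supported so we provide this as a fallback.",
    )

    @model_validator(mode="after")
    def _check_token_limits(self) -> "Request":
        if self.max_tokens is not None and self.max_completion_tokens is not None:
            raise ValueError("`max_tokens` and `max_completion_tokens` cannot both be provided.")
        return self


Request(max_tokens=128)  # ok
# Request(max_tokens=128, max_completion_tokens=128)  # raises pydantic.ValidationError
```

Centralizing the rule on the model would make every provider inherit it, at the cost of failing at request construction rather than at call time.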
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: not-again-ai
-Version: 0.16.0
+Version: 0.16.1
 Summary: Designed to once and for all collect all the little things that come up over and over again in AI projects and put them in one place.
 License: MIT
 Author: DaveCoDev
@@ -27,7 +27,7 @@ Requires-Dist: numpy (>=2.2) ; extra == "viz"
 Requires-Dist: ollama (>=0.4) ; extra == "llm"
 Requires-Dist: openai (>=1) ; extra == "llm"
 Requires-Dist: pandas (>=2.2) ; extra == "viz"
-Requires-Dist: playwright (>=1.49) ; extra == "data"
+Requires-Dist: playwright (>=1.50) ; extra == "data"
 Requires-Dist: pydantic (>=2.10)
 Requires-Dist: pytest-playwright (>=0.7) ; extra == "data"
 Requires-Dist: python-liquid (>=1.12) ; extra == "llm"
@@ -8,9 +8,9 @@ not_again_ai/llm/__init__.py,sha256=_wNUL6FDaT369Z8W48FsaC_NkcOZ-ib2MMUvnaLOS-0,
 not_again_ai/llm/chat_completion/__init__.py,sha256=a2qmmmrXjMKyHGZDjt_xdqYbSrEOBea_VvZArzMboe0,200
 not_again_ai/llm/chat_completion/interface.py,sha256=FCyE-1gLdhwuS0Lv8iTbZvraa4iZjnKB8qb31WF53uk,1204
 not_again_ai/llm/chat_completion/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-not_again_ai/llm/chat_completion/providers/ollama_api.py,sha256=iBTMyF8edo8uxxrorNPtShzmCXG7m0RlEBunWLSO4Mo,7999
-not_again_ai/llm/chat_completion/providers/openai_api.py,sha256=S7TZhDIQ_xpp3JakRVcd3Gpw2UjeHCETdA9MfRKUjCU,12294
-not_again_ai/llm/chat_completion/types.py,sha256=q8APUWWzwCKL0Rs_zEFfph9uBcwh5nAT0f0rp4crvk0,4039
+not_again_ai/llm/chat_completion/providers/ollama_api.py,sha256=plW_nN452rd85NqxPKjSLlHn-Z9s4rQXynsxatX8xkA,8295
+not_again_ai/llm/chat_completion/providers/openai_api.py,sha256=e8rOVeDq-gB8AQ5fycUjIgNSrgjYU3s8LdGUuYeahM8,12557
+not_again_ai/llm/chat_completion/types.py,sha256=RnOy_CphOaFU1f0ag-zTJPgAPXOnTH4WHD1P688GQKM,4222
 not_again_ai/llm/embedding/__init__.py,sha256=wscUfROukvw0M0vYccfaVTdXV0P-eICAT5mqM0LaHHc,182
 not_again_ai/llm/embedding/interface.py,sha256=Hj3UiktXEeCUeMwpIDtRkwBfKgaJSnJvclLNyjwUAtE,1144
 not_again_ai/llm/embedding/providers/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -32,7 +32,7 @@ not_again_ai/viz/distributions.py,sha256=OyWwJaNI6lMRm_iSrhq-CORLNvXfeuLSgDtVo3u
 not_again_ai/viz/scatterplot.py,sha256=5CUOWeknbBOaZPeX9oPin5sBkRKEwk8qeFH45R-9LlY,2292
 not_again_ai/viz/time_series.py,sha256=pOGZqXp_2nd6nKo-PUQNCtmMh__69jxQ6bQibTGLwZA,5212
 not_again_ai/viz/utils.py,sha256=hN7gwxtBt3U6jQni2K8j5m5pCXpaJDoNzGhBBikEU28,238
-not_again_ai-0.16.0.dist-info/LICENSE,sha256=btjOgNGpp-ux5xOo1Gx1MddxeWtT9sof3s3Nui29QfA,1071
-not_again_ai-0.16.0.dist-info/METADATA,sha256=kvwxTcEi-elRl-LuHyh2QtFLrpYHd-U6HjyuAkHYvWQ,15035
-not_again_ai-0.16.0.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
-not_again_ai-0.16.0.dist-info/RECORD,,
+not_again_ai-0.16.1.dist-info/LICENSE,sha256=btjOgNGpp-ux5xOo1Gx1MddxeWtT9sof3s3Nui29QfA,1071
+not_again_ai-0.16.1.dist-info/METADATA,sha256=6j9ar7vYJ77yTkg1NDzw9PbYK0-ioo5NGI2joAPHpp0,15035
+not_again_ai-0.16.1.dist-info/WHEEL,sha256=IYZQI976HJqqOpQU6PHkJ8fb3tMNBFjg-Cn-pwAbaFM,88
+not_again_ai-0.16.1.dist-info/RECORD,,