llama-index-llms-openai 0.3.11__tar.gz → 0.3.13__tar.gz

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: llama-index-llms-openai
-Version: 0.3.11
+Version: 0.3.13
 Summary: llama-index llms openai integration
 License: MIT
 Author: llama-index
@@ -170,7 +170,7 @@ class OpenAI(FunctionCallingLLM):
         default=DEFAULT_TEMPERATURE,
         description="The temperature to use during generation.",
         ge=0.0,
-        le=1.0,
+        le=2.0,
     )
     max_tokens: Optional[int] = Field(
         description="The maximum number of tokens to generate.",
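
The temperature hunk widens the pydantic bound on the OpenAI LLM's temperature field from le=1.0 to le=2.0, matching the 0–2 range the OpenAI API itself accepts. A minimal usage sketch (assuming the package's standard import path; the model name "gpt-4o-mini" is only an illustration):

    from llama_index.llms.openai import OpenAI

    # With the upper bound relaxed to 2.0, temperatures above 1.0 now pass
    # field validation instead of raising a pydantic ValidationError.
    llm = OpenAI(model="gpt-4o-mini", temperature=1.5)

    # Values outside 0.0-2.0 are still rejected by the ge/le constraints.
    # OpenAI(model="gpt-4o-mini", temperature=2.5)  # would fail validation
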
@@ -42,6 +42,13 @@ O1_MODELS: Dict[str, int] = {
     "o1-mini-2024-09-12": 128000,
 }
 
+O1_MODELS_WITHOUT_FUNCTION_CALLING = {
+    "o1-preview",
+    "o1-preview-2024-09-12",
+    "o1-mini",
+    "o1-mini-2024-09-12",
+}
+
 GPT4_MODELS: Dict[str, int] = {
     # stable model names:
     # resolves to gpt-4-0314 before 2023-06-27,
@@ -247,9 +254,7 @@ def is_function_calling_model(model: str) -> bool:
 
     is_chat_model_ = is_chat_model(model)
     is_old = "0314" in model or "0301" in model
-
-    # TODO: This is temporary for openai's beta
-    is_o1_beta = "o1" in model
+    is_o1_beta = model in O1_MODELS_WITHOUT_FUNCTION_CALLING
 
     return is_chat_model_ and not is_old and not is_o1_beta
 
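
Taken together, the new O1_MODELS_WITHOUT_FUNCTION_CALLING set and the rewritten check narrow the exclusion: previously any model name containing the substring "o1" was treated as lacking function calling, whereas now only the listed o1 beta models are excluded. A small sketch of the resulting behavior, assuming is_function_calling_model lives in the package's utils module alongside the model tables shown above:

    from llama_index.llms.openai.utils import is_function_calling_model

    # The o1 beta models remain excluded: they are chat models, but they sit
    # in O1_MODELS_WITHOUT_FUNCTION_CALLING, so the check returns False.
    print(is_function_calling_model("o1-preview"))  # False
    print(is_function_calling_model("o1-mini"))     # False

    # Chat models outside that set (and not the old 0301/0314 snapshots)
    # are reported as function-calling capable.
    print(is_function_calling_model("gpt-4o"))      # True

One practical consequence is that newer o1-family releases not present in the exclusion set are no longer blanket-blocked from tool calling, as they were under the old substring check.
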
@@ -29,7 +29,7 @@ exclude = ["**/BUILD"]
 license = "MIT"
 name = "llama-index-llms-openai"
 readme = "README.md"
-version = "0.3.11"
+version = "0.3.13"
 
 [tool.poetry.dependencies]
 python = ">=3.9,<4.0"