LLMFunctionObjects 0.2.2__tar.gz → 0.2.3__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/LLMFunctions.py +44 -13
  2. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects.egg-info/PKG-INFO +1 -1
  3. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/PKG-INFO +1 -1
  4. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/setup.py +1 -1
  5. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LICENSE +0 -0
  6. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/Chat.py +0 -0
  7. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/Configuration.py +0 -0
  8. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/Evaluator.py +0 -0
  9. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/EvaluatorChat.py +0 -0
  10. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/EvaluatorChatGPT.py +0 -0
  11. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/EvaluatorChatGemini.py +0 -0
  12. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/EvaluatorChatOllama.py +0 -0
  13. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/EvaluatorChatPaLM.py +0 -0
  14. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/EvaluatorGemini.py +0 -0
  15. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/EvaluatorOllama.py +0 -0
  16. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/Functor.py +0 -0
  17. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/SubParser.py +0 -0
  18. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects/__init__.py +0 -0
  19. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects.egg-info/SOURCES.txt +0 -0
  20. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects.egg-info/dependency_links.txt +0 -0
  21. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects.egg-info/requires.txt +0 -0
  22. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/LLMFunctionObjects.egg-info/top_level.txt +0 -0
  23. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/README.md +0 -0
  24. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/pyproject.toml +0 -0
  25. {llmfunctionobjects-0.2.2 → llmfunctionobjects-0.2.3}/setup.cfg +0 -0
@@ -31,10 +31,10 @@ def llm_configuration(spec, **kwargs):
31
31
  base_url=None,
32
32
  api_user_id='user',
33
33
  module='openai',
34
- model='gpt-3.5-turbo-instruct', # was 'text-davinci-003'
34
+ model='gpt-4.1-mini', # was 'text-davinci-003'
35
35
  function=openai.completions.create, # was openai.Completion.create
36
- temperature=0.2,
37
- max_tokens=512,
36
+ temperature=0.4,
37
+ max_tokens=4096,
38
38
  total_probability_cutoff=0.03,
39
39
  prompts=None,
40
40
  prompt_delimiter=' ',
@@ -61,28 +61,59 @@ def llm_configuration(spec, **kwargs):
61
61
  client = openai.OpenAI(**client_kwargs)
62
62
 
63
63
  default_chat_model = os.environ.get("OPENAI_CHAT_MODEL", os.environ.get("OPENAI_MODEL", "gpt-4.1-mini"))
64
+ chat_model = kwargs.get("model", default_chat_model)
65
+
66
+ def _chatgpt_param_settings(model_name):
67
+ model_name_local = str(model_name or "").lower()
68
+ is_gpt5 = bool(re.match(r"^gpt[-=]?5", model_name_local))
69
+
70
+ if is_gpt5:
71
+ temperature = 1
72
+ argument_renames = {
73
+ "max_tokens": "max_output_tokens"
74
+ }
75
+ known_params = ["model", "messages", "functions", "function_call",
76
+ "tools", "tool_choice", "response_format",
77
+ "temperature", "top_p", "n", "seed",
78
+ "reasoning_effort", "verbosity",
79
+ "stream", "logprobs", "stop",
80
+ "presence_penalty", "frequency_penalty", "logit_bias",
81
+ "max_completion_tokens",
82
+ "user"]
83
+ else:
84
+ temperature = 0.4
85
+ argument_renames = {"max_tokens": "max_completion_tokens"}
86
+ known_params = ["model", "messages", "functions", "function_call",
87
+ "tools", "tool_choice", "response_format",
88
+ "temperature", "top_p", "n", "seed",
89
+ "stream", "logprobs", "stop",
90
+ "presence_penalty", "frequency_penalty", "logit_bias",
91
+ "max_completion_tokens",
92
+ "max_tokens",
93
+ "user"]
94
+ return argument_renames, known_params, temperature
95
+
96
+ argument_renames, known_params, temperature = _chatgpt_param_settings(chat_model)
64
97
 
65
98
  confChatGPT = llm_configuration("openai",
66
99
  name="chatgpt",
67
100
  module='openai',
68
- model=default_chat_model,
101
+ model=chat_model,
69
102
  function=client.chat.completions.create, # was openai.ChatCompletion.create,
103
+ temperature=temperature,
70
104
  max_tokens=8192,
71
- argument_renames={"max_tokens": "max_completion_tokens"},
72
- known_params=["model", "messages", "functions", "function_call",
73
- "tools", "tool_choice", "response_format",
74
- "temperature", "top_p", "n", "seed",
75
- "stream", "logprobs", "stop",
76
- "presence_penalty", "frequency_penalty", "logit_bias",
77
- "max_completion_tokens",
78
- "max_tokens",
79
- "user"],
105
+ argument_renames=argument_renames,
106
+ known_params=known_params,
80
107
  response_value_keys=[])
81
108
 
82
109
  # Apparently, base_url cannot be included in known_params -- it is for the client object only.
83
110
 
84
111
  if len(kwargs) > 0:
85
112
  confChatGPT = confChatGPT.combine(kwargs)
113
+ argument_renames, known_params, temperature = _chatgpt_param_settings(confChatGPT.model)
114
+ confChatGPT.argument_renames = argument_renames
115
+ confChatGPT.known_params = known_params
116
+ confChatGPT.temperature = temperature
86
117
 
87
118
  # Evaluator class
88
119
  confChatGPT.llm_evaluator = None
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLMFunctionObjects
3
- Version: 0.2.2
3
+ Version: 0.2.3
4
4
  Summary: Large Language Models (LLMs) functions package.
5
5
  Home-page: https://github.com/antononcube/Python-packages/tree/main/LLMFunctionObjects
6
6
  Author: Anton Antonov
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: LLMFunctionObjects
3
- Version: 0.2.2
3
+ Version: 0.2.3
4
4
  Summary: Large Language Models (LLMs) functions package.
5
5
  Home-page: https://github.com/antononcube/Python-packages/tree/main/LLMFunctionObjects
6
6
  Author: Anton Antonov
@@ -5,7 +5,7 @@ with open("README.md", "r") as fh:
5
5
 
6
6
  setuptools.setup(
7
7
  name="LLMFunctionObjects",
8
- version="0.2.2",
8
+ version="0.2.3",
9
9
  author="Anton Antonov",
10
10
  author_email="antononcube@posteo.net",
11
11
  description="Large Language Models (LLMs) functions package.",