openaiext 0.0.2 → 0.0.3

This diff shows the content changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/openaiext.rb +10 -5
  3. metadata +2 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 276d45b2a3e4b7e984a1fe2dbc7a05275500c8e15f0e331fe2a2f980db72dea0
-  data.tar.gz: 3366f551da7e14cfb6b0f273e352d79e9eb2c9eb159eb579b4e426f522d3e800
+  metadata.gz: 321e5e7aaba2691bcc8f5fbf258bd33551920d474cee4bcdfcdde4dae744b937
+  data.tar.gz: 9bc3fe053ae856d540925b6dc2815cf345383572cf2243c886589119649424ef
 SHA512:
-  metadata.gz: 38cd47294905c538f82f2a594148e4e8e64fb63caf9858be1f3efdd44613890654f941baab616e4f524f47cacaa5266809c23df9a0a87c360e4ebe70b4783d20
-  data.tar.gz: 6705205f99790956d259acc4793444d3c5740e656cf0e05427aa9f01bd1df0b8941363850793c146cf8bfe898f1cd267d08e7e052bb8d82e7fef20f5b9da5749
+  metadata.gz: a244914cf823cb4fc0c6d1ec678858ad8a0467c015c6535cfcde6fa1756d1f4a67b0ecd1b4a23c8aff35d0dc3e84f01e0af0aa1fd6571c60b1a5033a02d2c448
+  data.tar.gz: 207768e9d24034baea6278ff4c4f3fc03a664bc29d82904f65f143ca3e3a6cf2f7563bc77765ec0c5298685c0301f8abbacf03711c5c529f79ad6b4e8258937f
data/lib/openaiext.rb CHANGED
@@ -19,15 +19,15 @@ module OpenAIExt
     chat(messages: [{ role: :user, content: message_content }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:)
   end
 
-  def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil)
-    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:)
+  def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil, temperature: nil, top_p: nil, frequency_penalty: nil, presence_penalty: nil)
+    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:, temperature:, top_p:, frequency_penalty:, presence_penalty:)
   end
 
-  def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil)
-    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:)
+  def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil, temperature: nil, top_p: nil, frequency_penalty: nil, presence_penalty: nil)
+    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:, temperature:, top_p:, frequency_penalty:, presence_penalty:)
   end
 
-  def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil)
+  def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil, temperature: nil, top_p: nil, frequency_penalty: nil, presence_penalty: nil)
     model = OpenAIExt::Model.select(model)
     is_o1_model = model.start_with?('o1')
 
@@ -43,6 +43,11 @@ module OpenAIExt
     parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
     parameters[:tools] = tools if tools
 
+    parameters[:temperature] = temperature if temperature
+    parameters[:top_p] = top_p if top_p
+    parameters[:frequency_penalty] = frequency_penalty if frequency_penalty
+    parameters[:presence_penalty] = presence_penalty if presence_penalty
+
     begin
       response = OpenAI::Client.new.chat(parameters:)
     rescue => e
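
The functional change in 0.0.3 is that the convenience helpers now accept OpenAI sampling parameters and add them to the request only when they are non-nil, so existing callers keep the 0.0.2 behaviour. A minimal usage sketch, assuming the ruby-openai client is already configured with an API key and that the :gpt_basic model alias resolves as in 0.0.2 (the prompt text and parameter values below are illustrative):

require 'openaiext'

# New in 0.0.3: temperature, top_p, frequency_penalty and presence_penalty
# are forwarded to OpenAI::Client#chat only when set.
response = OpenAIExt.single_prompt(
  prompt: 'Summarize this changelog in one sentence.', # illustrative prompt
  model: :gpt_basic,
  temperature: 0.2,       # lower randomness
  top_p: 0.9,             # nucleus sampling cutoff
  frequency_penalty: 0.1, # discourage verbatim repetition
  presence_penalty: 0.5   # encourage new topics
)

The same four keywords are available on OpenAIExt.single_chat and OpenAIExt.chat, since all three methods share the parameter handling shown in the hunk above.
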
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: openaiext
 version: !ruby/object:Gem::Version
-  version: 0.0.2
+  version: 0.0.3
 platform: ruby
 authors:
 - Gedean Dias
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-10-12 00:00:00.000000000 Z
+date: 2024-10-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai