openaiext 0.0.6 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/openaiext.rb +6 -5
  3. metadata +2 -2
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: fa387e4a2018bbe15a88f28a24d55aefe1b69d722fe060abf4700f85fa2c1e1b
4
- data.tar.gz: c2ae11b8a9287fdabfebde0e97f5d6c20ffc0fc91728bea6bdfcc0ed0ac7c1c1
3
+ metadata.gz: 41b05586226d777a40b9a92f3df343a8c4dabbf3308879b3dbf333707148ee06
4
+ data.tar.gz: 932e68b7d141a9082a703a1e848b620a40ea56005d510c843b619b370f54f1ea
5
5
  SHA512:
6
- metadata.gz: 1d8bd813c1f19c6f3940216a800cf9647df431c198516bbbf31d17f2e784b55f28df1b7d33b92cbdae0271e75f194b623fffe0e2967a9c9f669b988690883bbb
7
- data.tar.gz: 705d4a98839780cd4ae56f419c241c0e85645620c385634250d69685f861631950f12ba4bbf6f584510534b3b32965dc8adc663a0f77d30762fdb9a499e78674
6
+ metadata.gz: 2e7e8fb1032594f7eeaa01ecce877932bb661a890ec2388ba97488fdc36795f168a36a30e4fa207294cfbd3bcffb89fd85fe91a35da775a4f6fd035dffc4ea79
7
+ data.tar.gz: 16fb70150afa2ca8d8ec1616f487d247bea38a356ac3cb9e89923a032867b1bde70d565846edcdf0b282919ab607f1f028cedbba249754a40a13dd96e0981015
data/lib/openaiext.rb CHANGED
@@ -19,15 +19,15 @@ module OpenAIExt
19
19
  chat(messages: [{ role: :user, content: message_content }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:)
20
20
  end
21
21
 
22
- def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil, temperature: nil, top_p: nil, frequency_penalty: nil, presence_penalty: nil)
23
- chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:, temperature:, top_p:, frequency_penalty:, presence_penalty:)
22
+ def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil, temperature: nil, top_p: nil, frequency_penalty: nil, presence_penalty: nil, prediction: nil)
23
+ chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:, temperature:, top_p:, frequency_penalty:, presence_penalty:, prediction:)
24
24
  end
25
25
 
26
- def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil, temperature: nil, top_p: nil, frequency_penalty: nil, presence_penalty: nil)
27
- chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:, temperature:, top_p:, frequency_penalty:, presence_penalty:)
26
+ def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil, temperature: nil, top_p: nil, frequency_penalty: nil, presence_penalty: nil, prediction: nil)
27
+ chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, store:, tools:, auto_run_functions:, function_context:, temperature:, top_p:, frequency_penalty:, presence_penalty:, prediction:)
28
28
  end
29
29
 
30
- def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil, temperature: nil, top_p: nil, frequency_penalty: nil, presence_penalty: nil)
30
+ def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil, temperature: nil, top_p: nil, frequency_penalty: nil, presence_penalty: nil, prediction: nil)
31
31
  model = OpenAIExt::Model.select(model)
32
32
  is_o1_model = model.start_with?('o1')
33
33
 
@@ -47,6 +47,7 @@ module OpenAIExt
47
47
  parameters[:top_p] = top_p if top_p
48
48
  parameters[:frequency_penalty] = frequency_penalty if frequency_penalty
49
49
  parameters[:presence_penalty] = presence_penalty if presence_penalty
50
+ parameters[:prediction] = prediction if prediction
50
51
 
51
52
  begin
52
53
  response = OpenAI::Client.new.chat(parameters:)
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: openaiext
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.6
4
+ version: 0.0.7
5
5
  platform: ruby
6
6
  authors:
7
7
  - Gedean Dias
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2024-10-13 00:00:00.000000000 Z
11
+ date: 2024-11-04 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: ruby-openai