openaiext 0.0.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/README.md +1 -0
- data/lib/openaiext/agent.rb +64 -0
- data/lib/openaiext/messages.rb +34 -0
- data/lib/openaiext/response_extender.rb +44 -0
- data/lib/openaiext.rb +87 -0
- metadata +89 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 4622de30e5df0ef4fd41d49f329dd9ec64ca84f87bf8be59dbf3301e75b99429
|
4
|
+
data.tar.gz: fbd7a1741c752c1852f7d7962aaa44bb2dcbf641029de864963783d3b3d2f6a4
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: bd2d2f0ccda1be4e864414de652fbb628f16b172e786d6f8ba45266009829ceda9dacd9996a9af12bf2be63f2eb322363374942b5d6ddbac3edbca5bc61edbdb
|
7
|
+
data.tar.gz: 0ad5bb389487ccd355ae6cb0ac23080eb713b465434b189fe7c86ecedab1301ad7dbd7106ca24c2e5f26c8e2a1223b44a827774df8b14d6d007020fad428b1c4
|
data/README.md
ADDED
@@ -0,0 +1 @@
|
|
1
|
+
# openaiext
|
@@ -0,0 +1,64 @@
|
|
1
|
+
module OpenAIExt
  # Wraps the OpenAI Assistants API: resolves an assistant, creates or reuses a
  # conversation thread (optionally wired to a vector store for file search),
  # and drives runs to a terminal state by polling.
  class Agent
    extend OpenAI

    attr_reader :assistant, :thread, :instructions, :vector_store_id

    # @param assistant_id [String, nil] falls back to ENV['OPENAI_ASSISTANT_ID'] (raises if unset)
    # @param thread_id [String, nil] reuse an existing thread; a new one is created otherwise
    # @param thread_instructions [String, nil] overrides the assistant's stored instructions
    # @param vector_store_id [String, nil] attached to the newly created thread for file_search
    def initialize(assistant_id: nil, thread_id: nil, thread_instructions: nil, vector_store_id: nil)
      @openai_client = OpenAI::Client.new

      assistant_id ||= ENV.fetch('OPENAI_ASSISTANT_ID')
      @assistant = @openai_client.assistants.retrieve(id: assistant_id)

      thread_params = {}

      # Only one vector store can be attached, according to the OpenAI API documentation
      @vector_store_id = vector_store_id
      thread_params = { tool_resources: { file_search: { vector_store_ids: [vector_store_id] } } } if @vector_store_id

      thread_id ||= @openai_client.threads.create(parameters: thread_params)['id']
      @thread = @openai_client.threads.retrieve(id: thread_id)

      @instructions = thread_instructions || @assistant['instructions']
    end

    # Appends a message to the thread. Role defaults to 'user'.
    def add_message(text, role: 'user') = @openai_client.messages.create(thread_id: @thread['id'], parameters: { role: role, content: text })
    def messages = @openai_client.messages.list(thread_id: @thread['id'])
    # Text of the most recent message in the thread.
    def last_message = messages['data'].first['content'].first['text']['value']
    def runs = @openai_client.runs.list(thread_id: @thread['id'])

    # Creates a run on the thread and polls once a second until it completes,
    # fails, or is cancelled. Prints the last message on completion.
    def run(instructions: nil, additional_instructions: nil, additional_message: nil, model: nil, tool_choice: nil)
      params = { assistant_id: @assistant['id'] }

      params[:instructions] = instructions || @instructions
      params[:additional_instructions] = additional_instructions unless additional_instructions.nil?
      params[:tool_choice] = tool_choice unless tool_choice.nil?

      params[:additional_messages] = [{ role: :user, content: additional_message }] unless additional_message.nil?

      params[:model] = model || @assistant['model']

      run_id = @openai_client.runs.create(thread_id: @thread['id'], parameters: params)['id']

      loop do
        response = @openai_client.runs.retrieve(id: run_id, thread_id: @thread['id'])

        case response['status']
        when 'queued', 'in_progress', 'cancelling'
          puts 'Status: Waiting AI Processing finish'
          sleep 1
        when 'completed'
          puts last_message
          break
        when 'requires_action'
          # Handle tool calls (see below)
          # NOTE(review): tool-call handling is not implemented yet; until it is,
          # this branch re-polls without sleeping (busy loop) — confirm intent.
        when 'cancelled', 'failed', 'expired'
          puts response['last_error'].inspect
          break # or `exit`
        else
          # BUG FIX: `status` was an undefined local variable (NameError);
          # report the status field from the API response instead.
          puts "Unknown status response: #{response['status']}"
        end
      end
    end
  end
end
|
@@ -0,0 +1,34 @@
|
|
1
|
+
module OpenAIExt
  # An Array subclass holding chat messages normalized to the
  # { role: String, content: Object } shape expected by the OpenAI API.
  class Messages < Array
    # Accepts nil, a single message hash, or an array of message hashes.
    def initialize(messages = nil)
      super(parse_messages(messages))
    end

    # Normalizes and appends one message (or several) to the list.
    def add(message) = concat(parse_messages(message))

    private

    # Turns shorthand hashes ({ user: "hi" }) and full hashes
    # ({ role: :user, content: "hi" }) into the canonical form.
    # Raises ArgumentError for anything that is not a Hash.
    def parse_messages(messages)
      return [] if messages.nil?

      list = messages.is_a?(Array) ? messages : [messages]

      # if first element is ok, then do not parse the rest
      return list if list.first in { role: String | Symbol, content: String | Array | Hash }

      list.flat_map do |entry|
        raise ArgumentError, "Invalid message format: #{entry}" unless entry.is_a?(Hash)

        if entry.size == 1
          # Shorthand: single { role_name => content } pair.
          role, content = entry.first
          { role: role.to_s, content: content }
        elsif entry.key?(:role) && entry.key?(:content)
          { role: entry[:role].to_s, content: entry[:content] }
        else
          # Multiple role => content pairs in one hash.
          entry.map { |role, content| { role: role.to_s, content: content } }
        end
      end
    end
  end
end
|
@@ -0,0 +1,44 @@
|
|
1
|
+
# Mixin extended onto raw chat-completion response hashes to expose convenient
# accessors over the nested OpenAI payload.
module ResponseExtender
  # Parameters of the originating chat call (stashed by OpenAIExt.chat).
  def chat_params = self[:chat_params]

  def message = dig('choices', 0, 'message')

  def content = dig('choices', 0, 'message', 'content')
  def content? = !content.nil?

  def tool_calls = dig('choices', 0, 'message', 'tool_calls')
  def tool_calls? = !tool_calls.nil?

  # Returns the function-type tool calls as an array of
  # { id:, name:, arguments: } hashes, each with a singleton #run(context:)
  # method that invokes the named method on the given context object.
  # Returns nil when there are no tool calls or no function-type calls.
  def functions
    return if tool_calls.nil?

    function_calls = tool_calls.filter { |tool| tool['type'].eql? 'function' }
    return if function_calls.empty?

    function_calls.map do |function|
      # BUG FIX: previously indexed back into the unfiltered tool_calls array
      # with the filtered index, which misaligns (wrong or nil entry) whenever
      # non-function tool calls are present. Read the entry directly instead.
      function_info = function['function']
      function_def = { id: function['id'], name: function_info['name'], arguments: Oj.load(function_info['arguments'], symbol_keys: true) }

      # Singleton helper: executes this call against `context` and returns the
      # tool-result message to feed back to the API.
      def function_def.run(context:)
        {
          tool_call_id: self[:id],
          role: :tool,
          name: self[:name],
          content: context.send(self[:name], **self[:arguments])
        }
      end

      function_def
    end
  end

  # Runs every function call against the context; returns the tool messages.
  # @raise [RuntimeError] when there are no functions to run
  def functions_run_all(context:)
    raise 'No functions to run' if functions.nil?
    functions.map { |function| function.run(context:) }
  end

  def functions? = !functions.nil?
end
|
data/lib/openaiext.rb
ADDED
@@ -0,0 +1,87 @@
|
|
1
|
+
require 'openai'
|
2
|
+
|
3
|
+
require 'openaiext/messages'
|
4
|
+
require 'openaiext/response_extender'
|
5
|
+
require 'openaiext/agent'
|
6
|
+
|
7
|
+
module OpenAIExt
  # Model aliases, overridable via environment variables.
  GPT_BASIC_MODEL    = ENV.fetch('OPENAI_GPT_BASIC_MODEL', 'gpt-4o-mini')
  GPT_ADVANCED_MODEL = ENV.fetch('OPENAI_GPT_ADVANCED_MODEL', 'gpt-4o-2024-08-06')

  O1_BASIC_MODEL    = ENV.fetch('OPENAI_O1_BASIC_MODEL', 'o1-mini')
  O1_ADVANCED_MODEL = ENV.fetch('OPENAI_O1_ADVANCED_MODEL', 'o1-preview')

  MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i

  # Returns the raw embeddings response, extended with an #embeddings shortcut
  # for the first embedding vector.
  def self.embeddings(input, model: 'text-embedding-3-large')
    response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
    def response.embeddings = dig('data', 0, 'embedding')
    response
  end

  # Single-turn vision request: a text prompt plus one image URL.
  def self.vision(prompt:, image_url:, model: :gpt_advanced, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil)
    message_content = [{ type: :text, text: prompt }, { type: :image_url, image_url: { url: image_url } }]
    # BUG FIX: metadata was accepted but silently dropped; forward it to chat.
    chat(messages: [{ role: :user, content: message_content }], model:, response_format:, max_tokens:, store:, metadata:, tools:, auto_run_functions:, function_context:)
  end

  # Single user prompt, no system message.
  def self.single_prompt(prompt:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil)
    # BUG FIX: metadata was accepted but silently dropped; forward it to chat.
    chat(messages: [{ user: prompt }], model:, response_format:, max_tokens:, store:, metadata:, tools:, auto_run_functions:, function_context:)
  end

  # One system message plus one user message.
  def self.single_chat(system:, user:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil)
    # BUG FIX: metadata was accepted but silently dropped; forward it to chat.
    chat(messages: [{ system: }, { user: }], model:, response_format:, max_tokens:, store:, metadata:, tools:, auto_run_functions:, function_context:)
  end

  # Core chat-completion call. Normalizes messages, maps model aliases, and —
  # when auto_run_functions is true and the model requested tool calls — runs
  # the tools against function_context and issues ONE follow-up request with
  # the tool results appended.
  #
  # @param response_format [Symbol, nil] :json enables JSON mode
  # @return the API response hash, extended with ResponseExtender
  def self.chat(messages:, model: :gpt_basic, response_format: nil, max_tokens: MAX_TOKENS, store: true, metadata: nil, tools: nil, auto_run_functions: false, function_context: nil)
    model = select_model(model)
    is_o1_model = model.start_with?('o1')

    messages = OpenAIExt::Messages.new(messages) unless messages.is_a?(OpenAIExt::Messages)

    parameters = { model:, messages:, store: }
    parameters[:metadata] = metadata if metadata

    # o1 family models don't support the max_tokens param; use max_completion_tokens instead.
    if is_o1_model
      parameters[:max_completion_tokens] = max_tokens
    else
      parameters[:max_tokens] = max_tokens
    end

    parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
    parameters[:tools] = tools if tools

    begin
      response = OpenAI::Client.new.chat(parameters:)
    rescue => e
      raise "Error in OpenAI chat: #{e.message}\nParameters: #{parameters.inspect}"
    end

    response[:chat_params] = parameters
    response.extend(ResponseExtender)

    if response.functions? && auto_run_functions
      raise 'Function context not provided for auto-running functions' if function_context.nil?

      followup_messages = parameters[:messages] + [response.message] + response.functions_run_all(context: function_context)

      # BUG FIX: the old recursive call splatted the raw API parameter hash,
      # which (a) crashed with ArgumentError for o1 models because
      # :max_completion_tokens is not a keyword of .chat, and (b) dropped the
      # :json response_format on the follow-up (the stored expanded hash never
      # passes the eql?(:json) check). Re-pass the original keyword arguments.
      # Note: auto_run_functions is intentionally NOT forwarded, preserving the
      # original single-round tool-execution behavior.
      response = chat(messages: followup_messages, model:, response_format:, max_tokens:, store:, metadata:, tools:)
    end

    response
  end

  def self.models = OpenAI::Client.new.models.list

  # Maps a model alias symbol to the configured model name; any other value is
  # passed through unchanged.
  def self.select_model(model)
    case model
    when :gpt_basic    then GPT_BASIC_MODEL
    when :gpt_advanced then GPT_ADVANCED_MODEL
    when :o1_basic     then O1_BASIC_MODEL
    when :o1_advanced  then O1_ADVANCED_MODEL
    else model
    end
  end
end
|
metadata
ADDED
@@ -0,0 +1,89 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: openaiext
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.0.1
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Gedean Dias
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2024-10-12 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: ruby-openai
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - "~>"
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: '7.3'
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - "~>"
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: '7.3'
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: anthropic
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - "~>"
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: '0.3'
|
34
|
+
type: :runtime
|
35
|
+
prerelease: false
|
36
|
+
version_requirements: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - "~>"
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: '0.3'
|
41
|
+
- !ruby/object:Gem::Dependency
|
42
|
+
name: oj
|
43
|
+
requirement: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - "~>"
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '3'
|
48
|
+
type: :runtime
|
49
|
+
prerelease: false
|
50
|
+
version_requirements: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - "~>"
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '3'
|
55
|
+
description: Based on ruby-openai, adds some extra features
|
56
|
+
email: gedean.dias@gmail.com
|
57
|
+
executables: []
|
58
|
+
extensions: []
|
59
|
+
extra_rdoc_files: []
|
60
|
+
files:
|
61
|
+
- README.md
|
62
|
+
- lib/openaiext.rb
|
63
|
+
- lib/openaiext/agent.rb
|
64
|
+
- lib/openaiext/messages.rb
|
65
|
+
- lib/openaiext/response_extender.rb
|
66
|
+
homepage: https://github.com/gedean/openaiext
|
67
|
+
licenses:
|
68
|
+
- MIT
|
69
|
+
metadata: {}
|
70
|
+
post_install_message:
|
71
|
+
rdoc_options: []
|
72
|
+
require_paths:
|
73
|
+
- lib
|
74
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
75
|
+
requirements:
|
76
|
+
- - ">="
|
77
|
+
- !ruby/object:Gem::Version
|
78
|
+
version: '3'
|
79
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
80
|
+
requirements:
|
81
|
+
- - ">="
|
82
|
+
- !ruby/object:Gem::Version
|
83
|
+
version: '0'
|
84
|
+
requirements: []
|
85
|
+
rubygems_version: 3.5.21
|
86
|
+
signing_key:
|
87
|
+
specification_version: 4
|
88
|
+
summary: Ruby OpenAI Extended
|
89
|
+
test_files: []
|