ruby-openai-swarm 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,59 @@
1
+
2
# Example: a single "Weather Agent" that can report weather and send email
# via OpenAISwarm tool calling, routed through the OpenRouter API.

OpenAI.configure do |config|
  config.access_token = ENV['OPEN_ROUTER_ACCESS_TOKEN']
  config.uri_base = "https://openrouter.ai/api/v1"
end

client = OpenAISwarm.new

# Tool: returns a canned weather reading as a JSON string.
def get_weather(location, time= Time.now)
  { location: location, temperature: "65", time: time }.to_json
end

# Tool: pretends to send an email by printing it to stdout.
def send_email(recipient, subject, body)
  puts "Sending email..."
  puts "To: #{recipient}"
  puts "Subject: #{subject}"
  puts "Body: #{body}"
  puts "Sent!"
end

function_instance_send_email = OpenAISwarm::FunctionDescriptor.new(
  target_method: :send_email
)

function_instance_get_weather = OpenAISwarm::FunctionDescriptor.new(
  target_method: :get_weather,
  description: 'Get the current weather in a given location. Location MUST be a city.'
)

weather_agent = OpenAISwarm::Agent.new(
  name: "Weather Agent",
  instructions: "You are a helpful agent.",
  model: "gpt-4o-mini",
  functions: [
    function_instance_send_email,
    function_instance_get_weather
  ]
)

# Sample prompts; debug: true makes the client print the chat parameters,
# raw API responses, and each processed tool call.
msg1 = "Do I need an umbrella today? I'm in chicago."
# model: "gpt-4",
# return: The current temperature in Chicago is 65 degrees. It doesn't look like you'll need an umbrella today!
msg2 = "Tell me the weather in London."
# return: The current temperature in London is 65°F.
response = client.run(
  messages: [{"role" => "user", "content" => msg2}],
  agent: weather_agent,
  debug: true,
)
# print(response.messages[-1]["content"])

response = client.run(
  messages: [{"role" => "user", "content" => "What is the time right now?",}],
  agent: weather_agent,
  debug: true,
)
# p response.messages[-1]["content"]
# return: I'm sorry for the confusion, but as an AI, I don't have the ability to provide real-time information such as the current time. Please check the time on your device.
File without changes
@@ -0,0 +1,21 @@
1
module OpenAISwarm
  # Value object describing an agent: its identity, model, system
  # instructions, the tools it may call, and tool-choice settings.
  class Agent
    attr_accessor :name, :model, :instructions, :functions, :tool_choice, :parallel_tool_calls

    # Every parameter is an optional keyword with a sensible default.
    def initialize(name: "Agent",
                   model: "gpt-4",
                   instructions: "You are a helpful agent.",
                   functions: [],
                   tool_choice: nil,
                   parallel_tool_calls: true)
      self.name = name
      self.model = model
      self.instructions = instructions
      self.functions = functions
      self.tool_choice = tool_choice
      self.parallel_tool_calls = parallel_tool_calls
    end
  end
end
@@ -0,0 +1,269 @@
1
+ require 'ruby/openai'
2
+ begin
3
+ require 'pry'
4
+ rescue LoadError
5
+ end
6
+
7
module OpenAISwarm
  # Orchestrates the agent loop: requests chat completions, executes any
  # tool calls the model emits, and repeats until the model replies without
  # tools, max_turns is exhausted, or no agent remains active.
  class Core
    include Util
    # Reserved argument name used to pass context variables into tool
    # functions; it is stripped from the tool schema shown to the model.
    CTX_VARS_NAME = 'context_variables'

    # @param client [OpenAI::Client, nil] preconfigured client, or nil to
    #   build a default OpenAI::Client.
    def initialize(client = nil)
      @client = client || OpenAI::Client.new
    end

    # Performs one chat-completion request for +agent+ against +history+.
    # Returns the raw API response (or the stream when +stream+ is true).
    # Logs and re-raises OpenAI::Error.
    def get_chat_completion(agent, history, context_variables, model_override, stream, debug)
      context_variables = context_variables.dup
      # Instructions may be a static string or a callable taking the
      # context variables.
      instructions = agent.instructions.respond_to?(:call) ? agent.instructions.call(context_variables) : agent.instructions
      messages = [{ role: 'system', content: instructions }] + history
      Util.debug_print(debug, "Getting chat completion for...:", messages)

      tools = agent.functions.map { |f| Util.function_to_json(f) }
      # Hide context_variables from the model.
      tools.each do |tool|
        params = tool[:function][:parameters]
        params[:properties].delete(CTX_VARS_NAME.to_sym)
        params[:required]&.delete(CTX_VARS_NAME.to_sym)
      end

      create_params = {
        model: model_override || agent.model,
        messages: messages,
        tools: tools.empty? ? nil : tools,
        stream: stream
      }

      create_params[:tool_choice] = agent.tool_choice if agent.tool_choice
      create_params[:parallel_tool_calls] = agent.parallel_tool_calls if tools.any?

      Util.debug_print(debug, "Client chat parameters:", create_params)
      response = @client.chat(parameters: create_params)
      Util.debug_print(debug, "API Response:", response)
      response
    rescue OpenAI::Error => e
      Util.debug_print(true, "OpenAI API Error:", e.message)
      raise
    end

    # Normalizes a tool function's return value into a Result:
    # a Result passes through, an Agent becomes a handoff Result, and
    # anything else is stringified. Raises TypeError if stringifying fails.
    def handle_function_result(result, debug)
      case result
      when Result
        result
      when Agent
        Result.new(
          value: JSON.generate({ assistant: result.name }),
          agent: result
        )
      else
        begin
          Result.new(value: result.to_s)
        rescue => e
          error_message = "Failed to cast response to string: #{result}. Make sure agent functions return a string or Result object. Error: #{e}"
          Util.debug_print(debug, error_message)
          raise TypeError, error_message
        end
      end
    end

    # Executes each tool call against +active_agent+'s functions, collecting
    # tool messages, merged context variables, and any agent handoff into a
    # partial Response.
    def handle_tool_calls(tool_calls, active_agent, context_variables, debug)
      functions = active_agent.functions

      function_map = functions.map do |f|
        if f.is_a?(OpenAISwarm::FunctionDescriptor)
          [f.target_method.name, f.target_method]
        else
          [f.name, f]
        end
      end.to_h.transform_keys(&:to_s)

      partial_response = Response.new(
        messages: [],
        agent: nil,
        context_variables: {}
      )

      tool_calls.each do |tool_call|
        name = tool_call.dig('function', 'name')
        unless function_map.key?(name)
          Util.debug_print(debug, "Tool #{name} not found in function map.")
          partial_response.messages << {
            role: 'tool',
            # FIX: hash-style access — `tool_call.id` raised NoMethodError on
            # the plain Hash tool calls of the non-streaming path (the success
            # branch below already used `tool_call['id']`).
            tool_call_id: tool_call['id'],
            tool_name: name,
            content: "Error: Tool #{name} not found."
          }
          next
        end

        args = JSON.parse(tool_call.dig('function', 'arguments') || '{}')
        Util.debug_print(debug, "Processing tool call: #{name} with arguments #{args}")

        func = function_map[name]
        # Pass context_variables to functions that declare that parameter.
        args[CTX_VARS_NAME] = context_variables if func.parameters.map(&:last).include?(CTX_VARS_NAME.to_sym)
        is_parameters = func.parameters.any?
        arguments = args.transform_keys(&:to_sym)

        # NOTE(review): keyword-splatting into a method that takes only
        # positional parameters makes Ruby 3 pass the hash positionally —
        # confirm tool methods are expected to declare keyword parameters.
        raw_result = is_parameters ? func.call(**arguments) : func.call
        result = handle_function_result(raw_result, debug)

        partial_response.messages << {
          role: 'tool',
          tool_call_id: tool_call['id'],
          tool_name: name,
          content: result.value
        }

        partial_response.context_variables.merge!(result.context_variables)
        partial_response.agent = result.agent if result.agent
      end

      partial_response
    end

    # Runs the agent loop; returns a Response containing only the messages
    # generated during this call (history beyond the caller's input).
    def run(agent:, messages:, context_variables: {}, model_override: nil, stream: false, debug: false, max_turns: Float::INFINITY, execute_tools: true)
      if stream
        return run_and_stream(
          agent: agent,
          messages: messages,
          context_variables: context_variables,
          model_override: model_override,
          debug: debug,
          max_turns: max_turns,
          execute_tools: execute_tools
        )
      end

      active_agent = agent
      context_variables = context_variables.dup
      history = messages.dup
      init_len = messages.length

      while history.length - init_len < max_turns && active_agent
        completion = get_chat_completion(
          active_agent,
          history,
          context_variables,
          model_override,
          stream,
          debug
        )

        message = completion.dig('choices', 0, 'message')
        Util.debug_print(debug, "Received completion:", message)

        # Tag the message with its producing agent (string key, matching the
        # API's string-keyed message hash).
        message['sender'] = active_agent.name
        history << message

        break if !message['tool_calls'] || !execute_tools

        partial_response = handle_tool_calls(
          message['tool_calls'],
          active_agent,
          context_variables,
          debug
        )

        history.concat(partial_response.messages)
        context_variables.merge!(partial_response.context_variables)
        active_agent = partial_response.agent if partial_response.agent
      end

      Response.new(
        messages: history[init_len..],
        agent: active_agent,
        context_variables: context_variables
      )
    end

    private

    # Streaming variant of #run. Yields {delim: "start"/"end"}, symbol-keyed
    # delta hashes, and finally {response: Response}.
    def run_and_stream(agent:, messages:, context_variables: {}, model_override: nil, debug: false, max_turns: Float::INFINITY, execute_tools: true)
      active_agent = agent
      context_variables = context_variables.dup
      history = messages.dup
      init_len = messages.length

      while history.length - init_len < max_turns && active_agent
        message = {
          content: "",
          sender: agent.name,
          role: "assistant",
          function_call: nil,
          # Per-index accumulator for streamed tool-call fragments; the
          # default block creates an empty skeleton per index.
          tool_calls: Hash.new do |h, k|
            h[k] = {
              function: { arguments: "", name: "" },
              id: "",
              type: ""
            }
          end
        }

        completion = get_chat_completion(
          active_agent,
          history,
          context_variables,
          model_override,
          true, # stream
          debug
        )

        yield({ delim: "start" })
        completion.each do |chunk|
          delta = JSON.parse(chunk.choices[0].delta.to_json, symbolize_names: true)
          if delta[:role] == "assistant"
            delta[:sender] = active_agent.name
          end
          yield delta
          delta.delete(:role)
          delta.delete(:sender)
          Util.merge_chunk(message, delta)
        end
        yield({ delim: "end" })

        message[:tool_calls] = message[:tool_calls].values
        message[:tool_calls] = nil if message[:tool_calls].empty?
        Util.debug_print(debug, "Received completion:", message)
        history << message

        break if !message[:tool_calls] || !execute_tools

        # Convert accumulated tool-call hashes to objects so they respond to
        # the same access patterns (dig/[]) as API tool calls.
        tool_calls = message[:tool_calls].map do |tool_call|
          OpenStruct.new(
            id: tool_call[:id],
            function: OpenStruct.new(
              arguments: tool_call[:function][:arguments],
              name: tool_call[:function][:name]
            ),
            type: tool_call[:type]
          )
        end

        partial_response = handle_tool_calls(
          tool_calls,
          # FIX: handle_tool_calls expects the agent itself (it reads
          # `.functions` internally); previously the functions Array was
          # passed, which raised NoMethodError.
          active_agent,
          context_variables,
          debug
        )

        history.concat(partial_response.messages)
        context_variables.merge!(partial_response.context_variables)
        active_agent = partial_response.agent if partial_response.agent
      end

      yield({
        response: Response.new(
          messages: history[init_len..],
          agent: active_agent,
          context_variables: context_variables
        )
      })
    end
  end
end
@@ -0,0 +1,10 @@
1
module OpenAISwarm
  # Pairs a callable tool with a human-readable description used when the
  # function schema is exported to the API. Accepts either a Method object
  # or a method name, which is resolved via Object#method.
  class FunctionDescriptor
    attr_reader :target_method, :description

    def initialize(target_method:, description: '')
      @target_method =
        if target_method.is_a?(Method)
          target_method
        else
          method(target_method)
        end
      @description = description
    end
  end
end
@@ -0,0 +1,90 @@
1
module OpenAISwarm
  # Console helpers: a demo REPL plus pretty-printers for streamed and
  # non-streamed agent responses.
  class Repl
    class << self
      # Consumes the chunks yielded by Core#run_and_stream, echoing content
      # to stdout as it arrives, and returns the final Response.
      #
      # FIX: run_and_stream yields symbol-keyed hashes ({ delim: }, deltas
      # parsed with symbolize_names: true, { response: }), but this method
      # previously read string keys, so streamed content was never printed
      # and the final Response was never returned.
      def process_and_print_streaming_response(response)
        content = ""
        last_sender = ""
        response.each do |chunk|
          last_sender = chunk[:sender] if chunk.key?(:sender)

          if chunk.key?(:content) && !chunk[:content].nil?
            # Prefix the first content fragment of a message with its sender.
            if content.empty? && !last_sender.empty?
              print "\033[94m#{last_sender}:\033[0m "
              last_sender = ""
            end
            print chunk[:content]
            content += chunk[:content]
          end

          if chunk.key?(:tool_calls) && !chunk[:tool_calls].nil?
            chunk[:tool_calls].each do |tool_call|
              f = tool_call[:function]
              name = f[:name]
              # Fragments may carry no (or an empty) name; skip those.
              next if name.nil? || name.empty?
              print "\033[94m#{last_sender}:\033[95m#{name}\033[0m()"
            end
          end

          if chunk.key?(:delim) && chunk[:delim] == "end" && !content.empty?
            puts
            content = ""
          end

          return chunk[:response] if chunk.key?(:response)
        end
      end

      # Prints the assistant messages of a non-streamed run. These messages
      # come from the API response and use string keys; "sender" is the
      # string key added by Core#run.
      def pretty_print_messages(messages)
        messages.each do |message|
          next unless message["role"] == "assistant"

          # FIX: sender is stored under the string key "sender"; the symbol
          # key :sender always printed blank.
          print "\033[94m#{message["sender"]}\033[0m: "

          puts message["content"] if message["content"]

          # Guard against an explicit nil under "tool_calls" (fetch would
          # return nil for a present-but-nil key).
          tool_calls = message["tool_calls"] || []
          puts if tool_calls.length > 1
          tool_calls.each do |tool_call|
            func = tool_call["function"]
            name = func["name"]
            args = JSON.parse(func["arguments"] || "{}").map { |k, v| "#{k}=#{v}" }.join(", ")
            puts "\e[95m#{name}\e[0m(#{args})"
          end
        end
      end

      # Interactive loop: reads user input, runs the swarm, prints the
      # reply, and carries the conversation (and any agent handoff) forward.
      # Type "exit" to quit.
      def run_demo_loop(starting_agent, context_variables: {}, stream: false, debug: false)
        client = OpenAISwarm.new
        puts "Starting Swarm CLI 🐝"

        messages = []
        agent = starting_agent

        loop do
          print "\033[90mUser\033[0m: "
          user_input = gets.chomp
          break if user_input.downcase == "exit"

          messages << { "role" => "user", "content" => user_input }

          response = client.run(
            agent: agent,
            messages: messages,
            context_variables: context_variables,
            stream: stream,
            debug: debug
          )

          if stream
            response = process_and_print_streaming_response(response)
          else
            pretty_print_messages(response.messages)
          end

          messages.concat(response.messages)
          agent = response.agent
        end
      end
    end
  end
end
@@ -0,0 +1,11 @@
1
module OpenAISwarm
  # Aggregate returned by Core#run: the messages produced during the run,
  # the agent active when the run finished, and the accumulated context
  # variables.
  class Response
    attr_accessor :messages, :agent, :context_variables

    def initialize(messages: [], agent: nil, context_variables: {})
      self.messages = messages
      self.agent = agent
      self.context_variables = context_variables
    end
  end
end
@@ -0,0 +1,11 @@
1
module OpenAISwarm
  # Normalized return value of a tool function: the string value to feed
  # back to the model, an optional agent to hand off to, and any context
  # variables to merge into the run.
  class Result
    attr_accessor :value, :agent, :context_variables

    def initialize(value: "", agent: nil, context_variables: {})
      self.value = value
      self.agent = agent
      self.context_variables = context_variables
    end
  end
end
@@ -0,0 +1,78 @@
1
module OpenAISwarm
  # Helper utilities: debug logging, streaming-chunk accumulation, and
  # conversion of Ruby methods into OpenAI tool (function) schemas.
  module Util
    # Prints a timestamped gray message to stdout when +debug+ is truthy;
    # no-op otherwise.
    def self.debug_print(debug, *args)
      return unless debug
      timestamp = Time.now.strftime("%Y-%m-%d %H:%M:%S")
      message = args.map(&:to_s).join(' ')
      puts "\e[97m[\e[90m#{timestamp}\e[97m]\e[90m #{message}\e[0m"
    end

    # Recursively folds +source+ into +target+: String values are appended
    # (accumulating streamed token fragments), Hash values are merged in
    # depth, and all other value types are ignored.
    def self.merge_fields(target, source)
      source.each do |key, value|
        if value.is_a?(String)
          target[key] = target[key].to_s + value
        elsif value && value.is_a?(Hash)
          target[key] ||= {}
          merge_fields(target[key], value)
        end
      end
    end

    # Merges one streamed delta into +final_response+. Tool-call fragments
    # are routed by their :index into final_response[:tool_calls], which is
    # expected to be a Hash with a per-index default skeleton (see
    # Core#run_and_stream). Note: only the first tool-call fragment of a
    # delta is merged here.
    def self.merge_chunk(final_response, delta)
      delta.delete(:role)
      merge_fields(final_response, delta)

      if delta[:tool_calls]&.any?
        index = delta[:tool_calls][0].delete(:index)
        merge_fields(final_response[:tool_calls][index], delta[:tool_calls][0])
      end
    end

    # Builds an OpenAI tool definition hash for +func_instance+, which may
    # be a FunctionDescriptor (wrapping a Method) or a bare Method.
    def self.function_to_json(func_instance)
      is_target_method = func_instance.respond_to?(:target_method) || func_instance.is_a?(OpenAISwarm::FunctionDescriptor)
      func = is_target_method ? func_instance.target_method : func_instance

      function_name = func.name
      function_parameters = func.parameters

      type_map = {
        String => "string",
        Integer => "integer",
        Float => "number",
        TrueClass => "boolean",
        FalseClass => "boolean",
        Array => "array",
        Hash => "object",
        NilClass => "null"
      }
      parameters = {}

      # Method#parameters yields [kind, name] pairs, e.g. [:req, :location].
      function_parameters.each do |type, param_name|
        # NOTE(review): param_name is a Symbol here, so
        # type_map[param_name.class] is always nil and every parameter falls
        # back to "string" — confirm whether mapping declared parameter
        # types was the original intent.
        param_type = type_map[param_name.class] || "string"
        if param_name.to_s == 'context_variables' && type == :opt #type == :keyreq
          param_type = 'object'
        end
        parameters[param_name] = { type: param_type }
      end

      # Required = positional and keyword parameters without defaults.
      required = function_parameters
        .select { |type, _| [:req, :keyreq].include?(type) }
        .map { |_, name| name.to_s }

      description = func_instance.respond_to?(:description) ? func_instance&.description : nil

      {
        type: "function",
        function: {
          name: function_name,
          description: description || '',
          parameters: {
            type: "object",
            properties: parameters,
            required: required
          }
        }
      }
    end
  end
end
@@ -0,0 +1,4 @@
1
module OpenAISwarm
  # Gem version; referenced by the gemspec.
  VERSION = "0.1.0"
end
4
+
@@ -0,0 +1,18 @@
1
+ require 'ruby-openai-swarm/version'
2
+ require 'ruby-openai-swarm/agent'
3
+ require 'ruby-openai-swarm/response'
4
+ require 'ruby-openai-swarm/result'
5
+ require 'ruby-openai-swarm/util'
6
+ require 'ruby-openai-swarm/core'
7
+ require 'ruby-openai-swarm/function_descriptor'
8
+ require 'ruby-openai-swarm/repl'
9
+
10
module OpenAISwarm
  # Base error class for the gem.
  class Error < StandardError; end

  # Convenience constructor: OpenAISwarm.new(client) is shorthand for
  # Core.new(client).
  def self.new(client = nil)
    Core.new(client)
  end
end
@@ -0,0 +1,28 @@
1
# Gem specification for ruby-openai-swarm.
require_relative 'lib/ruby-openai-swarm/version'

Gem::Specification.new do |spec|
  spec.name = "ruby-openai-swarm"
  spec.version = OpenAISwarm::VERSION
  spec.authors = ["Grayson"]
  spec.email = ["cgg5207@gmail.com"]

  spec.summary = " A Ruby implementation of OpenAI function calling swarm"
  spec.description = "Allows for creating swarms of AI agents that can call functions and interact with each other"
  spec.homepage = "https://github.com/grayson/ruby-openai-swarm"
  spec.license = "MIT"
  spec.required_ruby_version = Gem::Requirement.new(">= 2.6.0")

  spec.metadata["homepage_uri"] = spec.homepage
  spec.metadata["source_code_uri"] = spec.homepage
  spec.metadata["changelog_uri"] = "#{spec.homepage}/blob/main/CHANGELOG.md"

  # Package every git-tracked file except tests, specs, and features.
  spec.files = Dir.chdir(File.expand_path(__dir__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{\A(?:test|spec|features)/}) }
  end

  spec.require_paths = ["lib"]
  # Runtime dependency: the ruby-openai client this gem wraps.
  spec.add_dependency "ruby-openai", "~> 7.3"
  spec.add_development_dependency "rspec", "~> 3.0"
  spec.add_development_dependency "rake", "~> 13.0"
  spec.add_development_dependency "pry"
end