scout-ai 1.0.0 → 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. checksums.yaml +4 -4
  2. data/.vimproject +80 -15
  3. data/README.md +296 -0
  4. data/Rakefile +2 -0
  5. data/VERSION +1 -1
  6. data/doc/Agent.md +279 -0
  7. data/doc/Chat.md +258 -0
  8. data/doc/LLM.md +446 -0
  9. data/doc/Model.md +513 -0
  10. data/doc/RAG.md +129 -0
  11. data/lib/scout/llm/agent/chat.rb +51 -1
  12. data/lib/scout/llm/agent/delegate.rb +39 -0
  13. data/lib/scout/llm/agent/iterate.rb +44 -0
  14. data/lib/scout/llm/agent.rb +42 -21
  15. data/lib/scout/llm/ask.rb +38 -6
  16. data/lib/scout/llm/backends/anthropic.rb +147 -0
  17. data/lib/scout/llm/backends/bedrock.rb +1 -1
  18. data/lib/scout/llm/backends/ollama.rb +23 -29
  19. data/lib/scout/llm/backends/openai.rb +34 -40
  20. data/lib/scout/llm/backends/responses.rb +158 -110
  21. data/lib/scout/llm/chat.rb +250 -94
  22. data/lib/scout/llm/embed.rb +4 -4
  23. data/lib/scout/llm/mcp.rb +28 -0
  24. data/lib/scout/llm/parse.rb +1 -0
  25. data/lib/scout/llm/rag.rb +9 -0
  26. data/lib/scout/llm/tools/call.rb +66 -0
  27. data/lib/scout/llm/tools/knowledge_base.rb +158 -0
  28. data/lib/scout/llm/tools/mcp.rb +59 -0
  29. data/lib/scout/llm/tools/workflow.rb +69 -0
  30. data/lib/scout/llm/tools.rb +58 -143
  31. data/lib/scout-ai.rb +1 -0
  32. data/scout-ai.gemspec +31 -18
  33. data/scout_commands/agent/ask +28 -71
  34. data/scout_commands/documenter +148 -0
  35. data/scout_commands/llm/ask +2 -2
  36. data/scout_commands/llm/server +319 -0
  37. data/share/server/chat.html +138 -0
  38. data/share/server/chat.js +468 -0
  39. data/test/scout/llm/backends/test_anthropic.rb +134 -0
  40. data/test/scout/llm/backends/test_openai.rb +45 -6
  41. data/test/scout/llm/backends/test_responses.rb +124 -0
  42. data/test/scout/llm/test_agent.rb +0 -70
  43. data/test/scout/llm/test_ask.rb +3 -1
  44. data/test/scout/llm/test_chat.rb +43 -1
  45. data/test/scout/llm/test_mcp.rb +29 -0
  46. data/test/scout/llm/tools/test_knowledge_base.rb +22 -0
  47. data/test/scout/llm/tools/test_mcp.rb +11 -0
  48. data/test/scout/llm/tools/test_workflow.rb +39 -0
  49. metadata +56 -17
  50. data/README.rdoc +0 -18
  51. data/python/scout_ai/__pycache__/__init__.cpython-310.pyc +0 -0
  52. data/python/scout_ai/__pycache__/__init__.cpython-311.pyc +0 -0
  53. data/python/scout_ai/__pycache__/huggingface.cpython-310.pyc +0 -0
  54. data/python/scout_ai/__pycache__/huggingface.cpython-311.pyc +0 -0
  55. data/python/scout_ai/__pycache__/util.cpython-310.pyc +0 -0
  56. data/python/scout_ai/__pycache__/util.cpython-311.pyc +0 -0
  57. data/python/scout_ai/atcold/plot_lib.py +0 -141
  58. data/python/scout_ai/atcold/spiral.py +0 -27
  59. data/python/scout_ai/huggingface/train/__pycache__/__init__.cpython-310.pyc +0 -0
  60. data/python/scout_ai/huggingface/train/__pycache__/next_token.cpython-310.pyc +0 -0
  61. data/python/scout_ai/language_model.py +0 -70
  62. /data/{python/scout_ai/atcold/__init__.py → test/scout/llm/tools/test_call.rb} +0 -0
data/lib/scout/llm/agent/chat.rb CHANGED
@@ -1,7 +1,7 @@
 module LLM
   class Agent
     def start_chat
-      @start_chat ||= Chat.setup []
+      @start_chat ||= Chat.setup([])
     end
 
     def start(chat=nil)
@@ -20,5 +20,55 @@ module LLM
     def method_missing(name,...)
       current_chat.send(name, ...)
     end
+
+    def respond(...)
+      self.ask(current_chat, ...)
+    end
+
+    def chat(model = nil, options = {})
+      new = self.ask(current_chat, model, options.merge(return_messages: true))
+      current_chat.concat(new)
+      new.last['content']
+    end
+
+    def chat(model = nil, options = {})
+      response = ask(current_chat, model, options.merge(return_messages: true))
+      if Array === response
+        current_chat.concat(response)
+        current_chat.answer
+      else
+        current_chat.push({role: :assistant, content: response})
+        response
+      end
+    end
+
+
+    def json(...)
+      current_chat.format :json
+      output = ask(current_chat, ...)
+      obj = JSON.parse output
+      if (Hash === obj) and obj.keys == ['content']
+        obj['content']
+      else
+        obj
+      end
+    end
+
+    def json_format(format, ...)
+      current_chat.format format
+      output = ask(current_chat, ...)
+      obj = JSON.parse output
+      if (Hash === obj) and obj.keys == ['content']
+        obj['content']
+      else
+        obj
+      end
+    end
+
+    def get_previous_response_id
+      msg = current_chat.reverse.find{|msg| msg[:role].to_sym == :previous_response_id }
+      msg.nil? ? nil : msg['content']
+    end
+
   end
 end
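The new helpers above give Agent a conversational API on top of current_chat. A minimal usage sketch, assuming the gem is loaded with require 'scout-ai' and a default LLM backend is configured; the prompts are illustrative, and user reaches current_chat through the method_missing delegation shown in the hunk:

    require 'scout-ai'

    agent = LLM::Agent.new

    # `user` is forwarded to current_chat via method_missing
    agent.user "List three Ruby web frameworks"

    # `chat` sends the conversation, appends the reply and returns its text
    puts agent.chat

    # `json` switches the chat format to :json, parses the reply and
    # unwraps a single {"content": ...} wrapper when present
    agent.user "Return the same list as a JSON array"
    list = agent.json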
data/lib/scout/llm/agent/delegate.rb ADDED
@@ -0,0 +1,39 @@
+module LLM
+  class Agent
+
+    def delegate(agent, name, description, &block)
+      @other_options[:tools] ||= {}
+      task_name = "hand_off_to_#{name}"
+
+      block ||= Proc.new do |name, parameters|
+        message = parameters[:message]
+        agent.user message
+        agent.chat
+      end
+
+      properties = {
+        message: {
+          "type": :string,
+          "description": "Message to pass to the agent"
+        }
+      }
+
+      required_inputs = [:message]
+
+      function = {
+        name: task_name,
+        description: description,
+        parameters: {
+          type: "object",
+          properties: properties,
+          required: required_inputs
+        }
+      }
+
+      definition = IndiferentHash.setup function.merge(type: 'function', function: function)
+
+
+      @other_options[:tools][task_name] = [block, definition]
+    end
+  end
+end
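delegate registers a hand_off_to_<name> function tool whose default handler forwards a message to another agent and returns its reply. A sketch of wiring two agents together; the agent roles and prompts are made up for illustration:

    require 'scout-ai'

    researcher = LLM.agent
    writer = LLM.agent

    # Registers a 'hand_off_to_researcher' tool on the writer agent;
    # the default block sends the message and returns researcher.chat
    writer.delegate researcher, :researcher,
      "Hand factual questions off to the research agent"

    writer.user "Draft a short paragraph on CRISPR; check facts first"
    puts writer.chat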
data/lib/scout/llm/agent/iterate.rb ADDED
@@ -0,0 +1,44 @@
+module LLM
+  class Agent
+
+    def iterate(prompt = nil, &block)
+      self.endpoint :responses
+      self.user prompt if prompt
+
+      obj = self.json_format({
+        "$schema": "http://json-schema.org/draft-07/schema#",
+        "type": "object",
+        "properties": {
+          "content": {
+            "type": "array",
+            "items": { "type": "string" }
+          }
+        },
+        "required": ["content"],
+        "additionalProperties": false
+      })
+
+      self.option :format, :text
+
+      list = Hash === obj ? obj['content'] : obj
+
+      list.each &block
+    end
+
+    def iterate_dictionary(prompt = nil, &block)
+      self.endpoint :responses
+      self.user prompt if prompt
+
+      dict = self.json_format({
+        name: 'dictionary',
+        type: 'object',
+        properties: {},
+        additionalProperties: {type: :string}
+      })
+
+      self.option :format, :text
+
+      dict.each &block
+    end
+  end
+end
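iterate constrains the response to a JSON object whose content key holds an array of strings and yields each element; iterate_dictionary does the same for a string-valued object and yields key/value pairs. A sketch with illustrative prompts:

    require 'scout-ai'

    agent = LLM.agent

    # Yields each string from the JSON "content" array
    agent.iterate "Name five tumor suppressor genes" do |gene|
      puts gene
    end

    # Yields key/value pairs from a string-valued JSON object
    agent.iterate_dictionary "Map each gene to its chromosome" do |gene, chromosome|
      puts "#{gene}: #{chromosome}"
    end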
data/lib/scout/llm/agent.rb CHANGED
@@ -1,12 +1,17 @@
 require_relative 'ask'
 
 module LLM
+  def self.agent(...)
+    LLM::Agent.new(...)
+  end
+
   class Agent
-    attr_accessor :workflow, :knowledge_base, :start_chat
+    attr_accessor :workflow, :knowledge_base, :start_chat, :process_exception, :other_options
     def initialize(workflow: nil, knowledge_base: nil, start_chat: nil, **kwargs)
       @workflow = workflow
+      @workflow = Workflow.require_workflow @workflow if String === @workflow
       @knowledge_base = knowledge_base
-      @other_options = kwargs
+      @other_options = IndiferentHash.setup(kwargs.dup)
       @start_chat = start_chat
     end
 
@@ -48,31 +53,47 @@ You have access to the following databases associating entities:
       messages = [messages] unless messages.is_a? Array
       model ||= @model if model
 
-      tools = []
-      tools += LLM.workflow_tools(workflow) if workflow
-      tools += LLM.knowledge_base_tool_definition(knowledge_base) if knowledge_base and knowledge_base.all_databases.any?
-
-      LLM.ask prompt(messages), @other_options.merge(log_errors: true, tools: tools) do |name,parameters|
-        case name
-        when 'children'
-          parameters = IndiferentHash.setup(parameters)
-          database, entities = parameters.values_at "database", "entities"
-          Log.high "Finding #{entities} children in #{database}"
-          knowledge_base.children(database, entities)
+      tools = options[:tools] || {}
+      tools = tools.merge @other_options[:tools] if @other_options[:tools]
+      options[:tools] = tools
+      begin
+        if workflow || knowledge_base
+          tools.merge!(LLM.workflow_tools(workflow)) if workflow
+          tools.merge!(LLM.knowledge_base_tool_definition(knowledge_base)) if knowledge_base and knowledge_base.all_databases.any?
+          options[:tools] = tools
+          LLM.ask messages, @other_options.merge(log_errors: true).merge(options)
         else
-          if workflow
-            begin
-              Log.high "Calling #{workflow}##{name} with #{Log.fingerprint parameters}"
-              workflow.job(name, parameters).run
-            rescue
-              $!.message
-            end
+          LLM.ask messages, @other_options.merge(log_errors: true).merge(options)
+        end
+      rescue
+        exception = $!
+        if Proc === self.process_exception
+          try_again = self.process_exception.call exception
+          if try_again
+            retry
           else
-            raise "What?"
+            raise exception
           end
+        else
+          raise exception
         end
       end
     end
+
+    def self.load_from_path(path, workflow: nil, knowledge_base: nil, chat: nil)
+      workflow_path = path['workflow.rb'].find
+      knowledge_base_path = path['knowledge_base']
+      chat_path = path['start_chat']
+
+      workflow = Workflow.require_workflow workflow_path if workflow_path.exists?
+      knowledge_base = KnowledgeBase.new knowledge_base_path if knowledge_base_path.exists?
+      chat = LLM.chat chat_path if chat_path.exists?
+
+      LLM::Agent.new workflow: workflow, knowledge_base: knowledge_base, start_chat: chat
+    end
   end
 end
+
 require_relative 'agent/chat'
+require_relative 'agent/iterate'
+require_relative 'agent/delegate'
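The ask rewrite routes tool handling through the generic tools option and adds a process_exception hook: when the hook returns truthy the failed call is retried, otherwise the exception is re-raised. A sketch of a bounded retry policy; the limit of three attempts is arbitrary:

    require 'scout-ai'

    agent = LLM.agent

    attempts = 0
    agent.process_exception = Proc.new do |exception|
      attempts += 1
      attempts < 3 # truthy => retry the ask, falsy => re-raise
    end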
data/lib/scout/llm/ask.rb CHANGED
@@ -1,21 +1,44 @@
 require 'scout'
-require_relative 'backends/openai'
-require_relative 'backends/ollama'
-require_relative 'backends/openwebui'
-require_relative 'backends/bedrock'
-require_relative 'backends/relay'
-require_relative 'backends/responses'
+require_relative 'chat'
 
 module LLM
   def self.ask(question, options = {}, &block)
     messages = LLM.chat(question)
    options = IndiferentHash.add_defaults LLM.options(messages), options
 
+    agent = IndiferentHash.process_options options, :agent
+
+    if agent
+      agent_file = Scout.workflows[agent]
+
+      agent_file = Scout.chats[agent] unless agent_file.exists?
+
+      agent_file = agent_file.find_with_extension('rb') unless agent_file.exists?
+
+
+      if agent_file.exists?
+        if agent_file.directory?
+          if agent_file.agent.find_with_extension('rb').exists?
+            agent = load agent_file.agent.find_with_extension('rb')
+          else
+            agent = LLM::Agent.load_from_path agent_file
+          end
+        else
+          agent = load agent_file
+        end
+      else
+        raise "Agent not found: #{agent}"
+      end
+      return agent.ask(question, options)
+    end
+
     endpoint, persist = IndiferentHash.process_options options, :endpoint, :persist, persist: true
 
     endpoint ||= Scout::Config.get :endpoint, :ask, :llm, env: 'ASK_ENDPOINT,LLM_ENDPOINT'
     if endpoint && Scout.etc.AI[endpoint].exists?
       options = IndiferentHash.add_defaults options, Scout.etc.AI[endpoint].yaml
+    elsif endpoint && endpoint != ""
+      raise "Endpoint not found #{endpoint}"
     end
 
     Persist.persist(endpoint, :json, prefix: "LLM ask", other: options.merge(messages: messages), persist: persist) do
@@ -24,16 +47,25 @@ module LLM
 
       case backend
       when :openai, "openai"
+        require_relative 'backends/openai'
         LLM::OpenAI.ask(messages, options, &block)
+      when :anthropic, "anthropic"
+        require_relative 'backends/anthropic'
+        LLM::Anthropic.ask(messages, options, &block)
       when :responses, "responses"
+        require_relative 'backends/responses'
         LLM::Responses.ask(messages, options, &block)
       when :ollama, "ollama"
+        require_relative 'backends/ollama'
         LLM::OLlama.ask(messages, options, &block)
       when :openwebui, "openwebui"
+        require_relative 'backends/openwebui'
         LLM::OpenWebUI.ask(messages, options, &block)
       when :relay, "relay"
+        require_relative 'backends/relay'
         LLM::Relay.ask(messages, options, &block)
       when :bedrock, "bedrock"
+        require_relative 'backends/bedrock'
         LLM::Bedrock.ask(messages, options, &block)
       else
         raise "Unknown backend: #{backend}"
data/lib/scout/llm/backends/anthropic.rb ADDED
@@ -0,0 +1,147 @@
+require 'scout'
+require 'anthropic'
+require_relative '../chat'
+
+module LLM
+  module Anthropic
+
+    def self.client(url = nil, key = nil, log_errors = false, request_timeout: 1200)
+      url ||= Scout::Config.get(:url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_URL')
+      key ||= LLM.get_url_config(:key, url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_KEY')
+      Object::Anthropic::Client.new(access_token:key, log_errors: log_errors, uri_base: url, request_timeout: request_timeout)
+    end
+
+    def self.process_input(messages)
+      messages.collect do |message|
+        if message[:role] == 'image'
+          Log.warn "Endpoint 'anthropic' does not support images, try 'responses': #{message[:content]}"
+          next
+        else
+          message
+        end
+      end.flatten.compact
+    end
+
+    def self.process_response(response, tools, &block)
+      Log.debug "Respose: #{Log.fingerprint response}"
+
+      response['content'].collect do |output|
+        case output['type']
+        when 'text'
+          IndiferentHash.setup({role: :assistant, content: output['text']})
+        when 'reasoning'
+          next
+        when 'tool_use'
+          LLM.process_calls(tools, [output], &block)
+        when 'web_search_call'
+          next
+        else
+          eee response
+          eee output
+          raise
+        end
+      end.compact.flatten
+    end
+
+
+    def self.ask(question, options = {}, &block)
+      original_options = options.dup
+
+      messages = LLM.chat(question)
+      options = options.merge LLM.options messages
+
+      options = IndiferentHash.add_defaults options, max_tokens: 1000
+
+      client, url, key, model, log_errors, return_messages, format, tool_choice_next, previous_response_id, tools = IndiferentHash.process_options options,
+        :client, :url, :key, :model, :log_errors, :return_messages, :format, :tool_choice_next, :previous_response_id, :tools,
+        log_errors: true, tool_choice_next: :none
+
+      if client.nil?
+        url ||= Scout::Config.get(:url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_URL')
+        key ||= LLM.get_url_config(:key, url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_KEY')
+        client = self.client url, key, log_errors
+      end
+
+      if model.nil?
+        url ||= Scout::Config.get(:url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_URL')
+        model ||= LLM.get_url_config(:model, url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_MODEL', default: "claude-sonnet-4-20250514")
+      end
+
+      case format.to_sym
+      when :json, :json_object
+        options[:response_format] = {type: 'json_object'}
+      else
+        options[:response_format] = {type: format}
+      end if format
+
+      parameters = options.merge(model: model)
+
+      # Process tools
+
+      case tools
+      when Array
+        tools = tools.inject({}) do |acc,definition|
+          IndiferentHash.setup definition
+          name = definition.dig('name') || definition.dig('function', 'name')
+          acc.merge(name => definition)
+        end
+      when nil
+        tools = {}
+      end
+
+      tools.merge!(LLM.tools messages)
+      tools.merge!(LLM.associations messages)
+
+      if tools.any?
+        parameters[:tools] = tools.values.collect{|obj,definition| Hash === obj ? obj : definition}
+      end
+
+      parameters[:tools] = parameters[:tools].collect do |info|
+        IndiferentHash.setup(info)
+        info[:type] = 'custom' if info[:type] == 'function'
+        info[:input_schema] = info.delete('parameters') if info["parameters"]
+        info
+      end if parameters[:tools]
+
+      messages = self.process_input messages
+
+      Log.low "Calling anthropic #{url}: #{Log.fingerprint parameters}}"
+
+      parameters[:messages] = LLM.tools_to_anthropic messages
+
+      response = self.process_response client.messages(parameters: parameters), tools, &block
+
+      res = if response.last[:role] == 'function_call_output'
+        #response + self.ask(messages + response, original_options.merge(tool_choice: tool_choice_next, return_messages: true, tools: tools ), &block)
+        response + self.ask(messages + response, original_options.merge(return_messages: true, tools: tools ), &block)
+      else
+        response
+      end
+
+      if return_messages
+        res
+      else
+        res.last['content']
+      end
+    end
+
+    def self.embed(text, options = {})
+
+      client, url, key, model, log_errors = IndiferentHash.process_options options, :client, :url, :key, :model, :log_errors
+
+      if client.nil?
+        url ||= Scout::Config.get(:url, :openai_embed, :embed, :anthropic, env: 'ANTHROPIC_URL')
+        key ||= LLM.get_url_config(:key, url, :openai_embed, :embed, :anthropic, env: 'ANTHROPIC_KEY')
+        client = self.client url, key, log_errors
+      end
+
+      if model.nil?
+        url ||= Scout::Config.get(:url, :openai_embed, :embed, :anthropic, env: 'ANTHROPIC_URL')
+        model ||= LLM.get_url_config(:model, url, :openai_embed, :embed, :anthropic, env: 'ANTHROPIC_MODEL', default: "gpt-3.5-turbo")
+      end
+
+      response = client.embeddings(parameters: {input: text, model: model})
+      response.dig('data', 0, 'embedding')
+    end
+  end
+end
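The new Anthropic backend mirrors the OpenAI one: configuration comes from Scout::Config or the ANTHROPIC_URL/ANTHROPIC_KEY/ANTHROPIC_MODEL environment variables, function-type tool definitions are translated to Anthropic's custom/input_schema form, and tool calls recurse until a final answer. A direct-call sketch, assuming the gem's lib directory is on the load path and a key is set in the environment:

    require 'scout'
    require 'scout/llm/backends/anthropic'

    # Model defaults to claude-sonnet-4-20250514 when not configured
    reply = LLM::Anthropic.ask "One-line summary of the Scout framework"
    puts reply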
data/lib/scout/llm/backends/bedrock.rb CHANGED
@@ -45,7 +45,7 @@ module LLM
     model ||= Scout::Config.get(:model, :bedrock_ask, :ask, :bedrock, env: 'BEDROCK_MODEL_ID')
     type ||= Scout::Config.get(:type, model, default: :messages)
 
-    role = IndiferentHash.process_options options, :role
+    role, previous_response_id, tools = IndiferentHash.process_options options, :role, :previous_response_id, :tools
     messages = LLM.parse(question, role)
 
     case type.to_sym
data/lib/scout/llm/backends/ollama.rb CHANGED
@@ -17,7 +17,7 @@ module LLM
    end
 
 
-    def self.process_response(responses, &block)
+    def self.process_response(responses, tools, &block)
      responses.collect do |response|
        Log.debug "Respose: #{Log.fingerprint response}"
 
@@ -26,7 +26,7 @@ module LLM
          response.dig("message", "tool_calls")
 
        if tool_calls && tool_calls.any?
-          LLM.call_tools tool_calls, &block
+          LLM.process_calls tools, tool_calls, &block
        else
          [message]
        end
@@ -38,11 +38,9 @@ module LLM
 
      messages = LLM.chat(question)
      options = options.merge LLM.options messages
-      tools = LLM.tools messages
-      associations = LLM.associations messages
 
-      client, url, key, model, return_messages, format, stream = IndiferentHash.process_options options,
-        :client, :url, :key, :model, :return_messages, :format, :stream,
+      client, url, key, model, return_messages, format, stream, previous_response_id, tools = IndiferentHash.process_options options,
+        :client, :url, :key, :model, :return_messages, :format, :stream, :previous_response_id, :tools,
        stream: false
 
      if client.nil?
@@ -66,28 +64,24 @@ module LLM
 
      parameters = options.merge(model: model)
 
-      if tools.any? || associations.any?
-        parameters[:tools] = []
-        parameters[:tools] += tools.values.collect{|a| a.last } if tools
-        parameters[:tools] += associations.values.collect{|a| a.last } if associations
-        if not block_given?
-          block = Proc.new do |name,parameters|
-            IndiferentHash.setup parameters
-            if tools[name]
-              workflow = tools[name].first
-              jobname = parameters.delete :jobname
-              workflow.job(name, jobname, parameters).run
-            else
-              kb = associations[name].first
-              entities, reverse = IndiferentHash.process_options parameters, :entities, :reverse
-              if reverse
-                kb.parents(name, entities)
-              else
-                kb.children(name, entities)
-              end
-            end
-          end
+      # Process tools
+
+      case tools
+      when Array
+        tools = tools.inject({}) do |acc,definition|
+          IndiferentHash.setup definition
+          name = definition.dig('name') || definition.dig('function', 'name')
+          acc.merge(name => definition)
        end
+      when nil
+        tools = {}
+      end
+
+      tools.merge!(LLM.tools messages)
+      tools.merge!(LLM.associations messages)
+
+      if tools.any?
+        parameters[:tools] = tools.values.collect{|obj,definition| Hash === obj ? obj : definition}
      end
 
      Log.low "Calling client with parameters #{Log.fingerprint parameters}\n#{LLM.print messages}"
@@ -96,10 +90,10 @@ module LLM
 
      parameters[:stream] = stream
 
-      response = self.process_response client.chat(parameters), &block
+      response = self.process_response client.chat(parameters), tools, &block
 
      res = if response.last[:role] == 'function_call_output'
-        response + self.ask(messages + response, original_options.except(:tool_choice).merge(return_messages: true, tools: parameters[:tools]), &block)
+        response + self.ask(messages + response, original_options.except(:tool_choice).merge(return_messages: true, tools: tools), &block)
      else
        response
      end
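After this change the backends accept tools: either as a hash keyed by function name or as a plain array of definitions, keyed by name internally, with executions flowing through LLM.process_calls and the handler block. A sketch of the definition shape, matching the structure built in agent/delegate.rb; the tool, prompt, and handler are hypothetical, and the :backend option name is assumed from the dispatch in ask.rb:

    get_weather = {
      name: 'get_weather',
      description: 'Return the current weather for a city',
      parameters: {
        type: 'object',
        properties: { city: { type: 'string' } },
        required: ['city']
      }
    }

    LLM.ask "What is the weather in Paris?",
            backend: :ollama, tools: [get_weather] do |name, parameters|
      "Sunny in #{parameters['city']}" # hypothetical tool handler
    end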
data/lib/scout/llm/backends/openai.rb CHANGED
@@ -5,23 +5,24 @@ require_relative '../chat'
 module LLM
   module OpenAI
 
-    def self.client(url, key, log_errors = false)
-      Object::OpenAI::Client.new(access_token:key, log_errors: log_errors, uri_base: url)
+    def self.client(url = nil, key = nil, log_errors = false, request_timeout: 1200)
+      url ||= Scout::Config.get(:url, :openai_ask, :ask, :openai, env: 'OPENAI_URL')
+      key ||= LLM.get_url_config(:key, url, :openai_ask, :ask, :openai, env: 'OPENAI_KEY')
+      Object::OpenAI::Client.new(access_token:key, log_errors: log_errors, uri_base: url, request_timeout: request_timeout)
    end
 
    def self.process_input(messages)
      messages.collect do |message|
-        if message[:role] == 'function_call'
-          {role: 'assistant', tool_calls: [message[:content]]}
-        elsif message[:role] == 'function_call_output'
-          message[:content]
+        if message[:role] == 'image'
+          Log.warn "Endpoint 'openai' does not support images, try 'responses': #{message[:content]}"
+          next
        else
          message
        end
-      end.flatten
+      end.flatten.compact
    end
 
-    def self.process_response(response, &block)
+    def self.process_response(response, tools, &block)
      Log.debug "Respose: #{Log.fingerprint response}"
      raise Exception, response["error"] if response["error"]
 
@@ -30,7 +31,7 @@ module LLM
        response.dig("choices", 0, "message", "tool_calls")
 
      if tool_calls && tool_calls.any?
-        LLM.call_tools tool_calls, &block
+        LLM.process_calls(tools, tool_calls, &block)
      else
        [message]
      end
@@ -41,12 +42,10 @@ module LLM
 
      messages = LLM.chat(question)
      options = options.merge LLM.options messages
-      tools = LLM.tools messages
-      associations = LLM.associations messages
 
-      client, url, key, model, log_errors, return_messages, format = IndiferentHash.process_options options,
-        :client, :url, :key, :model, :log_errors, :return_messages, :format,
-        log_errors: true
+      client, url, key, model, log_errors, return_messages, format, tool_choice_next, previous_response_id, tools, = IndiferentHash.process_options options,
+        :client, :url, :key, :model, :log_errors, :return_messages, :format, :tool_choice_next, :previous_response_id, :tools,
+        log_errors: true, tool_choice_next: :none
 
      if client.nil?
        url ||= Scout::Config.get(:url, :openai_ask, :ask, :openai, env: 'OPENAI_URL')
@@ -59,8 +58,6 @@ module LLM
        model ||= LLM.get_url_config(:model, url, :openai_ask, :ask, :openai, env: 'OPENAI_MODEL', default: "gpt-4.1")
      end
 
-      #role = IndiferentHash.process_options options, :role
-
      case format.to_sym
      when :json, :json_object
        options[:response_format] = {type: 'json_object'}
@@ -70,39 +67,36 @@ module LLM
 
      parameters = options.merge(model: model)
 
-      if tools.any? || associations.any?
-        parameters[:tools] = []
-        parameters[:tools] += tools.values.collect{|a| a.last } if tools
-        parameters[:tools] += associations.values.collect{|a| a.last } if associations
-        if not block_given?
-          block = Proc.new do |name,parameters|
-            IndiferentHash.setup parameters
-            if tools[name]
-              workflow = tools[name].first
-              jobname = parameters.delete :jobname
-              workflow.job(name, jobname, parameters).run
-            else
-              kb = associations[name].first
-              entities, reverse = IndiferentHash.process_options parameters, :entities, :reverse
-              if reverse
-                kb.parents(name, entities)
-              else
-                kb.children(name, entities)
-              end
-            end
-          end
+      # Process tools
+
+      case tools
+      when Array
+        tools = tools.inject({}) do |acc,definition|
+          IndiferentHash.setup definition
+          name = definition.dig('name') || definition.dig('function', 'name')
+          acc.merge(name => definition)
        end
+      when nil
+        tools = {}
      end
 
+      tools.merge!(LLM.tools messages)
+      tools.merge!(LLM.associations messages)
+
+      if tools.any?
+        parameters[:tools] = tools.values.collect{|obj,definition| Hash === obj ? obj : definition}
+      end
+
+      messages = self.process_input messages
+
      Log.low "Calling openai #{url}: #{Log.fingerprint parameters}}"
-      Log.debug LLM.print messages
 
      parameters[:messages] = LLM.tools_to_openai messages
 
-      response = self.process_response client.chat(parameters: parameters), &block
+      response = self.process_response client.chat(parameters: parameters), tools, &block
 
      res = if response.last[:role] == 'function_call_output'
-        response + self.ask(messages + response, original_options.except(:tool_choice).merge(return_messages: true, tools: parameters[:tools]), &block)
+        response + self.ask(messages + response, original_options.merge(tool_choice: tool_choice_next, return_messages: true, tools: tools ), &block)
      else
        response
      end
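client can now be called without arguments, resolving the url and key from configuration or OPENAI_URL/OPENAI_KEY and applying a 1200-second request timeout, and the follow-up round after a tool call honors tool_choice_next (default :none) so the model answers instead of calling tools again. A closing sketch, assuming the gem's lib directory is on the load path:

    require 'scout'
    require 'scout/llm/backends/openai'

    # url and key resolved from Scout::Config or the environment
    client = LLM::OpenAI.client

    # Reusing the client skips the per-call lookup
    reply = LLM::OpenAI.ask "Ping", client: client, model: "gpt-4.1"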