scout-ai 1.0.0 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. checksums.yaml +4 -4
  2. data/.vimproject +87 -15
  3. data/README.md +296 -0
  4. data/Rakefile +2 -0
  5. data/VERSION +1 -1
  6. data/doc/Agent.md +279 -0
  7. data/doc/Chat.md +258 -0
  8. data/doc/LLM.md +446 -0
  9. data/doc/Model.md +513 -0
  10. data/doc/RAG.md +129 -0
  11. data/lib/scout/llm/agent/chat.rb +48 -1
  12. data/lib/scout/llm/agent/delegate.rb +51 -0
  13. data/lib/scout/llm/agent/iterate.rb +44 -0
  14. data/lib/scout/llm/agent.rb +43 -22
  15. data/lib/scout/llm/ask.rb +47 -7
  16. data/lib/scout/llm/backends/anthropic.rb +147 -0
  17. data/lib/scout/llm/backends/bedrock.rb +1 -1
  18. data/lib/scout/llm/backends/ollama.rb +27 -30
  19. data/lib/scout/llm/backends/openai.rb +36 -41
  20. data/lib/scout/llm/backends/responses.rb +166 -113
  21. data/lib/scout/llm/chat.rb +270 -102
  22. data/lib/scout/llm/embed.rb +4 -4
  23. data/lib/scout/llm/mcp.rb +28 -0
  24. data/lib/scout/llm/parse.rb +1 -0
  25. data/lib/scout/llm/rag.rb +9 -0
  26. data/lib/scout/llm/tools/call.rb +76 -0
  27. data/lib/scout/llm/tools/knowledge_base.rb +159 -0
  28. data/lib/scout/llm/tools/mcp.rb +59 -0
  29. data/lib/scout/llm/tools/workflow.rb +106 -0
  30. data/lib/scout/llm/tools.rb +98 -141
  31. data/lib/scout-ai.rb +1 -0
  32. data/scout-ai.gemspec +31 -18
  33. data/scout_commands/agent/ask +59 -78
  34. data/scout_commands/documenter +148 -0
  35. data/scout_commands/llm/ask +3 -2
  36. data/scout_commands/llm/server +319 -0
  37. data/share/server/chat.html +138 -0
  38. data/share/server/chat.js +468 -0
  39. data/test/scout/llm/backends/test_anthropic.rb +134 -0
  40. data/test/scout/llm/backends/test_ollama.rb +1 -1
  41. data/test/scout/llm/backends/test_openai.rb +45 -6
  42. data/test/scout/llm/backends/test_responses.rb +124 -0
  43. data/test/scout/llm/test_agent.rb +1 -93
  44. data/test/scout/llm/test_ask.rb +3 -1
  45. data/test/scout/llm/test_chat.rb +43 -1
  46. data/test/scout/llm/test_mcp.rb +29 -0
  47. data/test/scout/llm/tools/test_knowledge_base.rb +22 -0
  48. data/test/scout/llm/tools/test_mcp.rb +11 -0
  49. data/test/scout/llm/tools/test_workflow.rb +39 -0
  50. metadata +56 -17
  51. data/README.rdoc +0 -18
  52. data/python/scout_ai/__pycache__/__init__.cpython-310.pyc +0 -0
  53. data/python/scout_ai/__pycache__/__init__.cpython-311.pyc +0 -0
  54. data/python/scout_ai/__pycache__/huggingface.cpython-310.pyc +0 -0
  55. data/python/scout_ai/__pycache__/huggingface.cpython-311.pyc +0 -0
  56. data/python/scout_ai/__pycache__/util.cpython-310.pyc +0 -0
  57. data/python/scout_ai/__pycache__/util.cpython-311.pyc +0 -0
  58. data/python/scout_ai/atcold/plot_lib.py +0 -141
  59. data/python/scout_ai/atcold/spiral.py +0 -27
  60. data/python/scout_ai/huggingface/train/__pycache__/__init__.cpython-310.pyc +0 -0
  61. data/python/scout_ai/huggingface/train/__pycache__/next_token.cpython-310.pyc +0 -0
  62. data/python/scout_ai/language_model.py +0 -70
  63. data/{python/scout_ai/atcold/__init__.py → test/scout/llm/tools/test_call.rb} +0 -0
data/lib/scout/llm/agent/chat.rb CHANGED
@@ -1,7 +1,7 @@
 module LLM
   class Agent
     def start_chat
-      @start_chat ||= Chat.setup []
+      @start_chat ||= Chat.setup([])
     end

     def start(chat=nil)
@@ -9,6 +9,8 @@ module LLM
         (@current_chat || start_chat).annotate chat unless Chat === chat
         @current_chat = chat
       else
+        start_chat = self.start_chat
+        Chat.setup(start_chat) unless Chat === start_chat
         @current_chat = start_chat.branch
       end
     end
@@ -20,5 +22,50 @@ module LLM
     def method_missing(name,...)
       current_chat.send(name, ...)
     end
+
+    def respond(...)
+      self.ask(current_chat, ...)
+    end
+
+
+    def chat(model = nil, options = {})
+      response = ask(current_chat, model, options.merge(return_messages: true))
+      if Array === response
+        current_chat.concat(response)
+        current_chat.answer
+      else
+        current_chat.push({role: :assistant, content: response})
+        response
+      end
+    end
+
+
+    def json(...)
+      current_chat.format :json
+      output = ask(current_chat, ...)
+      obj = JSON.parse output
+      if (Hash === obj) and obj.keys == ['content']
+        obj['content']
+      else
+        obj
+      end
+    end
+
+    def json_format(format, ...)
+      current_chat.format format
+      output = ask(current_chat, ...)
+      obj = JSON.parse output
+      if (Hash === obj) and obj.keys == ['content']
+        obj['content']
+      else
+        obj
+      end
+    end
+
+    def get_previous_response_id
+      msg = current_chat.reverse.find{|msg| msg[:role].to_sym == :previous_response_id }
+      msg.nil? ? nil : msg['content']
+    end
+
   end
 end
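
These helpers drive the agent's current chat. A minimal usage sketch (the prompts are hypothetical and a configured backend is assumed; user reaches the underlying Chat through method_missing):

    agent = LLM::Agent.new
    agent.start
    agent.user "Suggest a name for a Ruby gem"     # forwarded to current_chat
    answer = agent.chat                            # sends the conversation, appends the assistant reply
    agent.user "Now return three options as JSON"
    list = agent.json                              # sets format :json and parses the output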
data/lib/scout/llm/agent/delegate.rb ADDED
@@ -0,0 +1,51 @@
+module LLM
+  class Agent
+
+    def delegate(agent, name, description, &block)
+      @other_options[:tools] ||= {}
+      task_name = "hand_off_to_#{name}"
+
+      block ||= Proc.new do |name, parameters|
+        message = parameters[:message]
+        new_conversation = parameters[:new_conversation]
+        Log.medium "Delegated to #{agent}: " + Log.fingerprint(message)
+        if new_conversation
+          agent.start
+        else
+          agent.purge
+        end
+        agent.user message
+        agent.chat
+      end
+
+      properties = {
+        message: {
+          "type": :string,
+          "description": "Message to pass to the agent"
+        },
+        new_conversation: {
+          "type": :boolean,
+          "description": "Erase conversation history and start a new conversation with this message",
+          "default": false
+        }
+      }
+
+      required_inputs = [:message]
+
+      function = {
+        name: task_name,
+        description: description,
+        parameters: {
+          type: "object",
+          properties: properties,
+          required: required_inputs
+        }
+      }
+
+      definition = IndiferentHash.setup function.merge(type: 'function', function: function)
+
+
+      @other_options[:tools][task_name] = [block, definition]
+    end
+  end
+end
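
A sketch of wiring one agent to hand work off to another (both agents and the description are hypothetical; LLM.agent is the shortcut constructor added in agent.rb below):

    summarizer = LLM.agent
    main = LLM.agent
    main.delegate summarizer, :summarizer, "Delegate summarization requests to this agent"
    # Registers a 'hand_off_to_summarizer' tool whose default block forwards
    # parameters[:message] to the other agent and returns its chat reply;
    # new_conversation: true makes it call agent.start instead of agent.purge.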
data/lib/scout/llm/agent/iterate.rb ADDED
@@ -0,0 +1,44 @@
+module LLM
+  class Agent
+
+    def iterate(prompt = nil, &block)
+      self.endpoint :responses
+      self.user prompt if prompt
+
+      obj = self.json_format({
+        "$schema": "http://json-schema.org/draft-07/schema#",
+        "type": "object",
+        "properties": {
+          "content": {
+            "type": "array",
+            "items": { "type": "string" }
+          }
+        },
+        "required": ["content"],
+        "additionalProperties": false
+      })
+
+      self.option :format, :text
+
+      list = Hash === obj ? obj['content'] : obj
+
+      list.each &block
+    end
+
+    def iterate_dictionary(prompt = nil, &block)
+      self.endpoint :responses
+      self.user prompt if prompt
+
+      dict = self.json_format({
+        name: 'dictionary',
+        type: 'object',
+        properties: {},
+        additionalProperties: {type: :string}
+      })
+
+      self.option :format, :text
+
+      dict.each &block
+    end
+  end
+end
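
iterate constrains the response to a JSON object holding an array of strings and yields each element; iterate_dictionary does the same for a string-valued dictionary. A sketch (the prompt is hypothetical):

    agent = LLM.agent
    agent.iterate "List the SOLID principles, one per item" do |principle|
      puts principle
    end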
data/lib/scout/llm/agent.rb CHANGED
@@ -1,12 +1,17 @@
 require_relative 'ask'

 module LLM
+  def self.agent(...)
+    LLM::Agent.new(...)
+  end
+
   class Agent
-    attr_accessor :workflow, :knowledge_base, :start_chat
+    attr_accessor :workflow, :knowledge_base, :start_chat, :process_exception, :other_options
     def initialize(workflow: nil, knowledge_base: nil, start_chat: nil, **kwargs)
       @workflow = workflow
+      @workflow = Workflow.require_workflow @workflow if String === @workflow
       @knowledge_base = knowledge_base
-      @other_options = kwargs
+      @other_options = IndiferentHash.setup(kwargs.dup)
       @start_chat = start_chat
     end

@@ -44,35 +49,51 @@ You have access to the following databases associating entities:
     end

     # function: takes an array of messages and calls LLM.ask with them
-    def ask(messages, model = nil, options = {})
+    def ask(messages, options = {})
       messages = [messages] unless messages.is_a? Array
       model ||= @model if model

-      tools = []
-      tools += LLM.workflow_tools(workflow) if workflow
-      tools += LLM.knowledge_base_tool_definition(knowledge_base) if knowledge_base and knowledge_base.all_databases.any?
-
-      LLM.ask prompt(messages), @other_options.merge(log_errors: true, tools: tools) do |name,parameters|
-        case name
-        when 'children'
-          parameters = IndiferentHash.setup(parameters)
-          database, entities = parameters.values_at "database", "entities"
-          Log.high "Finding #{entities} children in #{database}"
-          knowledge_base.children(database, entities)
+      tools = options[:tools] || {}
+      tools = tools.merge @other_options[:tools] if @other_options[:tools]
+      options[:tools] = tools
+      begin
+        if workflow || knowledge_base
+          tools.merge!(LLM.workflow_tools(workflow)) if workflow
+          tools.merge!(LLM.knowledge_base_tool_definition(knowledge_base)) if knowledge_base and knowledge_base.all_databases.any?
+          options[:tools] = tools
+          LLM.ask messages, @other_options.merge(log_errors: true).merge(options)
         else
-          if workflow
-            begin
-              Log.high "Calling #{workflow}##{name} with #{Log.fingerprint parameters}"
-              workflow.job(name, parameters).run
-            rescue
-              $!.message
-            end
+          LLM.ask messages, @other_options.merge(log_errors: true).merge(options)
+        end
+      rescue
+        exception = $!
+        if Proc === self.process_exception
+          try_again = self.process_exception.call exception
+          if try_again
+            retry
           else
-            raise "What?"
+            raise exception
           end
+        else
+          raise exception
         end
       end
     end
+
+    def self.load_from_path(path, workflow: nil, knowledge_base: nil, chat: nil)
+      workflow_path = path['workflow.rb'].find
+      knowledge_base_path = path['knowledge_base']
+      chat_path = path['start_chat']
+
+      workflow = Workflow.require_workflow workflow_path if workflow_path.exists?
+      knowledge_base = KnowledgeBase.new knowledge_base_path if knowledge_base_path.exists?
+      chat = Chat.setup LLM.chat(chat_path.find) if chat_path.exists?
+
+      LLM::Agent.new workflow: workflow, knowledge_base: knowledge_base, start_chat: chat
+    end
   end
 end
+
 require_relative 'agent/chat'
+require_relative 'agent/iterate'
+require_relative 'agent/delegate'
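
The new process_exception accessor installs a retry policy around ask: a truthy return value retries the call, anything else re-raises. A sketch (the attempt limit is hypothetical):

    agent = LLM.agent
    attempts = 0
    agent.process_exception = Proc.new do |exception|
      attempts += 1
      attempts < 3    # retry twice before giving up
    end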
data/lib/scout/llm/ask.rb CHANGED
@@ -1,44 +1,84 @@
 require 'scout'
-require_relative 'backends/openai'
-require_relative 'backends/ollama'
-require_relative 'backends/openwebui'
-require_relative 'backends/bedrock'
-require_relative 'backends/relay'
-require_relative 'backends/responses'
+require_relative 'chat'

 module LLM
   def self.ask(question, options = {}, &block)
     messages = LLM.chat(question)
     options = IndiferentHash.add_defaults LLM.options(messages), options

+    agent = IndiferentHash.process_options options, :agent
+
+    if agent
+      agent_file = Scout.workflows[agent]
+
+      agent_file = Scout.chats[agent] unless agent_file.exists?
+
+      agent_file = agent_file.find_with_extension('rb') unless agent_file.exists?
+
+
+      if agent_file.exists?
+        if agent_file.directory?
+          if agent_file.agent.find_with_extension('rb').exists?
+            agent = load agent_file.agent.find_with_extension('rb')
+          else
+            agent = LLM::Agent.load_from_path agent_file
+          end
+        else
+          agent = load agent_file
+        end
+      else
+        raise "Agent not found: #{agent}"
+      end
+      return agent.ask(question, options)
+    end
+
     endpoint, persist = IndiferentHash.process_options options, :endpoint, :persist, persist: true

     endpoint ||= Scout::Config.get :endpoint, :ask, :llm, env: 'ASK_ENDPOINT,LLM_ENDPOINT'
     if endpoint && Scout.etc.AI[endpoint].exists?
       options = IndiferentHash.add_defaults options, Scout.etc.AI[endpoint].yaml
+    elsif endpoint && endpoint != ""
+      raise "Endpoint not found #{endpoint}"
     end

-    Persist.persist(endpoint, :json, prefix: "LLM ask", other: options.merge(messages: messages), persist: persist) do
+    Log.high Log.color :green, "Asking #{endpoint || 'client'}:\n" + LLM.print(messages)
+    tools = options[:tools]
+    Log.high "Tools: #{Log.fingerprint tools.keys}}" if tools
+
+    res = Persist.persist(endpoint, :json, prefix: "LLM ask", other: options.merge(messages: messages), persist: persist) do
       backend = IndiferentHash.process_options options, :backend
       backend ||= Scout::Config.get :backend, :ask, :llm, env: 'ASK_BACKEND,LLM_BACKEND', default: :openai

       case backend
       when :openai, "openai"
+        require_relative 'backends/openai'
         LLM::OpenAI.ask(messages, options, &block)
+      when :anthropic, "anthropic"
+        require_relative 'backends/anthropic'
+        LLM::Anthropic.ask(messages, options, &block)
       when :responses, "responses"
+        require_relative 'backends/responses'
         LLM::Responses.ask(messages, options, &block)
       when :ollama, "ollama"
+        require_relative 'backends/ollama'
         LLM::OLlama.ask(messages, options, &block)
       when :openwebui, "openwebui"
+        require_relative 'backends/openwebui'
         LLM::OpenWebUI.ask(messages, options, &block)
       when :relay, "relay"
+        require_relative 'backends/relay'
         LLM::Relay.ask(messages, options, &block)
       when :bedrock, "bedrock"
+        require_relative 'backends/bedrock'
         LLM::Bedrock.ask(messages, options, &block)
       else
         raise "Unknown backend: #{backend}"
       end
     end
+
+    Log.high Log.color :blue, "Response:\n" + LLM.print(res)
+
+    res
   end

   def self.workflow_ask(workflow, question, options = {})
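
Backends are now required lazily at dispatch time, and the new :agent option routes the question through a saved agent looked up under Scout.workflows or Scout.chats before any backend is touched. A sketch of both paths (backend choice and agent name are hypothetical):

    LLM.ask "What is the capital of France?", backend: :ollama
    LLM.ask "Review this plan", agent: 'planner'    # raises "Agent not found: planner" if it cannot be resolved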
data/lib/scout/llm/backends/anthropic.rb ADDED
@@ -0,0 +1,147 @@
+require 'scout'
+require 'anthropic'
+require_relative '../chat'
+
+module LLM
+  module Anthropic
+
+    def self.client(url = nil, key = nil, log_errors = false, request_timeout: 1200)
+      url ||= Scout::Config.get(:url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_URL')
+      key ||= LLM.get_url_config(:key, url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_KEY')
+      Object::Anthropic::Client.new(access_token:key, log_errors: log_errors, uri_base: url, request_timeout: request_timeout)
+    end
+
+    def self.process_input(messages)
+      messages.collect do |message|
+        if message[:role] == 'image'
+          Log.warn "Endpoint 'anthropic' does not support images, try 'responses': #{message[:content]}"
+          next
+        else
+          message
+        end
+      end.flatten.compact
+    end
+
+    def self.process_response(response, tools, &block)
+      Log.debug "Respose: #{Log.fingerprint response}"
+
+      response['content'].collect do |output|
+        case output['type']
+        when 'text'
+          IndiferentHash.setup({role: :assistant, content: output['text']})
+        when 'reasoning'
+          next
+        when 'tool_use'
+          LLM.process_calls(tools, [output], &block)
+        when 'web_search_call'
+          next
+        else
+          eee response
+          eee output
+          raise
+        end
+      end.compact.flatten
+    end
+
+
+    def self.ask(question, options = {}, &block)
+      original_options = options.dup
+
+      messages = LLM.chat(question)
+      options = options.merge LLM.options messages
+
+      options = IndiferentHash.add_defaults options, max_tokens: 1000
+
+      client, url, key, model, log_errors, return_messages, format, tool_choice_next, previous_response_id, tools = IndiferentHash.process_options options,
+        :client, :url, :key, :model, :log_errors, :return_messages, :format, :tool_choice_next, :previous_response_id, :tools,
+        log_errors: true, tool_choice_next: :none
+
+      if client.nil?
+        url ||= Scout::Config.get(:url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_URL')
+        key ||= LLM.get_url_config(:key, url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_KEY')
+        client = self.client url, key, log_errors
+      end
+
+      if model.nil?
+        url ||= Scout::Config.get(:url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_URL')
+        model ||= LLM.get_url_config(:model, url, :openai_ask, :ask, :anthropic, env: 'ANTHROPIC_MODEL', default: "claude-sonnet-4-20250514")
+      end
+
+      case format.to_sym
+      when :json, :json_object
+        options[:response_format] = {type: 'json_object'}
+      else
+        options[:response_format] = {type: format}
+      end if format
+
+      parameters = options.merge(model: model)
+
+      # Process tools
+
+      case tools
+      when Array
+        tools = tools.inject({}) do |acc,definition|
+          IndiferentHash.setup definition
+          name = definition.dig('name') || definition.dig('function', 'name')
+          acc.merge(name => definition)
+        end
+      when nil
+        tools = {}
+      end
+
+      tools.merge!(LLM.tools messages)
+      tools.merge!(LLM.associations messages)
+
+      if tools.any?
+        parameters[:tools] = tools.values.collect{|obj,definition| Hash === obj ? obj : definition}
+      end
+
+      parameters[:tools] = parameters[:tools].collect do |info|
+        IndiferentHash.setup(info)
+        info[:type] = 'custom' if info[:type] == 'function'
+        info[:input_schema] = info.delete('parameters') if info["parameters"]
+        info
+      end if parameters[:tools]
+
+      messages = self.process_input messages
+
+      Log.low "Calling anthropic #{url}: #{Log.fingerprint parameters}}"
+
+      parameters[:messages] = LLM.tools_to_anthropic messages
+
+      response = self.process_response client.messages(parameters: parameters), tools, &block
+
+      res = if response.last[:role] == 'function_call_output'
+        #response + self.ask(messages + response, original_options.merge(tool_choice: tool_choice_next, return_messages: true, tools: tools ), &block)
+        response + self.ask(messages + response, original_options.merge(return_messages: true, tools: tools ), &block)
+      else
+        response
+      end
+
+      if return_messages
+        res
+      else
+        res.last['content']
+      end
+    end
+
+    def self.embed(text, options = {})
+
+      client, url, key, model, log_errors = IndiferentHash.process_options options, :client, :url, :key, :model, :log_errors
+
+      if client.nil?
+        url ||= Scout::Config.get(:url, :openai_embed, :embed, :anthropic, env: 'ANTHROPIC_URL')
+        key ||= LLM.get_url_config(:key, url, :openai_embed, :embed, :anthropic, env: 'ANTHROPIC_KEY')
+        client = self.client url, key, log_errors
+      end
+
+      if model.nil?
+        url ||= Scout::Config.get(:url, :openai_embed, :embed, :anthropic, env: 'ANTHROPIC_URL')
+        model ||= LLM.get_url_config(:model, url, :openai_embed, :embed, :anthropic, env: 'ANTHROPIC_MODEL', default: "gpt-3.5-turbo")
+      end
+
+      response = client.embeddings(parameters: {input: text, model: model})
+      response.dig('data', 0, 'embedding')
+    end
+  end
+end
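
A sketch of calling the new backend directly (url and key fall back to ANTHROPIC_URL / ANTHROPIC_KEY via Scout::Config; the model default is the one in the code above):

    ENV['ANTHROPIC_KEY'] = 'sk-ant-...'             # or pass key: explicitly
    puts LLM::Anthropic.ask "Say hello in French"   # defaults to claude-sonnet-4-20250514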
data/lib/scout/llm/backends/bedrock.rb CHANGED
@@ -45,7 +45,7 @@ module LLM
       model ||= Scout::Config.get(:model, :bedrock_ask, :ask, :bedrock, env: 'BEDROCK_MODEL_ID')
       type ||= Scout::Config.get(:type, model, default: :messages)

-      role = IndiferentHash.process_options options, :role
+      role, previous_response_id, tools = IndiferentHash.process_options options, :role, :previous_response_id, :tools
       messages = LLM.parse(question, role)

       case type.to_sym
data/lib/scout/llm/backends/ollama.rb CHANGED
@@ -17,7 +17,7 @@ module LLM
    end


-    def self.process_response(responses, &block)
+    def self.process_response(responses, tools, &block)
      responses.collect do |response|
        Log.debug "Respose: #{Log.fingerprint response}"

@@ -26,7 +26,7 @@ module LLM
        response.dig("message", "tool_calls")

        if tool_calls && tool_calls.any?
-          LLM.call_tools tool_calls, &block
+          LLM.process_calls tools, tool_calls, &block
        else
          [message]
        end
@@ -38,11 +38,9 @@ module LLM

      messages = LLM.chat(question)
      options = options.merge LLM.options messages
-      tools = LLM.tools messages
-      associations = LLM.associations messages

-      client, url, key, model, return_messages, format, stream = IndiferentHash.process_options options,
-        :client, :url, :key, :model, :return_messages, :format, :stream,
+      client, url, key, model, return_messages, format, stream, previous_response_id, tools = IndiferentHash.process_options options,
+        :client, :url, :key, :model, :return_messages, :format, :stream, :previous_response_id, :tools,
        stream: false

      if client.nil?
@@ -66,40 +64,39 @@ module LLM

      parameters = options.merge(model: model)

-      if tools.any? || associations.any?
-        parameters[:tools] = []
-        parameters[:tools] += tools.values.collect{|a| a.last } if tools
-        parameters[:tools] += associations.values.collect{|a| a.last } if associations
-        if not block_given?
-          block = Proc.new do |name,parameters|
-            IndiferentHash.setup parameters
-            if tools[name]
-              workflow = tools[name].first
-              jobname = parameters.delete :jobname
-              workflow.job(name, jobname, parameters).run
-            else
-              kb = associations[name].first
-              entities, reverse = IndiferentHash.process_options parameters, :entities, :reverse
-              if reverse
-                kb.parents(name, entities)
-              else
-                kb.children(name, entities)
-              end
-            end
-          end
+      # Process tools
+
+      case tools
+      when Array
+        tools = tools.inject({}) do |acc,definition|
+          IndiferentHash.setup definition
+          name = definition.dig('name') || definition.dig('function', 'name')
+          acc.merge(name => definition)
        end
+      when nil
+        tools = {}
+      end
+
+      tools.merge!(LLM.tools messages)
+      tools.merge!(LLM.associations messages)
+
+      if tools.any?
+        parameters[:tools] = LLM.tool_definitions_to_ollama tools
      end

-      Log.low "Calling client with parameters #{Log.fingerprint parameters}\n#{LLM.print messages}"
+      Log.low "Calling ollama #{url}: #{Log.fingerprint(parameters.except(:tools))}}"
+      Log.medium "Tools: #{Log.fingerprint tools.keys}}" if tools

      parameters[:messages] = LLM.tools_to_ollama messages

      parameters[:stream] = stream

-      response = self.process_response client.chat(parameters), &block
+      response = self.process_response client.chat(parameters), tools, &block

      res = if response.last[:role] == 'function_call_output'
-        response + self.ask(messages + response, original_options.except(:tool_choice).merge(return_messages: true, tools: parameters[:tools]), &block)
+        #response + self.ask(messages + response, original_options.except(:tool_choice).merge(return_messages: true, tools: tools), &block)
+        # This version seems to keep the original message from getting forgotten
+        response + self.ask(response + messages, original_options.except(:tool_choice).merge(return_messages: true, tools: tools), &block)
      else
        response
      end