scout-ai 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. checksums.yaml +7 -0
  2. data/.document +5 -0
  3. data/.vimproject +61 -0
  4. data/LICENSE +20 -0
  5. data/LICENSE.txt +20 -0
  6. data/README.rdoc +18 -0
  7. data/Rakefile +44 -0
  8. data/VERSION +1 -0
  9. data/bin/scout-ai +5 -0
  10. data/lib/scout/llm/agent.rb +78 -0
  11. data/lib/scout/llm/ask.rb +50 -0
  12. data/lib/scout/llm/backends/huggingface.rb +67 -0
  13. data/lib/scout/llm/backends/ollama.rb +103 -0
  14. data/lib/scout/llm/backends/openai.rb +86 -0
  15. data/lib/scout/llm/backends/openwebui.rb +63 -0
  16. data/lib/scout/llm/backends/relay.rb +36 -0
  17. data/lib/scout/llm/embed.rb +31 -0
  18. data/lib/scout/llm/parse.rb +33 -0
  19. data/lib/scout/llm/rag.rb +16 -0
  20. data/lib/scout/llm/tools.rb +104 -0
  21. data/lib/scout/llm/utils.rb +35 -0
  22. data/lib/scout-ai.rb +7 -0
  23. data/questions/coach +2 -0
  24. data/scout_commands/agent/ask +70 -0
  25. data/scout_commands/llm/ask +56 -0
  26. data/scout_commands/llm/process +50 -0
  27. data/scout_commands/llm/template +26 -0
  28. data/test/data/person/brothers +4 -0
  29. data/test/data/person/identifiers +10 -0
  30. data/test/data/person/marriages +3 -0
  31. data/test/data/person/parents +6 -0
  32. data/test/scout/llm/backends/test_huggingface.rb +73 -0
  33. data/test/scout/llm/backends/test_ollama.rb +72 -0
  34. data/test/scout/llm/backends/test_openai.rb +68 -0
  35. data/test/scout/llm/backends/test_openwebui.rb +57 -0
  36. data/test/scout/llm/backends/test_relay.rb +10 -0
  37. data/test/scout/llm/test_agent.rb +114 -0
  38. data/test/scout/llm/test_ask.rb +63 -0
  39. data/test/scout/llm/test_embed.rb +0 -0
  40. data/test/scout/llm/test_parse.rb +19 -0
  41. data/test/scout/llm/test_rag.rb +30 -0
  42. data/test/scout/llm/test_tools.rb +54 -0
  43. data/test/scout/llm/test_utils.rb +10 -0
  44. data/test/test_helper.rb +68 -0
  45. metadata +86 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+ metadata.gz: 36833eaae86ac11476b39ac0f188cfa2106a9cad810fcdd53de37a9b356e15e0
+ data.tar.gz: 014a3927cb41bd99b28b866c442bacca65c21a27b79491e3bd790c1051115f4b
+ SHA512:
+ metadata.gz: 0d4ebfecd2e6ed94901b955536d6c6a36817ac7aa1eaafcfd657c563185b69c5eb03293aef2446a0444842c1d263dc79a951342e57fd3704f96b931ef02cb027
+ data.tar.gz: 45ed2228cca6bfeea3d0161ff766b1abfb2137def68eb1c3c142ee9b32a234a9b95223612026b3683d0f72aba308853e1bd9d92ca4e5cdbfdb9500555810a4fa
data/.document ADDED
@@ -0,0 +1,5 @@
+ lib/**/*.rb
+ bin/*
+ -
+ features/**/*.feature
+ LICENSE.txt
data/.vimproject ADDED
@@ -0,0 +1,61 @@
+ scout-ai=/$PWD filter="*.rb *.rake Rakefile *.rdoc *.R *.sh *.js *.haml *.sass *.txt *.conf" {
+ Rakefile
+ bin=bin filter="*"{
+ scout-ai
+ }
+ etc=etc filter="*"{
+ config
+ }
+ lib=lib {
+ scout-ai.rb
+ scout=scout{
+ llm=llm{
+ utils.rb
+ parse.rb
+ backends=backends{
+ ollama.rb
+ openai.rb
+ openwebui.rb
+ huggingface.rb
+ relay.rb
+ }
+ ask.rb
+
+ embed.rb
+
+ rag.rb
+
+ tools.rb
+ agent.rb
+ }
+ }
+ }
+ test=test {
+ data=data filter="*"{
+ person=person{
+ identifiers
+ brothers
+ marriages
+ parents
+ }
+ }
+ test_helper.rb
+ }
+ data=data filter="*"{
+ }
+ scout_commands=scout_commands filter="*"{
+ llm=llm{
+ ask
+ template
+ process
+ }
+ agent=agent{
+ ask
+ }
+ }
+ questions=questions filter="*"{
+ coach
+ evaluator
+ templater
+ }
+ }
data/LICENSE ADDED
@@ -0,0 +1,20 @@
+ Copyright (c) 2016-2017 Miguel Vázquez García
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/LICENSE.txt ADDED
@@ -0,0 +1,20 @@
+ Copyright (c) 2025 Miguel Vazquez
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.rdoc ADDED
@@ -0,0 +1,18 @@
+ = scout-ai
+
+ Description goes here.
+
+ == Contributing to scout-ai
+
+ * Check out the latest master to make sure the feature hasn't been implemented or the bug hasn't been fixed yet.
+ * Check out the issue tracker to make sure someone already hasn't requested it and/or contributed it.
+ * Fork the project.
+ * Start a feature/bugfix branch.
+ * Commit and push until you are happy with your contribution.
+ * Make sure to add tests for it. This is important so I don't break it in a future version unintentionally.
+ * Please try not to mess with the Rakefile, version, or history. If you want to have your own version, or is otherwise necessary, that is fine, but please isolate to its own commit so I can cherry-pick around it.
+
+ == Copyright
+
+ Copyright (c) 2025 Miguel Vazquez. See LICENSE.txt for
+ further details.
data/Rakefile ADDED
@@ -0,0 +1,44 @@
+ # encoding: utf-8
+
+ ENV["BRANCH"] = 'main'
+
+ require 'rubygems'
+ require 'rake'
+ require 'juwelier'
+ Juwelier::Tasks.new do |gem|
+ # gem is a Gem::Specification... see http://guides.rubygems.org/specification-reference/ for more options
+ gem.name = "scout-ai"
+ gem.homepage = "http://github.com/mikisvaz/scout-ai"
+ gem.license = "MIT"
+ gem.summary = %Q{AI gear for scouts}
+ gem.description = %Q{assorted functionalities to help scouts use AI}
+ gem.email = "mikisvaz@gmail.com"
+ gem.authors = ["Miguel Vazquez"]
+
+ # dependencies defined in Gemfile
+ end
+ Juwelier::RubygemsDotOrgTasks.new
+ require 'rake/testtask'
+ Rake::TestTask.new(:test) do |test|
+ test.libs << 'lib' << 'test'
+ test.pattern = 'test/**/test_*.rb'
+ test.verbose = true
+ end
+
+ desc "Code coverage detail"
+ task :simplecov do
+ ENV['COVERAGE'] = "true"
+ Rake::Task['test'].execute
+ end
+
+ task :default => :test
+
+ require 'rdoc/task'
+ Rake::RDocTask.new do |rdoc|
+ version = File.exist?('VERSION') ? File.read('VERSION') : ""
+
+ rdoc.rdoc_dir = 'rdoc'
+ rdoc.title = "scout-ai #{version}"
+ rdoc.rdoc_files.include('README*')
+ rdoc.rdoc_files.include('lib/**/*.rb')
+ end
data/VERSION ADDED
@@ -0,0 +1 @@
+ 0.2.0
data/bin/scout-ai ADDED
@@ -0,0 +1,5 @@
+ #!/usr/bin/env ruby
+
+ require 'scout-ai'
+
+ load Scout.bin.scout.find
data/lib/scout/llm/agent.rb ADDED
@@ -0,0 +1,78 @@
+ require_relative 'ask'
+
+ module LLM
+ class Agent
+ attr_accessor :system, :workflow, :knowledge_base
+ def initialize(system = nil, workflow: nil, knowledge_base: nil, model: nil, **kwargs)
+ @system = system
+ @workflow = workflow
+ @knowledge_base = knowledge_base
+ @model = model
+ @other_options = kwargs
+ end
+
+ def format_message(message, prefix = "user")
+ message.split(/\n\n+/).reject{|line| line.empty? }.collect do |line|
+ prefix + "\t" + line.gsub("\n", ' ')
+ end * "\n"
+ end
+
+ def system_prompt
+ system = @system
+ system = [system] unless system.nil? || system.is_a?(Array)
+
+ if @knowledge_base
+ system << <<-EOF
+ You have access to the following databases associating entities:
+ EOF
+
+ knowledge_base.all_databases.each do |database|
+ system << <<-EOF.strip + (knowledge_base.undirected(database) ? ". Undirected" : "")
+ * #{database}: #{knowledge_base.source(database)} => #{knowledge_base.target(database)}
+ EOF
+ end
+ end
+
+ system * "\n"
+ end
+
+ def prompt(messages)
+ if system_prompt
+ [format_message(system_prompt, "system"), messages.collect{|m| format_message(m)}.flatten] * "\n"
+ else
+ messages.collect{|m| format_message(m)}.flatten
+ end
+ end
+
+ # function: takes an array of messages and calls LLM.ask with them
+ def ask(messages, model = nil)
+ messages = [messages] unless messages.is_a? Array
+ model ||= @model
+
+ tools = []
+ tools += LLM.workflow_tools(workflow) if workflow
+ tools += LLM.knowledge_base_tool_definition(knowledge_base) if knowledge_base
+
+ LLM.ask prompt(messages), @other_options.merge(model: model, log_errors: true, tools: tools) do |name,parameters|
+ case name
+ when 'children'
+ parameters = IndiferentHash.setup(parameters)
+ database, entities = parameters.values_at "database", "entities"
+ Log.high "Finding #{entities} children in #{database}"
+ knowledge_base.children(database, entities).target
+ else
+ if workflow
+ begin
+ Log.high "Calling #{workflow}##{name} with #{Log.fingerprint parameters}"
+ workflow.job(name, parameters).run
+ rescue
+ $!.message
+ end
+ else
+ raise "What?"
+ end
+ end
+ end
+ end
+ end
+ end
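
Below is a minimal usage sketch (not part of the gem diff) based on the agent code above. The workflow and knowledge base objects (my_workflow, kb) and the model name are hypothetical placeholders; the agent turns workflow tasks and the knowledge base 'children' lookup into tools and forwards everything else to LLM.ask.

  require 'scout/llm/agent'

  # my_workflow and kb are assumed to be a scout Workflow and KnowledgeBase set up elsewhere
  agent = LLM::Agent.new "You are a data analysis assistant",
    workflow: my_workflow,      # tasks become callable tools via LLM.workflow_tools
    knowledge_base: kb,         # adds a 'children' tool over the listed databases
    model: "gpt-4o"             # example model, passed through to LLM.ask

  # Messages are split on blank lines and prefixed with a role before being sent
  puts agent.ask("Which entities are children of TP53 in the interactions database?")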
data/lib/scout/llm/ask.rb ADDED
@@ -0,0 +1,50 @@
+ require 'scout'
+ require_relative 'backends/openai'
+ require_relative 'backends/ollama'
+ require_relative 'backends/openwebui'
+ require_relative 'backends/relay'
+
+ module LLM
+ def self.ask(question, options = {}, &block)
+ endpoint = IndiferentHash.process_options options, :endpoint
+
+ endpoint ||= Scout::Config.get :endpoint, :ask, :llm, env: 'ASK_ENDPOINT,LLM_ENDPOINT', default: :openai
+ if endpoint && Scout.etc.AI[endpoint].exists?
+ options = IndiferentHash.add_defaults options, Scout.etc.AI[endpoint].yaml
+ end
+
+ backend = IndiferentHash.process_options options, :backend
+ backend ||= Scout::Config.get :backend, :ask, :llm, env: 'ASK_BACKEND,LLM_BACKEND', default: :openai
+
+
+ case backend
+ when :openai, "openai"
+ LLM::OpenAI.ask(question, options, &block)
+ when :ollama, "ollama"
+ LLM::OLlama.ask(question, options, &block)
+ when :openwebui, "openwebui"
+ LLM::OpenWebUI.ask(question, options, &block)
+ when :relay, "relay"
+ LLM::Relay.ask(question, options, &block)
+ else
+ raise "Unknown backend: #{backend}"
+ end
+ end
+
+ def self.workflow_ask(workflow, question, options = {})
+ workflow_tools = LLM.workflow_tools(workflow)
+ self.ask(question, options.merge(tools: workflow_tools)) do |task_name,parameters|
+ workflow.job(task_name, parameters).run
+ end
+ end
+
+ def self.knowledge_base_ask(knowledge_base, question, options = {})
+ knowledge_base_tools = LLM.knowledge_base_tool_definition(knowledge_base)
+ self.ask(question, options.merge(tools: knowledge_base_tools)) do |task_name,parameters|
+ parameters = IndiferentHash.setup(parameters)
+ database, entities = parameters.values_at "database", "entities"
+ Log.info "Finding #{entities} children in #{database}"
+ knowledge_base.children(database, entities).collect{|e| e.sub('~', '=>')}
+ end
+ end
+ end
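
A usage sketch (not part of the diff) for the dispatcher above. LLM.ask picks the backend from the :backend option, Scout::Config, or the ASK_BACKEND/LLM_BACKEND environment variables; workflow_ask exposes a workflow's tasks as tools. The role-tab-content question format follows the convention used by LLM::Agent; the model name, workflow, and question are example values.

  require 'scout/llm/ask'

  # Explicit backend selection; "mistral" is an example model name
  puts LLM.ask("user\tSummarize this package in one sentence.",
               backend: :ollama, model: "mistral")

  # Let a workflow answer tool calls: each call runs workflow.job(task_name, parameters)
  # my_workflow is a hypothetical scout Workflow
  puts LLM.workflow_ask(my_workflow, "user\tRun the relevant task for sample S1 and report the result.")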
data/lib/scout/llm/backends/huggingface.rb ADDED
@@ -0,0 +1,67 @@
+ require_relative '../parse'
+ require_relative '../tools'
+
+ module LLM
+ module Huggingface
+
+ def self.model(model_options)
+ require 'rbbt-util'
+ require 'rbbt/vector/model/huggingface'
+
+ model, task, checkpoint, dir = IndiferentHash.process_options model_options, :model, :task, :checkpoint, :dir
+ model ||= Scout::Config.get(:model, :huggingface, env: 'HUGGINGFACE_MODEL,HF_MODEL')
+
+ HuggingfaceModel.new task, model, dir, model_options
+ end
+
+ def self.ask(question, options = {}, &block)
+ model_options = IndiferentHash.pull_keys options, :model
+ model_options = IndiferentHash.add_defaults model_options, :task => "CausalLM"
+
+ model = self.model model_options
+
+ messages = LLM.parse(question)
+
+ system = []
+ prompt = []
+ messages.each do |message|
+ role, content = message.values_at :role, :content
+ if role == 'system'
+ system << content
+ else
+ prompt << content
+ end
+ end
+
+ parameters = options.merge(messages: messages)
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+
+ response = model.eval(messages)
+ message = response[-1]
+ while message["role"] == "assistant" && message["tool_calls"]
+ messages << message
+
+ message["tool_calls"].each do |tool_call|
+ response_message = LLM.tool_response(tool_call, &block)
+ messages << response_message
+ end
+
+ parameters[:messages] = messages
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+ response = model.eval(parameters)
+ message = response[-1]
+ end
+
+ message["content"]
+ end
+
+ def self.embed(text, options = {})
+ model_options = IndiferentHash.pull_keys options, :model
+ model_options = IndiferentHash.add_defaults model_options, :task => "Embedding"
+
+ model = self.model model_options
+
+ (Array === text) ? model.eval_list(text) : model.eval(text)
+ end
+ end
+ end
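
A sketch of the Huggingface backend above (not part of the diff). It assumes rbbt-util and its HuggingfaceModel class are installed; the checkpoint name is an example, supplied through the HUGGINGFACE_MODEL environment variable that Scout::Config reads when no model option is given.

  require 'scout/llm/backends/huggingface'

  ENV['HUGGINGFACE_MODEL'] = "sentence-transformers/all-MiniLM-L6-v2"  # example checkpoint

  vector  = LLM::Huggingface.embed("scout-ai adds LLM helpers to scout")  # single text goes through eval
  vectors = LLM::Huggingface.embed(["first text", "second text"])         # arrays go through eval_list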
data/lib/scout/llm/backends/ollama.rb ADDED
@@ -0,0 +1,103 @@
+ require 'ollama-ai'
+ require_relative '../parse'
+ require_relative '../tools'
+ require_relative '../utils'
+
+ module LLM
+ module OLlama
+ def self.client(url, key = nil)
+ Ollama.new(
+ credentials: {
+ address: url,
+ bearer_token: key
+ },
+ options: { stream: false, debug: true }
+ )
+ end
+
+ def self.ask(question, options = {}, &block)
+
+ client, url, key, model = IndiferentHash.process_options options, :client, :url, :key, :model
+
+ if client.nil?
+ url ||= Scout::Config.get(:url, :ollama_ask, :ask, :ollama, env: 'OLLAMA_URL', default: "http://localhost:11434")
+ key ||= LLM.get_url_config(:key, url, :ollama_ask, :ask, :ollama, env: 'OLLAMA_KEY')
+ client = self.client url, key
+ end
+
+ if model.nil?
+ url ||= Scout::Config.get(:url, :ollama_ask, :ask, :ollama, env: 'OLLAMA_URL', default: "http://localhost:11434")
+ model ||= LLM.get_url_config(:model, url, :ollama_ask, :ask, :ollama, env: 'OLLAMA_MODEL', default: "mistral")
+ end
+
+ mode = IndiferentHash.process_options options, :mode
+
+ messages = LLM.parse(question)
+
+ system = []
+ prompt = []
+ messages.each do |message|
+ role, content = message.values_at :role, :content
+ if role == 'system'
+ system << content
+ else
+ prompt << content
+ end
+ end
+
+ case mode
+ when :chat, 'chat'
+ parameters = options.merge(model: model, messages: messages)
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+
+ response = client.chat(parameters)
+ response.collect do |choice|
+ message=choice['message']
+ while message["role"] == "assistant" && message["tool_calls"]
+ messages << message
+
+ message["tool_calls"].each do |tool_call|
+ response_message = LLM.tool_response(tool_call, &block)
+ messages << response_message
+ end
+
+ parameters[:messages] = messages
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+ response = client.chat(parameters)
+
+ message = response[0]['message']
+ end
+
+ message["content"]
+ end * ""
+ else
+ parameters = options.merge(model: model, prompt: prompt * "\n", system: system*"\n")
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+ response = client.generate(parameters)
+ response.collect{|e| e['response']} * ""
+ end
+ end
+
+ def self.embed(text, options = {})
+
+ client, url, key, model = IndiferentHash.process_options options, :client, :url, :key, :model
+
+ if client.nil?
+ url ||= Scout::Config.get(:url, :ollama_embed, :embed, :ollama, env: 'OLLAMA_URL', default: "http://localhost:11434")
+ key ||= LLM.get_url_config(:key, url, :ollama_embed, :embed, :ollama, env: 'OLLAMA_KEY')
+ client = self.client url, key
+ end
+
+ if model.nil?
+ url ||= Scout::Config.get(:url, :ollama_embed, :embed, :ollama, env: 'OLLAMA_URL', default: "http://localhost:11434")
+ model ||= LLM.get_url_config(:model, url, :ollama_embed, :embed, :ollama, env: 'OLLAMA_MODEL', default: "mistral")
+ end
+
+ parameters = { input: text, model: model }
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+ embeddings = client.request('api/embed', parameters)
+
+ Array === text ? embeddings.first['embeddings'] : embeddings.first['embeddings'].first
+ end
+ end
+ end
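
A sketch of the Ollama backend above (not part of the diff). With no options it targets http://localhost:11434 and the "mistral" model, per the defaults in the code; mode: :chat uses the chat endpoint, anything else goes through generate. The prompts are example values.

  require 'scout/llm/backends/ollama'

  # Chat-style request against a local Ollama server (defaults shown in the code above)
  puts LLM::OLlama.ask("user\tWhat does RAG stand for?", mode: :chat)

  # Plain generate call and an embedding request
  puts LLM::OLlama.ask("user\tGive me a one-line haiku about diffs.")
  vector = LLM::OLlama.embed("retrieval augmented generation")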
data/lib/scout/llm/backends/openai.rb ADDED
@@ -0,0 +1,86 @@
+ require 'scout'
+ require 'openai'
+ require_relative '../parse'
+ require_relative '../tools'
+ require_relative '../utils'
+
+ module LLM
+ module OpenAI
+
+ def self.client(url, key, log_errors = false)
+ Object::OpenAI::Client.new(access_token:key, log_errors: log_errors, uri_base: url)
+ end
+
+ def self.ask(question, options = {}, &block)
+
+ client, url, key, model, log_errors = IndiferentHash.process_options options, :client, :url, :key, :model, :log_errors
+
+ if client.nil?
+ url ||= Scout::Config.get(:url, :openai_ask, :ask, :openai, env: 'OPENAI_URL')
+ key ||= LLM.get_url_config(:key, url, :openai_ask, :ask, :openai, env: 'OPENAI_KEY')
+ client = self.client url, key, log_errors
+ end
+
+ if model.nil?
+ url ||= Scout::Config.get(:url, :openai_ask, :ask, :openai, env: 'OPENAI_URL')
+ model ||= LLM.get_url_config(:model, url, :openai_ask, :ask, :openai, env: 'OPENAI_MODEL', default: "gpt-3.5-turbo")
+ end
+
+ role = IndiferentHash.process_options options, :role
+
+ messages = LLM.parse(question, role)
+
+ parameters = options.merge(model: model, messages: messages)
+
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+
+ response = client.chat(parameters: parameters)
+ Log.debug "Respose: #{Log.fingerprint response}"
+ message = response.dig("choices", 0, "message")
+ tool_calls = response.dig("choices", 0, "tool_calls") ||
+ response.dig("choices", 0, "message", "tool_calls")
+
+ parameters.delete :tool_choice
+
+ while tool_calls && tool_calls.any?
+ messages << message
+
+ cpus = Scout::Config.get :cpus, :tool_calling, default: 3
+ tool_calls.each do |tool_call|
+ response_message = LLM.tool_response(tool_call, &block)
+ messages << response_message
+ end
+
+ parameters[:messages] = messages.compact
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+ response = client.chat( parameters: parameters)
+ Log.debug "Respose: #{Log.fingerprint response}"
+
+ message = response.dig("choices", 0, "message")
+ tool_calls = response.dig("choices", 0, "tool_calls") ||
+ response.dig("choices", 0, "message", "tool_calls")
+ end
+
+ message.dig("content")
+ end
+
+ def self.embed(text, options = {})
+
+ client, url, key, model, log_errors = IndiferentHash.process_options options, :client, :url, :key, :model, :log_errors
+
+ if client.nil?
+ url ||= Scout::Config.get(:url, :openai_embed, :embed, :openai, env: 'OPENAI_URL')
+ key ||= LLM.get_url_config(:key, url, :openai_embed, :embed, :openai, env: 'OPENAI_KEY')
+ client = self.client url, key, log_errors
+ end
+
+ if model.nil?
+ url ||= Scout::Config.get(:url, :openai_embed, :embed, :openai, env: 'OPENAI_URL')
+ model ||= LLM.get_url_config(:model, url, :openai_embed, :embed, :openai, env: 'OPENAI_MODEL', default: "gpt-3.5-turbo")
+ end
+
+ response = client.embeddings(parameters: {input: text, model: model})
+ response.dig('data', 0, 'embedding')
+ end
+ end
+ end
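
A sketch of the OpenAI backend above (not part of the diff). The key and model resolve from Scout::Config or the OPENAI_KEY / OPENAI_MODEL environment variables, with "gpt-3.5-turbo" as the coded default; the embedding model below is an example value.

  require 'scout/llm/backends/openai'

  ENV['OPENAI_KEY'] ||= "sk-..."   # placeholder; supply a real key via env or Scout::Config

  puts LLM::OpenAI.ask("user\tName three uses of text embeddings.")

  vector = LLM::OpenAI.embed("a sentence to embed", model: "text-embedding-3-small")  # example model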
data/lib/scout/llm/backends/openwebui.rb ADDED
@@ -0,0 +1,63 @@
+ require 'scout'
+ require 'openai'
+ require 'rest-client'
+ require_relative '../parse'
+ require_relative '../tools'
+ require_relative '../utils'
+
+ module LLM
+ module OpenWebUI
+
+ def self.rest(method, base_url, key, action, options = {})
+ url = File.join(base_url, action.to_s)
+ headers = {"Authorization" => "Bearer #{key}", "Content-Type" => "application/json"}
+ response = case method.to_sym
+ when :post
+ #RestClient.send(method, url, options, {content_type: "application/json", accept: "application/json", Authorization: "Bearer #{key}"})
+ iii [url, options, headers]
+ RestClient.post(url, options.to_json, headers)
+ else
+ RestClient.send(method, url, {content_type: "application/json", accept: "application/json", "Authorization" => "Bearer #{key}"})
+ end
+ JSON.parse(response.body)
+ end
+
+ def self.ask(question, options = {}, &block)
+
+ url, key, model, log_errors = IndiferentHash.process_options options, :url, :key, :model, :log_errors
+
+ url ||= Scout::Config.get(:url, :openai_ask, :ask, :openai, env: 'OPENWEBUI_URL', default: "http://localhost:3000/api")
+ key ||= LLM.get_url_config(:key, url, :openai_ask, :ask, :openai, env: 'OPENWEBUI_KEY')
+ model ||= LLM.get_url_config(:model, url, :openai_ask, :ask, :openai, env: 'OPENWEBUI_MODEL')
+
+ role = IndiferentHash.process_options options, :role
+ messages = LLM.parse(question, role)
+
+ parameters = options.merge(model: model, messages: messages)
+
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+
+ response = self.rest(:post, url, key, "chat/completions" , parameters)
+
+ message = response.dig("choices", 0, "message")
+
+ parameters.delete :tool_choice
+
+ while message["role"] == "assistant" && message["tool_calls"]
+ messages << message
+ message["tool_calls"].each do |tool_call|
+ response_message = LLM.tool_response(tool_call, &block)
+ messages << response_message
+ end
+
+ parameters[:messages] = messages
+ Log.debug "Calling client with parameters: #{Log.fingerprint parameters}"
+ response = client.chat( parameters: parameters)
+
+ message = response.dig("choices", 0, "message")
+ end
+
+ message.dig("content")
+ end
+ end
+ end
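
A sketch for the Open WebUI backend above (not part of the diff); the URL default and the OPENWEBUI_* variables come from the code. The model name is an example, and the sketch avoids tool calls, since the tool follow-up loop in this file still references a client variable that is not defined here.

  require 'scout/llm/backends/openwebui'

  answer = LLM::OpenWebUI.ask("user\tHello",
                              url: "http://localhost:3000/api",   # default from the code above
                              key: ENV['OPENWEBUI_KEY'],
                              model: "llama3")                     # example model name
  puts answer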
data/lib/scout/llm/backends/relay.rb ADDED
@@ -0,0 +1,36 @@
+ require 'scout'
+ require 'openai'
+ require_relative '../parse'
+ require_relative '../tools'
+
+ module LLM
+ module Relay
+ def self.upload(server, file)
+ id = Misc.digest(Open.read(file))
+ CMD.cmd("scp #{file} #{server}:.scout/var/ask/#{ id }.json")
+ id
+ end
+
+ def self.gather(server, id)
+ TmpFile.with_file do |file|
+ begin
+ CMD.cmd("scp #{server}:.scout/var/ask/reply/#{ id }.json #{ file }")
+ JSON.parse(Open.read(file))
+ rescue
+ sleep 1
+ retry
+ end
+ end
+ end
+
+ def self.ask(question, options = {}, &block)
+ options[:question] = question
+
+ server = Scout::Config.get(:server, :relay, default: "localhost")
+ TmpFile.with_file(options.to_json) do |file|
+ id = upload(server, file)
+ gather(server, id)
+ end
+ end
+ end
+ end
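
A sketch for the relay backend above (not part of the diff): the question and options are serialized to JSON, copied to the relay server's .scout/var/ask directory over scp, and the reply is polled back from .scout/var/ask/reply. The server name resolves through Scout::Config (:server, :relay), defaulting to localhost, and passwordless scp access to that host is assumed.

  require 'scout/llm/backends/relay'

  # server resolution happens inside ask via Scout::Config.get(:server, :relay)
  reply = LLM::Relay.ask("user\tHello from a machine without direct API access")
  puts reply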