nano-bots 2.4.1 → 2.5.0
- checksums.yaml +4 -4
- data/Gemfile.lock +14 -8
- data/README.md +598 -335
- data/components/provider.rb +10 -4
- data/components/providers/cohere.rb +1 -1
- data/components/providers/maritaca.rb +113 -0
- data/components/providers/ollama.rb +132 -0
- data/docker-compose.example.yml +10 -4
- data/logic/cartridge/streaming.rb +2 -2
- data/logic/providers/maritaca/tokens.rb +14 -0
- data/logic/providers/ollama/tokens.rb +14 -0
- data/nano-bots.gemspec +7 -4
- data/static/gem.rb +4 -4
- metadata +90 -51
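
In short, 2.5.0 adds two new providers, Ollama and Maritaca AI MariTalk, alongside the existing OpenAI, Mistral AI, Google Gemini, and Cohere ones, and bumps the gem version and cartridge specification accordingly. To pick up the release (a routine Gemfile pin, nothing release-specific):

    gem 'nano-bots', '~> 2.5.0'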
data/components/provider.rb
CHANGED
@@ -1,9 +1,11 @@
 # frozen_string_literal: true

-require_relative 'providers/google'
-require_relative 'providers/mistral'
 require_relative 'providers/openai'
+require_relative 'providers/ollama'
+require_relative 'providers/mistral'
+require_relative 'providers/google'
 require_relative 'providers/cohere'
+require_relative 'providers/maritaca'

 module NanoBot
   module Components
@@ -12,12 +14,16 @@ module NanoBot
         case provider[:id]
         when 'openai'
           Providers::OpenAI.new(nil, provider[:settings], provider[:credentials], environment:)
-        when 'google'
-          Providers::Google.new(provider[:options], provider[:settings], provider[:credentials], environment:)
+        when 'ollama'
+          Providers::Ollama.new(provider[:options], provider[:settings], provider[:credentials], environment:)
         when 'mistral'
           Providers::Mistral.new(provider[:options], provider[:settings], provider[:credentials], environment:)
+        when 'google'
+          Providers::Google.new(provider[:options], provider[:settings], provider[:credentials], environment:)
         when 'cohere'
           Providers::Cohere.new(provider[:options], provider[:settings], provider[:credentials], environment:)
+        when 'maritaca'
+          Providers::Maritaca.new(provider[:options], provider[:settings], provider[:credentials], environment:)
         else
           raise "Unsupported provider \"#{provider[:id]}\""
         end
data/components/providers/maritaca.rb
ADDED

@@ -0,0 +1,113 @@
+# frozen_string_literal: true
+
+require 'maritaca-ai'
+
+require_relative 'base'
+
+require_relative '../../logic/providers/maritaca/tokens'
+require_relative '../../logic/helpers/hash'
+require_relative '../../logic/cartridge/default'
+
+module NanoBot
+  module Components
+    module Providers
+      class Maritaca < Base
+        attr_reader :settings
+
+        CHAT_SETTINGS = %i[
+          max_tokens model do_sample temperature top_p repetition_penalty stopping_tokens
+        ].freeze
+
+        def initialize(options, settings, credentials, _environment)
+          @settings = settings
+
+          maritaca_options = if options
+                               options.transform_keys { |key| key.to_s.gsub('-', '_').to_sym }
+                             else
+                               {}
+                             end
+
+          unless maritaca_options.key?(:stream)
+            maritaca_options[:stream] = Logic::Helpers::Hash.fetch(
+              Logic::Cartridge::Default.instance.values, %i[provider options stream]
+            )
+          end
+
+          maritaca_options[:server_sent_events] = maritaca_options.delete(:stream)
+
+          @client = ::Maritaca.new(
+            credentials: credentials.transform_keys { |key| key.to_s.gsub('-', '_').to_sym },
+            options: maritaca_options
+          )
+        end
+
+        def evaluate(input, streaming, cartridge, &feedback)
+          messages = input[:history].map do |event|
+            { role: event[:who] == 'user' ? 'user' : 'assistant',
+              content: event[:message],
+              _meta: { at: event[:at] } }
+          end
+
+          # TODO: Does Maritaca have system messages?
+          %i[backdrop directive].each do |key|
+            next unless input[:behavior][key]
+
+            messages.prepend(
+              { role: 'user',
+                content: input[:behavior][key],
+                _meta: { at: Time.now } }
+            )
+          end
+
+          payload = { chat_mode: true, messages: }
+
+          CHAT_SETTINGS.each do |key|
+            payload[key] = @settings[key] unless payload.key?(key) || !@settings.key?(key)
+          end
+
+          raise 'Maritaca does not support tools.' if input[:tools]
+
+          if streaming
+            content = ''
+
+            stream_call_back = proc do |event, _raw|
+              partial_content = event['answer']
+
+              if partial_content
+                content += partial_content
+                feedback.call(
+                  { should_be_stored: false,
+                    interaction: { who: 'AI', message: partial_content } }
+                )
+              end
+            end
+
+            @client.chat_inference(
+              Logic::Maritaca::Tokens.apply_policies!(cartridge, payload),
+              server_sent_events: true, &stream_call_back
+            )
+
+            feedback.call(
+              { should_be_stored: !(content.nil? || content == ''),
+                interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
+                finished: true }
+            )
+          else
+            result = @client.chat_inference(
+              Logic::Maritaca::Tokens.apply_policies!(cartridge, payload),
+              server_sent_events: false
+            )
+
+            content = result['answer']
+
+            feedback.call(
+              { should_be_stored: !(content.nil? || content.to_s.strip == ''),
+                interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
+                finished: true }
+            )
+          end
+        end
+      end
+    end
+  end
+end
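
From evaluate above, the request body this provider submits for a one-turn conversation reduces to the following shape; a sketch, with an illustrative model value (the _meta entries are stripped by Logic::Maritaca::Tokens.apply_policies! before submission):

    {
      chat_mode: true,
      messages: [{ role: 'user', content: 'Oi!' }],
      model: 'sabia-2-medium'
    }

Note the TODO in evaluate: because system-message support is unconfirmed for MariTalk, both backdrop and directive are prepended as plain 'user' messages.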
data/components/providers/ollama.rb
ADDED

@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+require 'ollama-ai'
+
+require_relative 'base'
+
+require_relative '../../logic/providers/ollama/tokens'
+require_relative '../../logic/helpers/hash'
+require_relative '../../logic/cartridge/default'
+
+module NanoBot
+  module Components
+    module Providers
+      class Ollama < Base
+        attr_reader :settings
+
+        CHAT_SETTINGS = %i[
+          model template stream
+        ].freeze
+
+        CHAT_OPTIONS = %i[
+          mirostat mirostat_eta mirostat_tau num_ctx num_gqa num_gpu num_thread repeat_last_n
+          repeat_penalty temperature seed stop tfs_z num_predict top_k top_p
+        ].freeze
+
+        def initialize(options, settings, credentials, _environment)
+          @settings = settings
+
+          ollama_options = if options
+                             options.transform_keys { |key| key.to_s.gsub('-', '_').to_sym }
+                           else
+                             {}
+                           end
+
+          unless @settings.key?(:stream)
+            @settings = Marshal.load(Marshal.dump(@settings))
+            @settings[:stream] = Logic::Helpers::Hash.fetch(
+              Logic::Cartridge::Default.instance.values, %i[provider settings stream]
+            )
+          end
+
+          ollama_options[:server_sent_events] = @settings[:stream]
+
+          credentials ||= {}
+
+          @client = ::Ollama.new(
+            credentials: credentials.transform_keys { |key| key.to_s.gsub('-', '_').to_sym },
+            options: ollama_options
+          )
+        end
+
+        def evaluate(input, streaming, cartridge, &feedback)
+          messages = input[:history].map do |event|
+            { role: event[:who] == 'user' ? 'user' : 'assistant',
+              content: event[:message],
+              _meta: { at: event[:at] } }
+          end
+
+          %i[backdrop directive].each do |key|
+            next unless input[:behavior][key]
+
+            messages.prepend(
+              { role: key == :directive ? 'system' : 'user',
+                content: input[:behavior][key],
+                _meta: { at: Time.now } }
+            )
+          end
+
+          payload = { messages: }
+
+          CHAT_SETTINGS.each do |key|
+            payload[key] = @settings[key] unless payload.key?(key) || !@settings.key?(key)
+          end
+
+          if @settings.key?(:options)
+            options = {}
+
+            CHAT_OPTIONS.each do |key|
+              options[key] = @settings[:options][key] unless options.key?(key) || !@settings[:options].key?(key)
+            end
+
+            payload[:options] = options unless options.empty?
+          end
+
+          raise 'Ollama does not support tools.' if input[:tools]
+
+          if streaming
+            content = ''
+
+            stream_call_back = proc do |event, _raw|
+              partial_content = event.dig('message', 'content')
+
+              if partial_content
+                content += partial_content
+                feedback.call(
+                  { should_be_stored: false,
+                    interaction: { who: 'AI', message: partial_content } }
+                )
+              end
+
+              if event['done']
+                feedback.call(
+                  { should_be_stored: !(content.nil? || content == ''),
+                    interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
+                    finished: true }
+                )
+              end
+            end
+
+            @client.chat(
+              Logic::Ollama::Tokens.apply_policies!(cartridge, payload),
+              server_sent_events: true, &stream_call_back
+            )
+          else
+            result = @client.chat(
+              Logic::Ollama::Tokens.apply_policies!(cartridge, payload),
+              server_sent_events: false
+            )
+
+            content = result.map { |event| event.dig('message', 'content') }.join
+
+            feedback.call(
+              { should_be_stored: !(content.nil? || content.to_s.strip == ''),
+                interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
+                finished: true }
+            )
+          end
+        end
+      end
+    end
+  end
+end
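
The Ollama provider splits cartridge settings in two: whitelisted top-level CHAT_SETTINGS (model, template, stream) and a nested options hash filtered against CHAT_OPTIONS. A sketch of the resulting payload for a cartridge that carries a directive, with illustrative values:

    {
      model: 'llama2',
      stream: true,
      messages: [
        { role: 'system', content: 'You are a helpful assistant.' },
        { role: 'user', content: 'Hi!' }
      ],
      options: { temperature: 0.7, top_k: 40 }
    }

Unlike the Maritaca provider, a directive is promoted to a 'system' message here rather than being sent as a 'user' message.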
data/docker-compose.example.yml
CHANGED
@@ -2,10 +2,9 @@
 services:
   nano-bots:
     image: ruby:3.2.2-slim-bookworm
-    command: sh -c "apt-get update && apt-get install -y --no-install-recommends build-essential libffi-dev libsodium-dev lua5.4-dev curl && curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash && gem install nano-bots -v 2.4.1 && bash"
+    command: sh -c "apt-get update && apt-get install -y --no-install-recommends build-essential libffi-dev libsodium-dev lua5.4-dev curl && curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash && gem install nano-bots -v 2.5.0 && bash"
     environment:
-
-      OPENAI_API_KEY: your-access-token
+      COHERE_API_KEY: your-api-key

       GOOGLE_API_KEY: your-api-key

@@ -13,9 +12,16 @@ services:
       GOOGLE_PROJECT_ID: your-project-id
       GOOGLE_REGION: us-east4

+      MARITACA_API_KEY: 'your-api-key'
+
+      MISTRAL_API_KEY: your-api-key
+
+      OLLAMA_API_ADDRESS: http://host.docker.internal:11434
+
+      OPENAI_API_KEY: your-access-token
+
       NANO_BOTS_ENCRYPTION_PASSWORD: UNSAFE
       NANO_BOTS_END_USER: your-user
-
     volumes:
       - ./google-credentials.json:/root/.config/google-credentials.json
       - ./your-cartridges:/root/.local/share/nano-bots/cartridges
data/logic/cartridge/streaming.rb
CHANGED

@@ -8,9 +8,9 @@ module NanoBot
       module Streaming
         def self.enabled?(cartridge, interface)
           provider_stream = case Helpers::Hash.fetch(cartridge, %i[provider id])
-                            when 'openai', 'mistral', 'cohere'
+                            when 'openai', 'mistral', 'cohere', 'ollama'
                               Helpers::Hash.fetch(cartridge, %i[provider settings stream])
-                            when 'google'
+                            when 'google', 'maritaca'
                               Helpers::Hash.fetch(cartridge, %i[provider options stream])
                             end

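
The practical upshot: whether streaming defaults on is read from provider settings (settings.stream) for OpenAI, Mistral, Cohere, and now Ollama, but from provider options (options.stream) for Google and now Maritaca, matching where each provider's constructor looks for the flag. Illustrative cartridge fragments:

    { provider: { id: 'ollama', settings: { stream: true } } }
    { provider: { id: 'maritaca', options: { stream: true } } }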
data/logic/providers/maritaca/tokens.rb
ADDED

@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module NanoBot
+  module Logic
+    module Maritaca
+      module Tokens
+        def self.apply_policies!(_cartridge, payload)
+          payload[:messages] = payload[:messages].map { |message| message.except(:_meta) }
+          payload
+        end
+      end
+    end
+  end
+end
data/logic/providers/ollama/tokens.rb
ADDED

@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module NanoBot
+  module Logic
+    module Ollama
+      module Tokens
+        def self.apply_policies!(_cartridge, payload)
+          payload[:messages] = payload[:messages].map { |message| message.except(:_meta) }
+          payload
+        end
+      end
+    end
+  end
+end
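
Both new token policies do the same thing today: strip the internal _meta bookkeeping from each message before the payload leaves the process. A quick sketch of the effect (assuming Ruby >= 3.0 for Hash#except; the cartridge argument is ignored, hence nil):

    payload = { messages: [{ role: 'user', content: 'Hi!', _meta: { at: Time.now } }] }
    NanoBot::Logic::Ollama::Tokens.apply_policies!(nil, payload)
    # => { messages: [{ role: 'user', content: 'Hi!' }] }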
data/nano-bots.gemspec
CHANGED
@@ -32,16 +32,19 @@ Gem::Specification.new do |spec|
   spec.executables = ['nb']

   spec.add_dependency 'babosa', '~> 2.0'
-  spec.add_dependency 'cohere-ai', '~> 1.0', '>= 1.0.1'
   spec.add_dependency 'concurrent-ruby', '~> 1.2', '>= 1.2.2'
   spec.add_dependency 'dotenv', '~> 2.8', '>= 2.8.1'
-  spec.add_dependency 'gemini-ai', '~> 3.1'
-  spec.add_dependency 'mistral-ai', '~> 1.1'
   spec.add_dependency 'pry', '~> 0.14.2'
   spec.add_dependency 'rainbow', '~> 3.1', '>= 3.1.1'
   spec.add_dependency 'rbnacl', '~> 7.1', '>= 7.1.1'
-  spec.add_dependency 'ruby-openai', '~> 6.3', '>= 6.3.1'
   spec.add_dependency 'sweet-moon', '~> 0.0.7'

+  spec.add_dependency 'cohere-ai', '~> 1.0', '>= 1.0.1'
+  spec.add_dependency 'gemini-ai', '~> 3.1', '>= 3.1.2'
+  spec.add_dependency 'maritaca-ai', '~> 1.0'
+  spec.add_dependency 'mistral-ai', '~> 1.1'
+  spec.add_dependency 'ollama-ai', '~> 1.0'
+  spec.add_dependency 'ruby-openai', '~> 6.3', '>= 6.3.1'
+
   spec.metadata['rubygems_mfa_required'] = 'true'
 end
data/static/gem.rb
CHANGED
@@ -3,11 +3,11 @@
 module NanoBot
   GEM = {
     name: 'nano-bots',
-    version: '2.4.1',
-    specification: '2.
+    version: '2.5.0',
+    specification: '2.3.0',
     author: 'icebaker',
-    summary: 'Ruby Implementation of Nano Bots: small, AI-powered bots for OpenAI ChatGPT, Mistral AI, and Google Gemini.',
-    description: 'Ruby Implementation of Nano Bots: small, AI-powered bots that can be easily shared as a single file, designed to support multiple providers such as OpenAI ChatGPT, Mistral AI, and Google Gemini, with support for calling Tools (Functions).',
+    summary: 'Ruby Implementation of Nano Bots: small, AI-powered bots for OpenAI ChatGPT, Ollama, Mistral AI, Cohere Command, Maritaca AI MariTalk, and Google Gemini.',
+    description: 'Ruby Implementation of Nano Bots: small, AI-powered bots that can be easily shared as a single file, designed to support multiple providers such as OpenAI ChatGPT, Ollama, Mistral AI, Cohere Command, Maritaca AI MariTalk, and Google Gemini, with support for calling Tools (Functions).',
     github: 'https://github.com/icebaker/ruby-nano-bots',
     gem_server: 'https://rubygems.org',
     license: 'MIT',