nano-bots 1.1.2 → 2.0.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 9e2cff31d309d896be4e019fa6f39a86d68c6463c83c552b44d1bdf270d160b5
4
- data.tar.gz: 8d506a1a13e2f85efb47a5149dc9ecd4519a49be8f26d139b5309f23e846aebe
3
+ metadata.gz: 2597a22180cd4e9feb55c7780fa831b682485c7158c4a562833856a87822f679
4
+ data.tar.gz: 22c3bcaf6d5c61525b2e26ecc5f70e82d723cf022e7c218ffa1f3e7ce2052a58
5
5
  SHA512:
6
- metadata.gz: a95508200927eff8dc6bbb7d6a1ca84e6c4ec4a0ff0ff549d6650f862a17ec5f56fe5e68ddfa3acf699f2ef2462527e0f63ec753c494c62149ff604cc17f6983
7
- data.tar.gz: cf285799aba18d3dfe7680cbc47a2a09805953d1bf2ed65a29f8da78553f9d4e06a56b2ec9984f5819847790659df80636b02255c3b32ba1acb58670d4398458
6
+ metadata.gz: 1afd6b0ddb4918b7cc38a1a4db8355653dad04023791c04b5ef5c69b0e69ff42298bbc903972179ebe21e2bb1a8d429928f59df64903ad5d068fa38059a89d21
7
+ data.tar.gz: 719a91a7351ea0557780ce06a436e1fee35824281c921f74a866a4c51bce8f9605157b7ef5bd27763a693885856bad87499164bb32f742a59a1130bf8879f597
data/Gemfile CHANGED
@@ -7,6 +7,6 @@ gemspec
7
7
  group :test, :development do
8
8
  gem 'pry-byebug', '~> 3.10', '>= 3.10.1'
9
9
  gem 'rspec', '~> 3.12'
10
- gem 'rubocop', '~> 1.58'
10
+ gem 'rubocop', '~> 1.59'
11
11
  gem 'rubocop-rspec', '~> 2.25'
12
12
  end
data/Gemfile.lock CHANGED
@@ -1,11 +1,11 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- nano-bots (1.1.2)
4
+ nano-bots (2.0.0)
5
5
  babosa (~> 2.0)
6
6
  concurrent-ruby (~> 1.2, >= 1.2.2)
7
7
  dotenv (~> 2.8, >= 2.8.1)
8
- faraday (~> 2.7, >= 2.7.12)
8
+ gemini-ai (~> 2.0)
9
9
  pry (~> 0.14.2)
10
10
  rainbow (~> 3.1, >= 3.1.1)
11
11
  rbnacl (~> 7.1, >= 7.1.1)
@@ -15,6 +15,8 @@ PATH
15
15
  GEM
16
16
  remote: https://rubygems.org/
17
17
  specs:
18
+ addressable (2.8.6)
19
+ public_suffix (>= 2.0.2, < 6.0)
18
20
  ast (2.4.2)
19
21
  babosa (2.0.0)
20
22
  base64 (0.2.0)
@@ -32,10 +34,26 @@ GEM
32
34
  multipart-post (~> 2)
33
35
  faraday-net_http (3.0.2)
34
36
  ffi (1.16.3)
37
+ gemini-ai (2.0.0)
38
+ event_stream_parser (~> 1.0)
39
+ faraday (~> 2.7, >= 2.7.12)
40
+ googleauth (~> 1.9, >= 1.9.1)
41
+ google-cloud-env (2.1.0)
42
+ faraday (>= 1.0, < 3.a)
43
+ googleauth (1.9.1)
44
+ faraday (>= 1.0, < 3.a)
45
+ google-cloud-env (~> 2.1)
46
+ jwt (>= 1.4, < 3.0)
47
+ multi_json (~> 1.11)
48
+ os (>= 0.9, < 2.0)
49
+ signet (>= 0.16, < 2.a)
35
50
  json (2.7.1)
51
+ jwt (2.7.1)
36
52
  language_server-protocol (3.17.0.3)
37
53
  method_source (1.0.0)
54
+ multi_json (1.15.0)
38
55
  multipart-post (2.3.0)
56
+ os (1.1.4)
39
57
  parallel (1.23.0)
40
58
  parser (3.2.2.4)
41
59
  ast (~> 2.4.1)
@@ -46,6 +64,7 @@ GEM
46
64
  pry-byebug (3.10.1)
47
65
  byebug (~> 11.0)
48
66
  pry (>= 0.13, < 0.15)
67
+ public_suffix (5.0.4)
49
68
  racc (1.7.3)
50
69
  rainbow (3.1.1)
51
70
  rbnacl (7.1.1)
@@ -65,7 +84,7 @@ GEM
65
84
  diff-lcs (>= 1.2.0, < 2.0)
66
85
  rspec-support (~> 3.12.0)
67
86
  rspec-support (3.12.1)
68
- rubocop (1.58.0)
87
+ rubocop (1.59.0)
69
88
  json (~> 2.3)
70
89
  language_server-protocol (>= 3.17.0)
71
90
  parallel (~> 1.10)
@@ -92,6 +111,11 @@ GEM
92
111
  faraday-multipart (>= 1)
93
112
  ruby-progressbar (1.13.0)
94
113
  ruby2_keywords (0.0.5)
114
+ signet (0.18.0)
115
+ addressable (~> 2.8)
116
+ faraday (>= 0.17.5, < 3.a)
117
+ jwt (>= 1.5, < 3.0)
118
+ multi_json (~> 1.10)
95
119
  sweet-moon (0.0.7)
96
120
  ffi (~> 1.15, >= 1.15.5)
97
121
  unicode-display_width (2.5.0)
@@ -103,7 +127,7 @@ DEPENDENCIES
103
127
  nano-bots!
104
128
  pry-byebug (~> 3.10, >= 3.10.1)
105
129
  rspec (~> 3.12)
106
- rubocop (~> 1.58)
130
+ rubocop (~> 1.59)
107
131
  rubocop-rspec (~> 2.25)
108
132
 
109
133
  BUNDLED WITH
data/README.md CHANGED
@@ -8,35 +8,41 @@ _Image artificially created by Midjourney through a prompt generated by a Nano B
8
8
  https://user-images.githubusercontent.com/113217272/238141567-c58a240c-7b67-4b3b-864a-0f49bbf6e22f.mp4
9
9
 
10
10
  - [Setup](#setup)
11
- - [Docker](#docker)
11
+ - [OpenAI ChatGPT](#openai-chatgpt)
12
+ - [Google Gemini](#google-gemini)
13
+ - [Docker](#docker)
14
+ - [OpenAI ChatGPT](#openai-chatgpt-1)
15
+ - [Google Gemini](#google-gemini-1)
12
16
  - [Usage](#usage)
13
- - [Command Line](#command-line)
14
- - [Library](#library)
17
+ - [Command Line](#command-line)
18
+ - [Library](#library)
15
19
  - [Cartridges](#cartridges)
16
- - [Tools (Functions)](#tools-functions)
17
- - [Experimental Clojure Support](#experimental-clojure-support)
18
- - [Marketplace](#marketplace)
20
+ - [OpenAI ChatGPT](#openai-chatgpt-2)
21
+ - [Google Gemini](#google-gemini-2)
22
+ - [Tools (Functions)](#tools-functions)
23
+ - [Experimental Clojure Support](#experimental-clojure-support)
24
+ - [Marketplace](#marketplace)
19
25
  - [Security and Privacy](#security-and-privacy)
20
- - [Cryptography](#cryptography)
21
- - [End-user IDs](#end-user-ids)
22
- - [Decrypting](#decrypting)
26
+ - [Cryptography](#cryptography)
27
+ - [End-user IDs](#end-user-ids)
28
+ - [Decrypting](#decrypting)
23
29
  - [Providers](#providers)
24
30
  - [Debugging](#debugging)
25
31
  - [Development](#development)
26
- - [Publish to RubyGems](#publish-to-rubygems)
32
+ - [Publish to RubyGems](#publish-to-rubygems)
27
33
 
28
34
  ## Setup
29
35
 
30
36
  For a system usage:
31
37
 
32
38
  ```sh
33
- gem install nano-bots -v 1.1.2
39
+ gem install nano-bots -v 2.0.0
34
40
  ```
35
41
 
36
42
  To use it in a project, add it to your `Gemfile`:
37
43
 
38
44
  ```ruby
39
- gem 'nano-bots', '~> 1.1.2'
45
+ gem 'nano-bots', '~> 2.0.0'
40
46
  ```
41
47
 
42
48
  ```sh
@@ -45,6 +51,8 @@ bundle install
45
51
 
46
52
  For credentials and configurations, relevant environment variables can be set in your `.bashrc`, `.zshrc`, or equivalent files, as well as in your Docker Container or System Environment. Example:
47
53
 
54
+ ### OpenAI ChatGPT
55
+
48
56
  ```sh
49
57
  export OPENAI_API_ADDRESS=https://api.openai.com
50
58
  export OPENAI_API_KEY=your-access-token
@@ -69,6 +77,62 @@ NANO_BOTS_END_USER=your-user
69
77
  # NANO_BOTS_CARTRIDGES_DIRECTORY=/home/user/.local/share/nano-bots/cartridges
70
78
  ```
71
79
 
80
+ ### Google Gemini
81
+
82
+ Click [here](https://github.com/gbaptista/gemini-ai#credentials) to learn how to obtain your credentials.
83
+
84
+ #### Option 1: API Key (Generative Language API)
85
+
86
+ ```sh
87
+ export GOOGLE_API_KEY=your-api-key
88
+
89
+ export NANO_BOTS_ENCRYPTION_PASSWORD=UNSAFE
90
+ export NANO_BOTS_END_USER=your-user
91
+
92
+ # export NANO_BOTS_STATE_DIRECTORY=/home/user/.local/state/nano-bots
93
+ # export NANO_BOTS_CARTRIDGES_DIRECTORY=/home/user/.local/share/nano-bots/cartridges
94
+ ```
95
+
96
+ Alternatively, if your current directory has a `.env` file with the environment variables, they will be automatically loaded:
97
+
98
+ ```sh
99
+ GOOGLE_API_KEY=your-api-key
100
+
101
+ NANO_BOTS_ENCRYPTION_PASSWORD=UNSAFE
102
+ NANO_BOTS_END_USER=your-user
103
+
104
+ # NANO_BOTS_STATE_DIRECTORY=/home/user/.local/state/nano-bots
105
+ # NANO_BOTS_CARTRIDGES_DIRECTORY=/home/user/.local/share/nano-bots/cartridges
106
+ ```
107
+
108
+ #### Option 2: Service Account (Vertex AI API)
109
+
110
+ ```sh
111
+ export GOOGLE_CREDENTIALS_FILE_PATH=google-credentials.json
112
+ export GOOGLE_PROJECT_ID=your-project-id
113
+ export GOOGLE_REGION=us-east4
114
+
115
+ export NANO_BOTS_ENCRYPTION_PASSWORD=UNSAFE
116
+ export NANO_BOTS_END_USER=your-user
117
+
118
+ # export NANO_BOTS_STATE_DIRECTORY=/home/user/.local/state/nano-bots
119
+ # export NANO_BOTS_CARTRIDGES_DIRECTORY=/home/user/.local/share/nano-bots/cartridges
120
+ ```
121
+
122
+ Alternatively, if your current directory has a `.env` file with the environment variables, they will be automatically loaded:
123
+
124
+ ```sh
125
+ GOOGLE_CREDENTIALS_FILE_PATH=google-credentials.json
126
+ GOOGLE_PROJECT_ID=your-project-id
127
+ GOOGLE_REGION=us-east4
128
+
129
+ NANO_BOTS_ENCRYPTION_PASSWORD=UNSAFE
130
+ NANO_BOTS_END_USER=your-user
131
+
132
+ # NANO_BOTS_STATE_DIRECTORY=/home/user/.local/state/nano-bots
133
+ # NANO_BOTS_CARTRIDGES_DIRECTORY=/home/user/.local/share/nano-bots/cartridges
134
+ ```
135
+
72
136
  ## Docker
73
137
 
74
138
  Clone the repository and copy the Docker Compose template:
@@ -81,12 +145,14 @@ cp docker-compose.example.yml docker-compose.yml
81
145
 
82
146
  Set your provider credentials and choose your desired directory for the cartridges files:
83
147
 
148
+ ### OpenAI ChatGPT
149
+
84
150
  ```yaml
85
151
  ---
86
152
  services:
87
153
  nano-bots:
88
154
  image: ruby:3.2.2-slim-bookworm
89
- command: sh -c "apt-get update && apt-get install -y --no-install-recommends build-essential libffi-dev libsodium-dev lua5.4-dev curl && curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash && gem install nano-bots -v 1.1.2 && bash"
155
+ command: sh -c "apt-get update && apt-get install -y --no-install-recommends build-essential libffi-dev libsodium-dev lua5.4-dev curl && curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash && gem install nano-bots -v 2.0.0 && bash"
90
156
  environment:
91
157
  OPENAI_API_ADDRESS: https://api.openai.com
92
158
  OPENAI_API_KEY: your-access-token
@@ -97,6 +163,47 @@ services:
97
163
  - ./your-state-path:/root/.local/state/nano-bots
98
164
  ```
99
165
 
166
+ ### Google Gemini
167
+
168
+ #### Option 1: API Key (Generative Language API)
169
+
170
+ ```yaml
171
+ ---
172
+ services:
173
+ nano-bots:
174
+ image: ruby:3.2.2-slim-bookworm
175
+ command: sh -c "apt-get update && apt-get install -y --no-install-recommends build-essential libffi-dev libsodium-dev lua5.4-dev curl && curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash && gem install nano-bots -v 2.0.0 && bash"
176
+ environment:
177
+ GOOGLE_API_KEY: your-api-key
178
+ NANO_BOTS_ENCRYPTION_PASSWORD: UNSAFE
179
+ NANO_BOTS_END_USER: your-user
180
+ volumes:
181
+ - ./your-cartridges:/root/.local/share/nano-bots/cartridges
182
+ - ./your-state-path:/root/.local/state/nano-bots
183
+ ```
184
+
185
+ #### Option 2: Service Account (Vertex AI API)
186
+
187
+ ```yaml
188
+ ---
189
+ services:
190
+ nano-bots:
191
+ image: ruby:3.2.2-slim-bookworm
192
+ command: sh -c "apt-get update && apt-get install -y --no-install-recommends build-essential libffi-dev libsodium-dev lua5.4-dev curl && curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash && gem install nano-bots -v 2.0.0 && bash"
193
+ environment:
194
+ GOOGLE_CREDENTIALS_FILE_PATH: /root/.config/google-credentials.json
195
+ GOOGLE_PROJECT_ID: your-project-id
196
+ GOOGLE_REGION: us-east4
197
+ NANO_BOTS_ENCRYPTION_PASSWORD: UNSAFE
198
+ NANO_BOTS_END_USER: your-user
199
+ volumes:
200
+ - ./google-credentials.json:/root/.config/google-credentials.json
201
+ - ./your-cartridges:/root/.local/share/nano-bots/cartridges
202
+ - ./your-state-path:/root/.local/state/nano-bots
203
+ ```
204
+
205
+ ### Container
206
+
100
207
  Enter the container:
101
208
  ```sh
102
209
  docker compose run nano-bots
@@ -246,8 +353,16 @@ end
246
353
 
247
354
  ## Cartridges
248
355
 
356
+ Check the Nano Bots specification to learn more about [how to build cartridges](https://spec.nbots.io/#/README?id=cartridges).
357
+
358
+ Try the [Nano Bots Clinic (Live Editor)](https://clinic.nbots.io) to learn about creating Cartridges.
359
+
249
360
  Here's what a Nano Bot Cartridge looks like:
250
361
 
362
+ ### OpenAI ChatGPT
363
+
364
+ Read the [full specification](https://spec.nbots.io/#/README?id=open-ai-chatgpt) for OpenAI ChatGPT.
365
+
251
366
  ```yaml
252
367
  ---
253
368
  meta:
@@ -269,12 +384,64 @@ provider:
269
384
  access-token: ENV/OPENAI_API_KEY
270
385
  settings:
271
386
  user: ENV/NANO_BOTS_END_USER
272
- model: gpt-3.5-turbo
387
+ model: gpt-4-1106-preview
273
388
  ```
274
389
 
275
- Check the Nano Bots specification to learn more about [how to build cartridges](https://spec.nbots.io/#/README?id=cartridges).
390
+ ### Google Gemini
276
391
 
277
- Try the [Nano Bots Clinic (Live Editor)](https://clinic.nbots.io) to learn about creating Cartridges.
392
+ Read the [full specification](https://spec.nbots.io/#/README?id=google-gemini) for Google Gemini.
393
+
394
+ #### Option 1: API Key (Generative Language API)
395
+
396
+ ```yaml
397
+ ---
398
+ meta:
399
+ symbol: 🤖
400
+ name: Nano Bot Name
401
+ author: Your Name
402
+ version: 1.0.0
403
+ license: CC0-1.0
404
+ description: A helpful assistant.
405
+
406
+ behaviors:
407
+ interaction:
408
+ directive: You are a helpful assistant.
409
+
410
+ provider:
411
+ id: google
412
+ credentials:
413
+ service: generative-language-api
414
+ api-key: ENV/GOOGLE_API_KEY
415
+ options:
416
+ model: gemini-pro
417
+ ```
418
+
419
+ #### Option 2: Service Account (Vertex AI API)
420
+
421
+ ```yaml
422
+ ---
423
+ meta:
424
+ symbol: 🤖
425
+ name: Nano Bot Name
426
+ author: Your Name
427
+ version: 1.0.0
428
+ license: CC0-1.0
429
+ description: A helpful assistant.
430
+
431
+ behaviors:
432
+ interaction:
433
+ directive: You are a helpful assistant.
434
+
435
+ provider:
436
+ id: google
437
+ credentials:
438
+ service: vertex-ai-api
439
+ file-path: ENV/GOOGLE_CREDENTIALS_FILE_PATH
440
+ project-id: ENV/GOOGLE_PROJECT_ID
441
+ region: ENV/GOOGLE_REGION
442
+ options:
443
+ model: gemini-pro
444
+ ```
278
445
 
279
446
  ### Tools (Functions)
280
447
 
@@ -301,22 +468,9 @@ The randomly generated number is 59.
301
468
 
302
469
  🤖> |
303
470
  ```
471
+ To successfully use Tools (Functions), you need to specify a provider and a model that support them. As of the writing of this README, the providers that support them are [OpenAI](https://platform.openai.com/docs/models), with the models `gpt-3.5-turbo-1106` and `gpt-4-1106-preview`, and [Google](https://cloud.google.com/vertex-ai/docs/generative-ai/multimodal/function-calling#supported_models), with the `vertex-ai-api` service and the model `gemini-pro`.
304
472
 
305
- To successfully use Tools (Functions), you need to specify a provider and a model that support them. As of the writing of this README, the provider that supports them is [OpenAI](https://platform.openai.com/docs/models), with models `gpt-3.5-turbo-1106` and `gpt-4-1106-preview`:
306
-
307
- ```yaml
308
- ---
309
- provider:
310
- id: openai
311
- credentials:
312
- address: ENV/OPENAI_API_ADDRESS
313
- access-token: ENV/OPENAI_API_KEY
314
- settings:
315
- user: ENV/NANO_BOTS_END_USER
316
- model: gpt-4-1106-preview
317
- ```
318
-
319
- Check the [Nano Bots specification](https://spec.nbots.io/#/README?id=tools-functions-2) to learn more about them.
473
+ Check the [Nano Bots specification](https://spec.nbots.io/#/README?id=tools-functions-2) to learn more about Tools (Functions).
320
474
 
321
475
  #### Experimental Clojure Support
322
476
 
@@ -337,7 +491,7 @@ Unlike Lua and Fennel, Clojure support is not _embedded_ in this implementation.
337
491
  Here's [how to install Babashka](https://github.com/babashka/babashka#quickstart):
338
492
 
339
493
  ```sh
340
- curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash
494
+ curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | sudo bash
341
495
  ```
342
496
 
343
497
  This is a quick check to ensure that it is available and working:
@@ -469,13 +623,16 @@ If you lose your password, you lose your data. It is not possible to recover it
469
623
 
470
624
  Currently supported providers:
471
625
 
472
- - [x] [FastChat (Vicuna)](https://github.com/lm-sys/FastChat)
473
- - [x] [Open AI](https://platform.openai.com/docs/api-reference)
474
- - [ ] [Google PaLM](https://developers.generativeai.google/)
475
- - [ ] [Alpaca](https://github.com/tatsu-lab/stanford_alpaca)
476
- - [ ] [LLaMA](https://github.com/facebookresearch/llama)
626
+ - [x] [Open AI ChatGPT](https://platform.openai.com/docs/api-reference)
627
+ - [x] [Google Gemini](https://cloud.google.com/vertex-ai/docs/generative-ai/model-reference/gemini)
628
+ - [ ] [Anthropic Claude](https://www.anthropic.com)
629
+ - [ ] [Cohere Command](https://cohere.com)
630
+ - [ ] [Meta Llama](https://ai.meta.com/llama/)
631
+ - [ ] [01.AI Yi](https://01.ai)
632
+ - [ ] [WizardLM](https://wizardlm.github.io)
633
+ - [ ] [LMSYS Org FastChat Vicuna](https://github.com/lm-sys/FastChat)
477
634
 
478
- Although only OpenAI has been officially tested, some of the open-source providers offer APIs that are compatible with OpenAI, such as [FastChat](https://github.com/lm-sys/FastChat#openai-compatible-restful-apis--sdk). Therefore, it is highly probable that they will work just fine.
635
+ Although only OpenAI ChatGPT and Google Gemini have been officially tested, some alternative providers offer APIs that are compatible with OpenAI's, such as [FastChat](https://github.com/lm-sys/FastChat#openai-compatible-restful-apis--sdk). Therefore, it is highly probable that they will work just fine.
479
636
 
480
637
  ## Development
481
638
 
@@ -492,5 +649,5 @@ gem build nano-bots.gemspec
492
649
 
493
650
  gem signin
494
651
 
495
- gem push nano-bots-1.1.2.gem
652
+ gem push nano-bots-2.0.0.gem
496
653
  ```
@@ -1,8 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
- require 'openai'
4
-
5
3
  require_relative 'providers/openai'
4
+ require_relative 'providers/google'
6
5
 
7
6
  module NanoBot
8
7
  module Components
@@ -10,7 +9,9 @@ module NanoBot
10
9
  def self.new(provider, environment: {})
11
10
  case provider[:id]
12
11
  when 'openai'
13
- Providers::OpenAI.new(provider[:settings], provider[:credentials], environment:)
12
+ Providers::OpenAI.new(nil, provider[:settings], provider[:credentials], environment:)
13
+ when 'google'
14
+ Providers::Google.new(provider[:options], provider[:settings], provider[:credentials], environment:)
14
15
  else
15
16
  raise "Unsupported provider \"#{provider[:id]}\""
16
17
  end
@@ -6,7 +6,7 @@ module NanoBot
6
6
  module Components
7
7
  module Providers
8
8
  class Base
9
- def initialize(_settings, _credentials, _environment: {})
9
+ def initialize(_options, _settings, _credentials, _environment: {})
10
10
  raise NoMethodError, "The 'initialize' method is not implemented for the current provider."
11
11
  end
12
12
 
@@ -0,0 +1,220 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'gemini-ai'
4
+
5
+ require_relative 'base'
6
+
7
+ require_relative '../../logic/providers/google/tools'
8
+ require_relative '../../logic/providers/google/tokens'
9
+
10
+ require_relative 'tools'
11
+
12
+ module NanoBot
13
+ module Components
14
+ module Providers
15
+ class Google < Base
16
+ SAFETY_SETTINGS = %i[category threshold].freeze
17
+
18
+ SETTINGS = {
19
+ generationConfig: %i[
20
+ temperature topP topK candidateCount maxOutputTokens stopSequences
21
+ ].freeze
22
+ }.freeze
23
+
24
+ attr_reader :settings
25
+
26
+ def initialize(options, settings, credentials, _environment)
27
+ @settings = settings
28
+
29
+ gemini_credentials = if credentials[:'api-key']
30
+ {
31
+ service: credentials[:service],
32
+ api_key: credentials[:'api-key'],
33
+ project_id: credentials[:'project-id'],
34
+ region: credentials[:region]
35
+ }
36
+ else
37
+ {
38
+ service: credentials[:service],
39
+ file_path: credentials[:'file-path'],
40
+ project_id: credentials[:'project-id'],
41
+ region: credentials[:region]
42
+ }
43
+ end
44
+
45
+ @client = Gemini.new(
46
+ credentials: gemini_credentials,
47
+ options: { model: options[:model], stream: options[:stream] }
48
+ )
49
+ end
50
+
51
+ def evaluate(input, streaming, cartridge, &feedback)
52
+ messages = input[:history].map do |event|
53
+ if event[:message].nil? && event[:meta] && event[:meta][:tool_calls]
54
+ { role: 'model',
55
+ parts: event[:meta][:tool_calls],
56
+ _meta: { at: event[:at] } }
57
+ elsif event[:who] == 'tool'
58
+ { role: 'function',
59
+ parts: [
60
+ { functionResponse: {
61
+ name: event[:meta][:name],
62
+ response: { name: event[:meta][:name], content: event[:message].to_s }
63
+ } }
64
+ ],
65
+ _meta: { at: event[:at] } }
66
+ else
67
+ { role: event[:who] == 'user' ? 'user' : 'model',
68
+ parts: { text: event[:message] },
69
+ _meta: { at: event[:at] } }
70
+ end
71
+ end
72
+
73
+ %i[backdrop directive].each do |key|
74
+ next unless input[:behavior][key]
75
+
76
+ messages.prepend(
77
+ { role: 'model',
78
+ parts: { text: 'Understood.' },
79
+ _meta: { at: Time.now } }
80
+ )
81
+
82
+ # TODO: Does Gemini have system messages?
83
+ messages.prepend(
84
+ { role: 'user',
85
+ parts: { text: input[:behavior][key] },
86
+ _meta: { at: Time.now } }
87
+ )
88
+ end
89
+
90
+ payload = { contents: messages, generationConfig: { candidateCount: 1 } }
91
+
92
+ if @settings
93
+ SETTINGS.each_key do |key|
94
+ SETTINGS[key].each do |sub_key|
95
+ if @settings.key?(key) && @settings[key].key?(sub_key)
96
+ payload[key] = {} unless payload.key?(key)
97
+ payload[key][sub_key] = @settings[key][sub_key]
98
+ end
99
+ end
100
+ end
101
+
102
+ if @settings[:safetySettings].is_a?(Array)
103
+ payload[:safetySettings] = [] unless payload.key?(:safetySettings)
104
+
105
+ @settings[:safetySettings].each do |safety_setting|
106
+ setting = {}
107
+ SAFETY_SETTINGS.each { |key| setting[key] = safety_setting[key] }
108
+ payload[:safetySettings] << setting
109
+ end
110
+ end
111
+ end
112
+
113
+ if input[:tools]
114
+ payload[:tools] = {
115
+ function_declarations: input[:tools].map { |raw| Logic::Google::Tools.adapt(raw) }
116
+ }
117
+ end
118
+
119
+ if streaming
120
+ content = ''
121
+ tools = []
122
+
123
+ stream_call_back = proc do |event, _parsed, _raw|
124
+ partial_content = event.dig('candidates', 0, 'content', 'parts').filter do |part|
125
+ part.key?('text')
126
+ end.map { |part| part['text'] }.join
127
+
128
+ partial_tools = event.dig('candidates', 0, 'content', 'parts').filter do |part|
129
+ part.key?('functionCall')
130
+ end
131
+
132
+ tools.concat(partial_tools) if partial_tools.size.positive?
133
+
134
+ if partial_content
135
+ content += partial_content
136
+ feedback.call(
137
+ { should_be_stored: false,
138
+ interaction: { who: 'AI', message: partial_content } }
139
+ )
140
+ end
141
+
142
+ if event.dig('candidates', 0, 'finishReason')
143
+ if tools&.size&.positive?
144
+ feedback.call(
145
+ { should_be_stored: true,
146
+ needs_another_round: true,
147
+ interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
148
+ )
149
+ Tools.apply(
150
+ cartridge, input[:tools], tools, feedback, Logic::Google::Tools
151
+ ).each do |interaction|
152
+ feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
153
+ end
154
+ end
155
+
156
+ feedback.call(
157
+ { should_be_stored: !(content.nil? || content == ''),
158
+ interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
159
+ finished: true }
160
+ )
161
+ end
162
+ end
163
+
164
+ begin
165
+ @client.stream_generate_content(
166
+ Logic::Google::Tokens.apply_policies!(cartridge, payload),
167
+ stream: true, &stream_call_back
168
+ )
169
+ rescue StandardError => e
170
+ raise e.class, e.response[:body] if e.response && e.response[:body]
171
+
172
+ raise e
173
+ end
174
+ else
175
+ begin
176
+ result = @client.stream_generate_content(
177
+ Logic::Google::Tokens.apply_policies!(cartridge, payload),
178
+ stream: false
179
+ )
180
+ rescue StandardError => e
181
+ raise e.class, e.response[:body] if e.response && e.response[:body]
182
+
183
+ raise e
184
+ end
185
+
186
+ tools = result.dig(0, 'candidates', 0, 'content', 'parts').filter do |part|
187
+ part.key?('functionCall')
188
+ end
189
+
190
+ if tools&.size&.positive?
191
+ feedback.call(
192
+ { should_be_stored: true,
193
+ needs_another_round: true,
194
+ interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
195
+ )
196
+
197
+ Tools.apply(
198
+ cartridge, input[:tools], tools, feedback, Logic::Google::Tools
199
+ ).each do |interaction|
200
+ feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
201
+ end
202
+ end
203
+
204
+ content = result.map do |answer|
205
+ answer.dig('candidates', 0, 'content', 'parts').filter do |part|
206
+ part.key?('text')
207
+ end.map { |part| part['text'] }.join
208
+ end.join
209
+
210
+ feedback.call(
211
+ { should_be_stored: !(content.nil? || content.to_s.strip == ''),
212
+ interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
213
+ finished: true }
214
+ )
215
+ end
216
+ end
217
+ end
218
+ end
219
+ end
220
+ end
@@ -6,9 +6,9 @@ require_relative 'base'
6
6
  require_relative '../crypto'
7
7
 
8
8
  require_relative '../../logic/providers/openai/tools'
9
- require_relative '../../controllers/interfaces/tools'
9
+ require_relative '../../logic/providers/openai/tokens'
10
10
 
11
- require_relative 'openai/tools'
11
+ require_relative 'tools'
12
12
 
13
13
  module NanoBot
14
14
  module Components
@@ -17,13 +17,14 @@ module NanoBot
17
17
  DEFAULT_ADDRESS = 'https://api.openai.com'
18
18
 
19
19
  CHAT_SETTINGS = %i[
20
- model stream temperature top_p n stop max_tokens
21
- presence_penalty frequency_penalty logit_bias
20
+ model stream frequency_penalty logit_bias logprobs top_logprobs
21
+ max_tokens n presence_penalty response_format seed stop temperature
22
+ top_p tool_choice
22
23
  ].freeze
23
24
 
24
25
  attr_reader :settings
25
26
 
26
- def initialize(settings, credentials, environment: {})
27
+ def initialize(_options, settings, credentials, environment: {})
27
28
  @settings = settings
28
29
  @credentials = credentials
29
30
  @environment = environment
@@ -40,12 +41,18 @@ module NanoBot
40
41
  def evaluate(input, streaming, cartridge, &feedback)
41
42
  messages = input[:history].map do |event|
42
43
  if event[:message].nil? && event[:meta] && event[:meta][:tool_calls]
43
- { role: 'assistant', content: nil, tool_calls: event[:meta][:tool_calls] }
44
+ { role: 'assistant', content: nil,
45
+ tool_calls: event[:meta][:tool_calls],
46
+ _meta: { at: event[:at] } }
44
47
  elsif event[:who] == 'tool'
45
48
  { role: event[:who], content: event[:message].to_s,
46
- tool_call_id: event[:meta][:id], name: event[:meta][:name] }
49
+ tool_call_id: event[:meta][:id],
50
+ name: event[:meta][:name],
51
+ _meta: { at: event[:at] } }
47
52
  else
48
- { role: event[:who] == 'user' ? 'user' : 'assistant', content: event[:message] }
53
+ { role: event[:who] == 'user' ? 'user' : 'assistant',
54
+ content: event[:message],
55
+ _meta: { at: event[:at] } }
49
56
  end
50
57
  end
51
58
 
@@ -54,7 +61,8 @@ module NanoBot
54
61
 
55
62
  messages.prepend(
56
63
  { role: key == :directive ? 'system' : 'user',
57
- content: input[:behavior][key] }
64
+ content: input[:behavior][key],
65
+ _meta: { at: Time.now } }
58
66
  )
59
67
  end
60
68
 
@@ -66,7 +74,7 @@ module NanoBot
66
74
 
67
75
  payload.delete(:logit_bias) if payload.key?(:logit_bias) && payload[:logit_bias].nil?
68
76
 
69
- payload[:tools] = input[:tools].map { |raw| NanoBot::Logic::OpenAI::Tools.adapt(raw) } if input[:tools]
77
+ payload[:tools] = input[:tools].map { |raw| Logic::OpenAI::Tools.adapt(raw) } if input[:tools]
70
78
 
71
79
  if streaming
72
80
  content = ''
@@ -114,13 +122,15 @@ module NanoBot
114
122
  needs_another_round: true,
115
123
  interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
116
124
  )
117
- Tools.apply(cartridge, input[:tools], tools, feedback).each do |interaction|
125
+ Tools.apply(
126
+ cartridge, input[:tools], tools, feedback, Logic::OpenAI::Tools
127
+ ).each do |interaction|
118
128
  feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
119
129
  end
120
130
  end
121
131
 
122
132
  feedback.call(
123
- { should_be_stored: !(content.nil? || content == ''),
133
+ { should_be_stored: !(content.nil? || content.to_s.strip == ''),
124
134
  interaction: content.nil? || content == '' ? nil : { who: 'AI', message: content },
125
135
  finished: true }
126
136
  )
@@ -128,7 +138,7 @@ module NanoBot
128
138
  end
129
139
 
130
140
  begin
131
- @client.chat(parameters: payload)
141
+ @client.chat(parameters: Logic::OpenAI::Tokens.apply_policies!(cartridge, payload))
132
142
  rescue StandardError => e
133
143
  raise e.class, e.response[:body] if e.response && e.response[:body]
134
144
 
@@ -136,7 +146,7 @@ module NanoBot
136
146
  end
137
147
  else
138
148
  begin
139
- result = @client.chat(parameters: payload)
149
+ result = @client.chat(parameters: Logic::OpenAI::Tokens.apply_policies!(cartridge, payload))
140
150
  rescue StandardError => e
141
151
  raise e.class, e.response[:body] if e.response && e.response[:body]
142
152
 
@@ -153,7 +163,9 @@ module NanoBot
153
163
  needs_another_round: true,
154
164
  interaction: { who: 'AI', message: nil, meta: { tool_calls: tools } } }
155
165
  )
156
- Tools.apply(cartridge, input[:tools], tools, feedback).each do |interaction|
166
+ Tools.apply(
167
+ cartridge, input[:tools], tools, feedback, Logic::OpenAI::Tools
168
+ ).each do |interaction|
157
169
  feedback.call({ should_be_stored: true, needs_another_round: true, interaction: })
158
170
  end
159
171
  end
@@ -0,0 +1,99 @@
1
+ # frozen_string_literal: true
2
+
3
+ require_relative '../embedding'
4
+ require_relative '../../logic/cartridge/safety'
5
+
6
+ require 'concurrent'
7
+
8
+ module NanoBot
9
+ module Components
10
+ module Providers
11
+ module Tools
12
+ def self.confirming(tool, feedback)
13
+ feedback.call(
14
+ { should_be_stored: false,
15
+ interaction: { who: 'AI', message: nil, meta: {
16
+ tool: { action: 'confirming', id: tool[:id], name: tool[:label], parameters: tool[:parameters] }
17
+ } } }
18
+ )
19
+ end
20
+
21
+ def self.apply(cartridge, function_cartridge, tools, feedback, tools_logic)
22
+ prepared_tools = tools_logic.prepare(function_cartridge, tools)
23
+
24
+ if Logic::Cartridge::Safety.confirmable?(cartridge)
25
+ prepared_tools.each { |tool| tool[:allowed] = confirming(tool, feedback) }
26
+ else
27
+ prepared_tools.each { |tool| tool[:allowed] = true }
28
+ end
29
+
30
+ futures = prepared_tools.map do |tool|
31
+ Concurrent::Promises.future do
32
+ if tool[:allowed]
33
+ process!(tool, feedback, function_cartridge, cartridge)
34
+ else
35
+ tool[:output] =
36
+ "We asked the user you're chatting with for permission, but the user did not allow you to run this tool or function."
37
+ tool
38
+ end
39
+ end
40
+ end
41
+
42
+ results = Concurrent::Promises.zip(*futures).value!
43
+
44
+ results.map do |applied_tool|
45
+ {
46
+ who: 'tool',
47
+ message: applied_tool[:output],
48
+ meta: { id: applied_tool[:id], name: applied_tool[:name] }
49
+ }
50
+ end
51
+ end
52
+
53
+ def self.process!(tool, feedback, _function_cartridge, cartridge)
54
+ feedback.call(
55
+ { should_be_stored: false,
56
+ interaction: { who: 'AI', message: nil, meta: {
57
+ tool: { action: 'executing', id: tool[:id], name: tool[:label], parameters: tool[:parameters] }
58
+ } } }
59
+ )
60
+
61
+ call = {
62
+ parameters: %w[parameters],
63
+ values: [tool[:parameters]],
64
+ safety: { sandboxed: Logic::Cartridge::Safety.sandboxed?(cartridge) }
65
+ }
66
+
67
+ if %i[fennel lua clojure].count { |key| !tool[:source][key].nil? } > 1
68
+ raise StandardError, 'conflicting tools'
69
+ end
70
+
71
+ if !tool[:source][:fennel].nil?
72
+ call[:source] = tool[:source][:fennel]
73
+ tool[:output] = Components::Embedding.fennel(**call)
74
+ elsif !tool[:source][:clojure].nil?
75
+ call[:source] = tool[:source][:clojure]
76
+ tool[:output] = Components::Embedding.clojure(**call)
77
+ elsif !tool[:source][:lua].nil?
78
+ call[:source] = tool[:source][:lua]
79
+ tool[:output] = Components::Embedding.lua(**call)
80
+ else
81
+ raise 'missing source code'
82
+ end
83
+
84
+ feedback.call(
85
+ { should_be_stored: false,
86
+ interaction: { who: 'AI', message: nil, meta: {
87
+ tool: {
88
+ action: 'responding', id: tool[:id], name: tool[:label],
89
+ parameters: tool[:parameters], output: tool[:output]
90
+ }
91
+ } } }
92
+ )
93
+
94
+ tool
95
+ end
96
+ end
97
+ end
98
+ end
99
+ end
@@ -63,6 +63,7 @@ module NanoBot
63
63
  behavior = Logic::Helpers::Hash.fetch(@cartridge, %i[behaviors boot]) || {}
64
64
 
65
65
  @state[:history] << {
66
+ at: Time.now,
66
67
  who: 'user',
67
68
  mode: mode.to_s,
68
69
  input: instruction,
@@ -78,6 +79,7 @@ module NanoBot
78
79
  behavior = Logic::Helpers::Hash.fetch(@cartridge, %i[behaviors interaction]) || {}
79
80
 
80
81
  @state[:history] << {
82
+ at: Time.now,
81
83
  who: 'user',
82
84
  mode: mode.to_s,
83
85
  input: message,
@@ -159,7 +161,10 @@ module NanoBot
159
161
  end
160
162
  end
161
163
 
162
- @state[:history] << event if feedback[:should_be_stored]
164
+ if feedback[:should_be_stored]
165
+ event[:at] = Time.now
166
+ @state[:history] << event
167
+ end
163
168
 
164
169
  if event[:output] && ((!feedback[:finished] && streaming) || (!streaming && feedback[:finished]))
165
170
  self.print(color ? Rainbow(event[:output]).send(color) : event[:output])
@@ -2,12 +2,21 @@
2
2
  services:
3
3
  nano-bots:
4
4
  image: ruby:3.2.2-slim-bookworm
5
- command: sh -c "apt-get update && apt-get install -y --no-install-recommends build-essential libffi-dev libsodium-dev lua5.4-dev curl && curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash && gem install nano-bots -v 1.1.2 && bash"
5
+ command: sh -c "apt-get update && apt-get install -y --no-install-recommends build-essential libffi-dev libsodium-dev lua5.4-dev curl && curl -s https://raw.githubusercontent.com/babashka/babashka/master/install | bash && gem install nano-bots -v 2.0.0 && bash"
6
6
  environment:
7
7
  OPENAI_API_ADDRESS: https://api.openai.com
8
8
  OPENAI_API_KEY: your-access-token
9
+
10
+ GOOGLE_API_KEY: your-api-key
11
+
12
+ GOOGLE_CREDENTIALS_FILE_PATH: /root/.config/google-credentials.json
13
+ GOOGLE_PROJECT_ID: your-project-id
14
+ GOOGLE_REGION: us-east4
15
+
9
16
  NANO_BOTS_ENCRYPTION_PASSWORD: UNSAFE
10
17
  NANO_BOTS_END_USER: your-user
18
+
11
19
  volumes:
20
+ - ./google-credentials.json:/root/.config/google-credentials.json
12
21
  - ./your-cartridges:/root/.local/share/nano-bots/cartridges
13
22
  - ./your-state-path:/root/.local/state/nano-bots
@@ -7,7 +7,14 @@ module NanoBot
7
7
  module Cartridge
8
8
  module Streaming
9
9
  def self.enabled?(cartridge, interface)
10
- return false if Helpers::Hash.fetch(cartridge, %i[provider settings stream]) == false
10
+ provider_stream = case Helpers::Hash.fetch(cartridge, %i[provider id])
11
+ when 'openai'
12
+ Helpers::Hash.fetch(cartridge, %i[provider settings stream])
13
+ when 'google'
14
+ Helpers::Hash.fetch(cartridge, %i[provider options stream])
15
+ end
16
+
17
+ return false if provider_stream == false
11
18
 
12
19
  specific_interface = Helpers::Hash.fetch(cartridge, [:interfaces, interface, :output, :stream])
13
20
 
@@ -0,0 +1,16 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'openai'
4
+
5
+ module NanoBot
6
+ module Logic
7
+ module Google
8
+ module Tokens
9
+ def self.apply_policies!(_cartridge, payload)
10
+ payload[:contents] = payload[:contents].map { |message| message.except(:_meta) }
11
+ payload
12
+ end
13
+ end
14
+ end
15
+ end
16
+ end
@@ -0,0 +1,60 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'json'
4
+ require 'babosa'
5
+
6
+ require_relative '../../helpers/hash'
7
+
8
+ module NanoBot
9
+ module Logic
10
+ module Google
11
+ module Tools
12
+ def self.prepare(cartridge, tools)
13
+ applies = []
14
+
15
+ tools = Marshal.load(Marshal.dump(tools))
16
+
17
+ tools.each do |tool|
18
+ tool = Helpers::Hash.symbolize_keys(tool)
19
+
20
+ cartridge.each do |candidate|
21
+ candidate_key = candidate[:name].to_slug.normalize.gsub('-', '_')
22
+ tool_key = tool[:functionCall][:name].to_slug.normalize.gsub('-', '_')
23
+
24
+ next unless candidate_key == tool_key
25
+
26
+ source = {}
27
+
28
+ source[:clojure] = candidate[:clojure] if candidate[:clojure]
29
+ source[:fennel] = candidate[:fennel] if candidate[:fennel]
30
+ source[:lua] = candidate[:lua] if candidate[:lua]
31
+
32
+ applies << {
33
+ label: candidate[:name],
34
+ name: tool[:functionCall][:name],
35
+ type: 'function',
36
+ parameters: tool[:functionCall][:args],
37
+ source:
38
+ }
39
+ end
40
+ end
41
+
42
+ raise 'missing tool' if applies.size != tools.size
43
+
44
+ applies
45
+ end
46
+
47
+ def self.adapt(cartridge)
48
+ output = {
49
+ name: cartridge[:name],
50
+ description: cartridge[:description]
51
+ }
52
+
53
+ output[:parameters] = (cartridge[:parameters] || { type: 'object', properties: {} })
54
+
55
+ output
56
+ end
57
+ end
58
+ end
59
+ end
60
+ end
@@ -0,0 +1,16 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'openai'
4
+
5
+ module NanoBot
6
+ module Logic
7
+ module OpenAI
8
+ module Tokens
9
+ def self.apply_policies!(_cartridge, payload)
10
+ payload[:messages] = payload[:messages].map { |message| message.except(:_meta) }
11
+ payload
12
+ end
13
+ end
14
+ end
15
+ end
16
+ end
@@ -1,6 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require 'json'
4
+ require 'babosa'
4
5
 
5
6
  require_relative '../../helpers/hash'
6
7
 
@@ -17,7 +18,10 @@ module NanoBot
17
18
  tool = Helpers::Hash.symbolize_keys(tool)
18
19
 
19
20
  cartridge.each do |candidate|
20
- next unless tool[:function][:name] == candidate[:name]
21
+ candidate_key = candidate[:name].to_slug.normalize.gsub('-', '_')
22
+ tool_key = tool[:function][:name].to_slug.normalize.gsub('-', '_')
23
+
24
+ next unless candidate_key == tool_key
21
25
 
22
26
  source = {}
23
27
 
@@ -27,6 +31,7 @@ module NanoBot
27
31
 
28
32
  applies << {
29
33
  id: tool[:id],
34
+ label: candidate[:name],
30
35
  name: tool[:function][:name],
31
36
  type: 'function',
32
37
  parameters: JSON.parse(tool[:function][:arguments]),
data/nano-bots.gemspec CHANGED
@@ -34,7 +34,7 @@ Gem::Specification.new do |spec|
34
34
  spec.add_dependency 'babosa', '~> 2.0'
35
35
  spec.add_dependency 'concurrent-ruby', '~> 1.2', '>= 1.2.2'
36
36
  spec.add_dependency 'dotenv', '~> 2.8', '>= 2.8.1'
37
- spec.add_dependency 'faraday', '~> 2.7', '>= 2.7.12'
37
+ spec.add_dependency 'gemini-ai', '~> 2.0'
38
38
  spec.add_dependency 'pry', '~> 0.14.2'
39
39
  spec.add_dependency 'rainbow', '~> 3.1', '>= 3.1.1'
40
40
  spec.add_dependency 'rbnacl', '~> 7.1', '>= 7.1.1'
@@ -30,5 +30,7 @@ interfaces:
30
30
  feedback: true
31
31
 
32
32
  provider:
33
+ options:
34
+ stream: true
33
35
  settings:
34
36
  stream: true
data/static/gem.rb CHANGED
@@ -3,10 +3,10 @@
3
3
  module NanoBot
4
4
  GEM = {
5
5
  name: 'nano-bots',
6
- version: '1.1.2',
6
+ version: '2.0.0',
7
7
  author: 'icebaker',
8
- summary: 'Ruby Implementation of Nano Bots: small, AI-powered bots',
9
- description: 'Ruby Implementation of Nano Bots: small, AI-powered bots easily shared as a single file, designed to support multiple providers such as Vicuna, OpenAI ChatGPT, Google PaLM, Alpaca, and LLaMA.',
8
+ summary: 'Ruby Implementation of Nano Bots: small, AI-powered bots for OpenAI ChatGPT and Google Gemini.',
9
+ description: 'Ruby Implementation of Nano Bots: small, AI-powered bots that can be easily shared as a single file, designed to support multiple providers such as OpenAI ChatGPT and Google Gemini, with support for calling Tools (Functions).',
10
10
  github: 'https://github.com/icebaker/ruby-nano-bots',
11
11
  gem_server: 'https://rubygems.org',
12
12
  license: 'MIT',
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: nano-bots
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.1.2
4
+ version: 2.0.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - icebaker
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2023-12-10 00:00:00.000000000 Z
11
+ date: 2023-12-16 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: babosa
@@ -65,25 +65,19 @@ dependencies:
65
65
  - !ruby/object:Gem::Version
66
66
  version: 2.8.1
67
67
  - !ruby/object:Gem::Dependency
68
- name: faraday
68
+ name: gemini-ai
69
69
  requirement: !ruby/object:Gem::Requirement
70
70
  requirements:
71
71
  - - "~>"
72
72
  - !ruby/object:Gem::Version
73
- version: '2.7'
74
- - - ">="
75
- - !ruby/object:Gem::Version
76
- version: 2.7.12
73
+ version: '2.0'
77
74
  type: :runtime
78
75
  prerelease: false
79
76
  version_requirements: !ruby/object:Gem::Requirement
80
77
  requirements:
81
78
  - - "~>"
82
79
  - !ruby/object:Gem::Version
83
- version: '2.7'
84
- - - ">="
85
- - !ruby/object:Gem::Version
86
- version: 2.7.12
80
+ version: '2.0'
87
81
  - !ruby/object:Gem::Dependency
88
82
  name: pry
89
83
  requirement: !ruby/object:Gem::Requirement
@@ -172,9 +166,9 @@ dependencies:
172
166
  - - "~>"
173
167
  - !ruby/object:Gem::Version
174
168
  version: 0.0.7
175
- description: 'Ruby Implementation of Nano Bots: small, AI-powered bots easily shared
176
- as a single file, designed to support multiple providers such as Vicuna, OpenAI
177
- ChatGPT, Google PaLM, Alpaca, and LLaMA.'
169
+ description: 'Ruby Implementation of Nano Bots: small, AI-powered bots that can be
170
+ easily shared as a single file, designed to support multiple providers such as OpenAI
171
+ ChatGPT and Google Gemini, with support for calling Tools (Functions).'
178
172
  email:
179
173
  executables:
180
174
  - nb
@@ -195,8 +189,9 @@ files:
195
189
  - components/embedding.rb
196
190
  - components/provider.rb
197
191
  - components/providers/base.rb
192
+ - components/providers/google.rb
198
193
  - components/providers/openai.rb
199
- - components/providers/openai/tools.rb
194
+ - components/providers/tools.rb
200
195
  - components/storage.rb
201
196
  - components/stream.rb
202
197
  - controllers/cartridges.rb
@@ -217,7 +212,10 @@ files:
217
212
  - logic/cartridge/streaming.rb
218
213
  - logic/cartridge/tools.rb
219
214
  - logic/helpers/hash.rb
215
+ - logic/providers/google/tokens.rb
216
+ - logic/providers/google/tools.rb
220
217
  - logic/providers/openai.rb
218
+ - logic/providers/openai/tokens.rb
221
219
  - logic/providers/openai/tools.rb
222
220
  - nano-bots.gemspec
223
221
  - ports/dsl/nano-bots.rb
@@ -250,8 +248,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
250
248
  - !ruby/object:Gem::Version
251
249
  version: '0'
252
250
  requirements: []
253
- rubygems_version: 3.3.3
251
+ rubygems_version: 3.4.22
254
252
  signing_key:
255
253
  specification_version: 4
256
- summary: 'Ruby Implementation of Nano Bots: small, AI-powered bots'
254
+ summary: 'Ruby Implementation of Nano Bots: small, AI-powered bots for OpenAI ChatGPT
255
+ and Google Gemini.'
257
256
  test_files: []
@@ -1,101 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- require_relative '../../embedding'
4
- require_relative '../../../logic/cartridge/safety'
5
-
6
- require 'concurrent'
7
-
8
- module NanoBot
9
- module Components
10
- module Providers
11
- class OpenAI < Base
12
- module Tools
13
- def self.confirming(tool, feedback)
14
- feedback.call(
15
- { should_be_stored: false,
16
- interaction: { who: 'AI', message: nil, meta: {
17
- tool: { action: 'confirming', id: tool[:id], name: tool[:name], parameters: tool[:parameters] }
18
- } } }
19
- )
20
- end
21
-
22
- def self.apply(cartridge, function_cartridge, tools, feedback)
23
- prepared_tools = NanoBot::Logic::OpenAI::Tools.prepare(function_cartridge, tools)
24
-
25
- if Logic::Cartridge::Safety.confirmable?(cartridge)
26
- prepared_tools.each { |tool| tool[:allowed] = confirming(tool, feedback) }
27
- else
28
- prepared_tools.each { |tool| tool[:allowed] = true }
29
- end
30
-
31
- futures = prepared_tools.map do |tool|
32
- Concurrent::Promises.future do
33
- if tool[:allowed]
34
- process!(tool, feedback, function_cartridge, cartridge)
35
- else
36
- tool[:output] =
37
- "We asked the user you're chatting with for permission, but the user did not allow you to run this tool or function."
38
- tool
39
- end
40
- end
41
- end
42
-
43
- results = Concurrent::Promises.zip(*futures).value!
44
-
45
- results.map do |applied_tool|
46
- {
47
- who: 'tool',
48
- message: applied_tool[:output],
49
- meta: { id: applied_tool[:id], name: applied_tool[:name] }
50
- }
51
- end
52
- end
53
-
54
- def self.process!(tool, feedback, _function_cartridge, cartridge)
55
- feedback.call(
56
- { should_be_stored: false,
57
- interaction: { who: 'AI', message: nil, meta: {
58
- tool: { action: 'executing', id: tool[:id], name: tool[:name], parameters: tool[:parameters] }
59
- } } }
60
- )
61
-
62
- call = {
63
- parameters: %w[parameters],
64
- values: [tool[:parameters]],
65
- safety: { sandboxed: Logic::Cartridge::Safety.sandboxed?(cartridge) }
66
- }
67
-
68
- if %i[fennel lua clojure].count { |key| !tool[:source][key].nil? } > 1
69
- raise StandardError, 'conflicting tools'
70
- end
71
-
72
- if !tool[:source][:fennel].nil?
73
- call[:source] = tool[:source][:fennel]
74
- tool[:output] = Components::Embedding.fennel(**call)
75
- elsif !tool[:source][:clojure].nil?
76
- call[:source] = tool[:source][:clojure]
77
- tool[:output] = Components::Embedding.clojure(**call)
78
- elsif !tool[:source][:lua].nil?
79
- call[:source] = tool[:source][:lua]
80
- tool[:output] = Components::Embedding.lua(**call)
81
- else
82
- raise 'missing source code'
83
- end
84
-
85
- feedback.call(
86
- { should_be_stored: false,
87
- interaction: { who: 'AI', message: nil, meta: {
88
- tool: {
89
- action: 'responding', id: tool[:id], name: tool[:name],
90
- parameters: tool[:parameters], output: tool[:output]
91
- }
92
- } } }
93
- )
94
-
95
- tool
96
- end
97
- end
98
- end
99
- end
100
- end
101
- end