everyai 0.0.0 → 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in the public registry.
Files changed (3)
  1. checksums.yaml +4 -4
  2. data/lib/everyai.rb +27 -9
  3. metadata +1 -1
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 84a4a9deb6315f3817a631aa86f93a8c4be163b9b05508383907a32aabb42d89
-   data.tar.gz: 69e82ad21f67ebab33f563e52c5ff8ff8d86940d84ff76681a96107ca3dd3af2
+   metadata.gz: cfb5db5a2396f0ba3be48382a13bf6be48ccdf9baf0dae4dacff56ec91d69ca0
+   data.tar.gz: bc289476d96b29e2034ff4a6522cb5fa5f435f597a69fbd3f38fe320ffc8abbe
  SHA512:
-   metadata.gz: e667fccccf7eeb8b303224946b169ac6fb4d7617de4f0fbf803525469c123646e865c19a2f27483084ed50851dfe35f45bf8ac311952720aea47f7f20703722f
-   data.tar.gz: 4aca7a987e8a214979a3a8f7cec8acd9f331933b993a9f1069f7c388e50ec3a5d7944ed5f19a7849809d59a954b9fe1fb762549a2feea0b4fcfbf1bb95220081
+   metadata.gz: 0f23773f74789802b9891d0ca030209574da59e56a930e421a5935e9fa1897c8bf75477486c19ee004747e6ddf7631d0c9ba90c9c6ff292bada55987fddf7a48
+   data.tar.gz: '082951e08d8bb58b21c6eff2175849d4a2d9fd1ad4c403ea8f1416d4f62881cae4c5ac4144a938bfb9f00da83640311e96734edd36d820b059eaabc8f92fa43e'
data/lib/everyai.rb CHANGED
@@ -1,18 +1,36 @@
  require "ollama-ai"

  class Everyai
+   attr_reader :client
+   def initialize
+     @client = Ollama.new(
+       credentials: { address: 'http://localhost:11434' },
+       opiions: { server_sent_events: true }
+     )
+   end
+
+   def generate(prompt, model)
+     result = client.generate(
+       { model: model,
+         prompt: prompt }
+     )
+     result.map { |r| r['response']}.join
+   rescue Ollama::Errors::RequestError
+     puts "This Ollama model is not installed. Type y to install or any key to continue"
+     answer = gets.chomp.downcase
+     `ollama run #{model}` if answer == "y"
+   end
+
    class << self
      def generate(prompt, model: "llama3.1")
-       client = Ollama.new(
-         credentials: { address: 'http://localhost:11434' },
-         opiions: { server_sent_events: true }
-       )
+       @ai ||= new
+       @ai.generate(prompt, model)
+     end

-       result = client.generate(
-         { model: model,
-           prompt: prompt }
-       )
-       result.map { |r| r['response']}.join
+     def ls
+       puts "Listing Ollama AI Models"
+       str = `ollama list`
+       puts str.split("\n")
      end
    end
  end
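
The data/lib/everyai.rb change is the substance of 0.0.1: client construction moves into an instance (attr_reader :client plus initialize), the class-level Everyai.generate memoizes a single instance in @ai and delegates to it, a missing model is rescued via Ollama::Errors::RequestError with an offer to shell out to `ollama run`, and a new Everyai.ls class method shells out to `ollama list`. Note that the published code still passes opiions: (apparently a typo for the ollama-ai gem's options: keyword), so server_sent_events is presumably never actually enabled. Below is a minimal usage sketch of the 0.0.1 surface, not part of the package itself, assuming a local Ollama server at http://localhost:11434 with the default llama3.1 model already pulled:

  require "everyai"

  # Class-level entry point: lazily builds one Everyai instance (which wires up
  # the Ollama client) and returns the model's response chunks joined into a String.
  puts Everyai.generate("Why is the sky blue?")

  # The model is a keyword argument on the class method...
  puts Everyai.generate("Name three Ruby web frameworks", model: "llama3.1")

  # ...but a positional argument on the instance method added in 0.0.1.
  ai = Everyai.new
  puts ai.generate("Name three Ruby web frameworks", "llama3.1")

  # Prints the locally installed Ollama models by shelling out to `ollama list`.
  Everyai.ls

If the requested model is not installed, the instance method prints a prompt and runs `ollama run <model>` when the user answers y, pulling the model interactively.
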
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: everyai
  version: !ruby/object:Gem::Version
-   version: 0.0.0
+   version: 0.0.1
  platform: ruby
  authors:
  - Enoch Tamulonis