everyai 0.0.0 → 0.0.1
- checksums.yaml +4 -4
- data/lib/everyai.rb +27 -9
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: cfb5db5a2396f0ba3be48382a13bf6be48ccdf9baf0dae4dacff56ec91d69ca0
+  data.tar.gz: bc289476d96b29e2034ff4a6522cb5fa5f435f597a69fbd3f38fe320ffc8abbe
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0f23773f74789802b9891d0ca030209574da59e56a930e421a5935e9fa1897c8bf75477486c19ee004747e6ddf7631d0c9ba90c9c6ff292bada55987fddf7a48
+  data.tar.gz: '082951e08d8bb58b21c6eff2175849d4a2d9fd1ad4c403ea8f1416d4f62881cae4c5ac4144a938bfb9f00da83640311e96734edd36d820b059eaabc8f92fa43e'
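The digests above are the ones RubyGems stores inside the package itself (checksums.yaml.gz), so they can be recomputed from a local copy. A minimal verification sketch in Ruby, assuming the package has been fetched with `gem fetch everyai -v 0.0.1` and sits in the working directory as everyai-0.0.1.gem (that path is an assumption; the rest is Ruby stdlib plus rubygems):

require "rubygems/package"
require "digest"
require "yaml"
require "zlib"

# A .gem file is a plain tar archive whose members include metadata.gz,
# data.tar.gz and checksums.yaml.gz; the diff above shows the decompressed
# checksums.yaml. Read the members, then recompute and compare the SHA256s.
gem_file = "everyai-0.0.1.gem" # assumed local path from `gem fetch`

members = {}
File.open(gem_file, "rb") do |io|
  Gem::Package::TarReader.new(io) do |tar|
    tar.each { |entry| members[entry.full_name] = entry.read }
  end
end

recorded = YAML.safe_load(Zlib.gunzip(members.fetch("checksums.yaml.gz")))

%w[metadata.gz data.tar.gz].each do |name|
  actual = Digest::SHA256.hexdigest(members.fetch(name))
  status = actual == recorded["SHA256"][name] ? "OK" : "MISMATCH"
  puts "#{name}: #{status} #{actual}"
end

Gem::Package.new("everyai-0.0.1.gem").verify runs the same kind of comparison, covering both the SHA256 and SHA512 entries, in a single call.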
data/lib/everyai.rb
CHANGED
@@ -1,18 +1,36 @@
 require "ollama-ai"
 
 class Everyai
+  attr_reader :client
+  def initialize
+    @client = Ollama.new(
+      credentials: { address: 'http://localhost:11434' },
+      options: { server_sent_events: true }
+    )
+  end
+
+  def generate(prompt, model)
+    result = client.generate(
+      { model: model,
+        prompt: prompt }
+    )
+    result.map { |r| r['response']}.join
+  rescue Ollama::Errors::RequestError
+    puts "This Ollama model is not installed. Type y to install or any key to continue"
+    answer = gets.chomp.downcase
+    `ollama run #{model}` if answer == "y"
+  end
+
   class << self
     def generate(prompt, model: "llama3.1")
-
-
-
-      )
+      @ai ||= new
+      @ai.generate(prompt, model)
+    end
 
-
-
-
-      )
-      result.map { |r| r['response']}.join
+    def ls
+      puts "Listing Ollama AI Models"
+      str = `ollama list`
+      puts str.split("\n")
     end
   end
 end
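In 0.0.1 the Ollama client construction moves into initialize, the class-level Everyai.generate memoizes a single instance, and the new Everyai.ls shells out to `ollama list`. A minimal usage sketch, assuming the gem is installed, an Ollama server is listening on http://localhost:11434, and the llama3.1 model has been pulled (the prompts are illustrative):

require "everyai"

# Print the locally installed Ollama models (wraps `ollama list`).
Everyai.ls

# Class-level entry point: builds one Everyai instance on first use and
# reuses it; the model keyword defaults to "llama3.1".
puts Everyai.generate("Write a haiku about Ruby gems.")

# The instance method takes the model name as a positional argument.
ai = Everyai.new
puts ai.generate("Summarize RubyGems in one sentence.", "llama3.1")

If the requested model is not installed, generate rescues Ollama::Errors::RequestError and offers to run `ollama run #{model}` instead of raising.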