everyai 0.0.5 → 0.0.6
- checksums.yaml +4 -4
- data/lib/everyai.rb +5 -5
- metadata +1 -1
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a0d7d1e9d63e7fd278a1fb7d64aa992c38bd55a8dc1663c486af894737343faf
+  data.tar.gz: 3115c0159b908b21ffc2c63e6686e87a2dd6f7ab33ad77fc8f89393cac06921f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7fac790345ee17c259018dbc1f5df69573a9d12206784e20c7dd37348b36b174158c2eb36d1b0b3229672bee948c4167e8bb93a926dc0c81c3f1b3e4b1f3099f
+  data.tar.gz: 4d3606fa5ddb6373b48dccc90f9676e4e1d0ee4945b074f4f41f7fe5d93eedccb4d034a2330a7ee4c51c56186c98793444e00f5c59dea89f1b1b75643cba249c
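The published checksums can be checked against a local copy of the gem. A minimal sketch, assuming everyai-0.0.6.gem has been fetched with "gem fetch everyai -v 0.0.6"; the filename and the use of Gem::Package::TarReader are illustrative, not part of the gem itself:

require 'digest'
require 'rubygems/package'

# Hash the data.tar.gz entry inside the fetched .gem archive and compare it
# with the SHA256 value recorded in checksums.yaml above.
File.open("everyai-0.0.6.gem", "rb") do |gem_file|
  Gem::Package::TarReader.new(gem_file).each do |entry|
    next unless entry.full_name == "data.tar.gz"
    digest = Digest::SHA256.hexdigest(entry.read)
    puts digest == "3115c0159b908b21ffc2c63e6686e87a2dd6f7ab33ad77fc8f89393cac06921f"
  end
end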
data/lib/everyai.rb CHANGED
@@ -3,7 +3,7 @@ require 'uri'
 require 'json'
 
 class Everyai
-  DOMAIN = "https://every-llm.com" #"http://localhost:3000"
+  DOMAIN = "https://every-llm.com" #"http://localhost:3000"
   API_GENERATIONS_PATH = "#{DOMAIN}/api/v1/generations"
 
   class << self
@@ -17,9 +17,9 @@ class Everyai
       @api_key
     end
 
-    def generate(prompt, model: "llama3.1")
+    def generate(prompt, model: "llama3.1", **model_options)
       url = URI.parse(API_GENERATIONS_PATH)
-      response = Net::HTTP.post_form(url, { "prompt": prompt, "model": model, "token": api_key })
+      response = Net::HTTP.post_form(url, { "prompt": prompt, "model": model, "token": api_key, "options": model_options.to_json })
       body = JSON.parse(response.body)
       if response.kind_of? Net::HTTPSuccess
         body["results"]
@@ -32,8 +32,8 @@ class Everyai
       generate(prompt, model: model )
     end
 
-    def llama(prompt, model: "llama3.1")
-      generate(prompt, model: model)
+    def llama(prompt, model: "llama3.1", context: nil)
+      generate(prompt, model: model, context: context)
     end
 
     def anthropic(prompt, model: "anthropic")