ollama-ai 1.0.0 → 1.2.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/Gemfile +1 -1
- data/Gemfile.lock +23 -14
- data/README.md +264 -183
- data/controllers/client.rb +9 -3
- data/ollama-ai.gemspec +2 -1
- data/static/gem.rb +1 -1
- data/tasks/generate-readme.clj +1 -1
- data/template.md +231 -154
- metadata +22 -8
data/controllers/client.rb
CHANGED
@@ -1,9 +1,10 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
3
|
require 'faraday'
|
4
|
+
require 'faraday/typhoeus'
|
4
5
|
require 'json'
|
5
6
|
|
6
|
-
require_relative '../
|
7
|
+
require_relative '../components/errors'
|
7
8
|
|
8
9
|
module Ollama
|
9
10
|
module Controllers
|
@@ -12,6 +13,8 @@ module Ollama
|
|
12
13
|
|
13
14
|
ALLOWED_REQUEST_OPTIONS = %i[timeout open_timeout read_timeout write_timeout].freeze
|
14
15
|
|
16
|
+
DEFAULT_FARADAY_ADAPTER = :typhoeus
|
17
|
+
|
15
18
|
def initialize(config)
|
16
19
|
@server_sent_events = config.dig(:options, :server_sent_events)
|
17
20
|
|
@@ -30,6 +33,8 @@ module Ollama
|
|
30
33
|
else
|
31
34
|
{}
|
32
35
|
end
|
36
|
+
|
37
|
+
@faraday_adapter = config.dig(:options, :connection, :adapter) || DEFAULT_FARADAY_ADAPTER
|
33
38
|
end
|
34
39
|
|
35
40
|
def generate(payload, server_sent_events: nil, &callback)
|
@@ -79,7 +84,7 @@ module Ollama
|
|
79
84
|
url = "#{@address}#{path}"
|
80
85
|
|
81
86
|
if !callback.nil? && !server_sent_events_enabled
|
82
|
-
raise BlockWithoutServerSentEventsError,
|
87
|
+
raise Errors::BlockWithoutServerSentEventsError,
|
83
88
|
'You are trying to use a block without Server Sent Events (SSE) enabled.'
|
84
89
|
end
|
85
90
|
|
@@ -90,6 +95,7 @@ module Ollama
|
|
90
95
|
partial_json = ''
|
91
96
|
|
92
97
|
response = Faraday.new(request: @request_options) do |faraday|
|
98
|
+
faraday.adapter @faraday_adapter
|
93
99
|
faraday.response :raise_error
|
94
100
|
end.send(method_to_call) do |request|
|
95
101
|
request.url url
|
@@ -125,7 +131,7 @@ module Ollama
|
|
125
131
|
|
126
132
|
results.map { |result| result[:event] }
|
127
133
|
rescue Faraday::Error => e
|
128
|
-
raise RequestError.new(e.message, request: e, payload:)
|
134
|
+
raise Errors::RequestError.new(e.message, request: e, payload:)
|
129
135
|
end
|
130
136
|
|
131
137
|
def safe_parse_json(raw)
|
data/ollama-ai.gemspec
CHANGED
@@ -29,7 +29,8 @@ Gem::Specification.new do |spec|
|
|
29
29
|
|
30
30
|
spec.require_paths = ['ports/dsl']
|
31
31
|
|
32
|
-
spec.add_dependency 'faraday', '~> 2.7'
|
32
|
+
spec.add_dependency 'faraday', '~> 2.9'
|
33
|
+
spec.add_dependency 'faraday-typhoeus', '~> 1.1'
|
33
34
|
|
34
35
|
spec.metadata['rubygems_mfa_required'] = 'true'
|
35
36
|
end
|
data/static/gem.rb
CHANGED
@@ -3,7 +3,7 @@
|
|
3
3
|
module Ollama
|
4
4
|
GEM = {
|
5
5
|
name: 'ollama-ai',
|
6
|
-
version: '1.0.0',
|
6
|
+
version: '1.2.0',
|
7
7
|
author: 'gbaptista',
|
8
8
|
summary: 'Interact with Ollama API to run open source AI models locally.',
|
9
9
|
description: "A Ruby gem for interacting with Ollama's API that allows you to run open source AI LLMs (Large Language Models) locally.",
|