ollama-ai 1.2.1 → 1.3.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 2b2cbe0dd2d75850aea0dddf66463e295dc3941205d66bbd5abbb1c8590fc8a7
- data.tar.gz: 802c5423e24119488d9112726704e056bbed29c70c0c98247dd89a757fdefaaf
+ metadata.gz: ea11236a89bbbd351372a7ebe35dfe0bff07ee8728fd81163a0119b104edc1e9
+ data.tar.gz: 50da7ae43a519cfa1c01f2c08664f481390bff4619a5be72b85d539abff490f3
  SHA512:
- metadata.gz: acaa09929dfd956842f026ff504cfc6cc3213c7ae829a39a901a6302b9a45694e23795698375e25f0773780275811575dc5f6eef8b22d940849d9fdf855c4bf2
- data.tar.gz: bd2758a1306d2a1a44b67c4662cb029f403100b3c2a34f44925d1d6b1bb514b2015154b004035927a5320ef9e137959d872d2f5361e13e6d8fd5767182ced122
+ metadata.gz: 0e06ff9b6927132829e9315c33a470b0f8282cb4edd8f2780a5ed79295356f4aa4cc2b0470fb4932d40270c9e3bf49268015bd18053b6f80682af6f41beea8cc
+ data.tar.gz: a02a9d87919270d2aca1091819ef407826cb934c40e856403875021900ce72cca80b8b3f36f1e39280ad7d61857511bf1307b837195812f11940970fca04d109
data/Gemfile CHANGED
@@ -6,5 +6,5 @@ gemspec
 
  group :test, :development do
  gem 'pry-byebug', '~> 3.10', '>= 3.10.1'
- gem 'rubocop', '~> 1.63', '>= 1.63.1'
+ gem 'rubocop', '~> 1.65'
  end
data/Gemfile.lock CHANGED
@@ -1,9 +1,10 @@
  PATH
  remote: .
  specs:
- ollama-ai (1.2.1)
- faraday (~> 2.9)
+ ollama-ai (1.3.0)
+ faraday (~> 2.10)
  faraday-typhoeus (~> 1.1)
+ typhoeus (~> 1.4, >= 1.4.1)
 
  GEM
  remote: https://rubygems.org/
@@ -13,21 +14,23 @@ GEM
  coderay (1.1.3)
  ethon (0.16.0)
  ffi (>= 1.15.0)
- faraday (2.9.0)
+ faraday (2.10.0)
  faraday-net_http (>= 2.0, < 3.2)
+ logger
  faraday-net_http (3.1.0)
  net-http
  faraday-typhoeus (1.1.0)
  faraday (~> 2.0)
  typhoeus (~> 1.4)
- ffi (1.16.3)
+ ffi (1.17.0)
  json (2.7.2)
  language_server-protocol (3.17.0.3)
- method_source (1.0.0)
+ logger (1.6.0)
+ method_source (1.1.0)
  net-http (0.4.1)
  uri
- parallel (1.24.0)
- parser (3.3.0.5)
+ parallel (1.25.1)
+ parser (3.3.4.0)
  ast (~> 2.4.1)
  racc
  pry (0.14.2)
@@ -36,36 +39,39 @@ GEM
  pry-byebug (3.10.1)
  byebug (~> 11.0)
  pry (>= 0.13, < 0.15)
- racc (1.7.3)
+ racc (1.8.0)
  rainbow (3.1.1)
- regexp_parser (2.9.0)
- rexml (3.2.6)
- rubocop (1.63.1)
+ regexp_parser (2.9.2)
+ rexml (3.3.2)
+ strscan
+ rubocop (1.65.0)
  json (~> 2.3)
  language_server-protocol (>= 3.17.0)
  parallel (~> 1.10)
  parser (>= 3.3.0.2)
  rainbow (>= 2.2.2, < 4.0)
- regexp_parser (>= 1.8, < 3.0)
+ regexp_parser (>= 2.4, < 3.0)
  rexml (>= 3.2.5, < 4.0)
  rubocop-ast (>= 1.31.1, < 2.0)
  ruby-progressbar (~> 1.7)
  unicode-display_width (>= 2.4.0, < 3.0)
- rubocop-ast (1.31.2)
- parser (>= 3.3.0.4)
+ rubocop-ast (1.31.3)
+ parser (>= 3.3.1.0)
  ruby-progressbar (1.13.0)
+ strscan (3.1.0)
  typhoeus (1.4.1)
  ethon (>= 0.9.0)
  unicode-display_width (2.5.0)
  uri (0.13.0)
 
  PLATFORMS
+ arm64-darwin-23
  x86_64-linux
 
  DEPENDENCIES
  ollama-ai!
  pry-byebug (~> 3.10, >= 3.10.1)
- rubocop (~> 1.63, >= 1.63.1)
+ rubocop (~> 1.65)
 
  BUNDLED WITH
  2.4.22
data/README.md CHANGED
@@ -9,7 +9,7 @@ A Ruby gem for interacting with [Ollama](https://ollama.ai)'s API that allows yo
  ## TL;DR and Quick Start
 
  ```ruby
- gem 'ollama-ai', '~> 1.2.1'
+ gem 'ollama-ai', '~> 1.3.0'
  ```
 
  ```ruby
@@ -65,6 +65,7 @@ Result:
  - [Installing](#installing)
  - [Usage](#usage)
  - [Client](#client)
+ - [Bearer Authentication](#bearer-authentication)
  - [Methods](#methods)
  - [generate: Generate a completion](#generate-generate-a-completion)
  - [Without Streaming Events](#without-streaming-events)
@@ -105,11 +106,11 @@ Result:
  ### Installing
 
  ```sh
- gem install ollama-ai -v 1.2.1
+ gem install ollama-ai -v 1.3.0
  ```
 
  ```sh
- gem 'ollama-ai', '~> 1.2.1'
+ gem 'ollama-ai', '~> 1.3.0'
  ```
 
  ## Usage
@@ -126,6 +127,34 @@ client = Ollama.new(
  )
  ```
 
+ #### Bearer Authentication
+
+ ```ruby
+ require 'ollama-ai'
+
+ client = Ollama.new(
+ credentials: {
+ address: 'http://localhost:11434',
+ bearer_token: 'eyJhbG...Qssw5c'
+ },
+ options: { server_sent_events: true }
+ )
+ ```
+
+ Remember that hardcoding your credentials in code is unsafe. It's preferable to use environment variables:
+
+ ```ruby
+ require 'ollama-ai'
+
+ client = Ollama.new(
+ credentials: {
+ address: 'http://localhost:11434',
+ bearer_token: ENV['OLLAMA_BEARER_TOKEN']
+ },
+ options: { server_sent_events: true }
+ )
+ ```
+
  ### Methods
 
  ```ruby
@@ -885,7 +914,7 @@ gem build ollama-ai.gemspec
 
  gem signin
 
- gem push ollama-ai-1.2.1.gem
+ gem push ollama-ai-1.3.0.gem
  ```
 
  ### Updating the README
data/components/errors.rb CHANGED
@@ -4,7 +4,7 @@ module Ollama
  module Errors
  class OllamaError < StandardError
  def initialize(message = nil)
- super(message)
+ super
  end
  end
 
data/controllers/client.rb CHANGED
@@ -24,6 +24,8 @@ module Ollama
  "#{config[:credentials][:address].to_s.sub(%r{/$}, '')}/"
  end
 
+ @bearer_token = config[:credentials][:bearer_token]
+
  @request_options = config.dig(:options, :connection, :request)
 
  @request_options = if @request_options.is_a?(Hash)
@@ -97,6 +99,7 @@ module Ollama
  response = Faraday.new(request: @request_options) do |faraday|
  faraday.adapter @faraday_adapter
  faraday.response :raise_error
+ faraday.request :authorization, 'Bearer', @bearer_token if @bearer_token
  end.send(method_to_call) do |request|
  request.url url
  request.headers['Content-Type'] = 'application/json'
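
For context, here is a minimal standalone sketch (not part of the diff) of what the new `faraday.request :authorization` line does, assuming Faraday 2.x and a token supplied through a hypothetical `OLLAMA_BEARER_TOKEN` environment variable: when a bearer token is configured, every request sent through the connection carries an `Authorization: Bearer <token>` header.

```ruby
# Standalone sketch: Faraday's authorization request middleware adds an
# "Authorization: Bearer <token>" header to each request on the connection.
require 'faraday'

bearer_token = ENV['OLLAMA_BEARER_TOKEN'] # assumed environment variable

connection = Faraday.new(url: 'http://localhost:11434') do |faraday|
  # Only register the middleware when a token is present, mirroring the guard above.
  faraday.request :authorization, 'Bearer', bearer_token if bearer_token
end

# Example request; /api/tags lists the models available on an Ollama server.
response = connection.get('api/tags')
puts response.status
```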
data/ollama-ai.gemspec CHANGED
@@ -29,8 +29,9 @@ Gem::Specification.new do |spec|
 
  spec.require_paths = ['ports/dsl']
 
- spec.add_dependency 'faraday', '~> 2.9'
+ spec.add_dependency 'faraday', '~> 2.10'
  spec.add_dependency 'faraday-typhoeus', '~> 1.1'
+ spec.add_dependency 'typhoeus', '~> 1.4', '>= 1.4.1'
 
  spec.metadata['rubygems_mfa_required'] = 'true'
  end
data/static/gem.rb CHANGED
@@ -3,7 +3,7 @@
  module Ollama
  GEM = {
  name: 'ollama-ai',
- version: '1.2.1',
+ version: '1.3.0',
  author: 'gbaptista',
  summary: 'Interact with Ollama API to run open source AI models locally.',
  description: "A Ruby gem for interacting with Ollama's API that allows you to run open source AI LLMs (Large Language Models) locally.",
data/template.md CHANGED
@@ -9,7 +9,7 @@ A Ruby gem for interacting with [Ollama](https://ollama.ai)'s API that allows yo
  ## TL;DR and Quick Start
 
  ```ruby
- gem 'ollama-ai', '~> 1.2.1'
+ gem 'ollama-ai', '~> 1.3.0'
  ```
 
  ```ruby
@@ -66,11 +66,11 @@ Result:
  ### Installing
 
  ```sh
- gem install ollama-ai -v 1.2.1
+ gem install ollama-ai -v 1.3.0
  ```
 
  ```sh
- gem 'ollama-ai', '~> 1.2.1'
+ gem 'ollama-ai', '~> 1.3.0'
  ```
 
  ## Usage
@@ -87,6 +87,34 @@ client = Ollama.new(
  )
  ```
 
+ #### Bearer Authentication
+
+ ```ruby
+ require 'ollama-ai'
+
+ client = Ollama.new(
+ credentials: {
+ address: 'http://localhost:11434',
+ bearer_token: 'eyJhbG...Qssw5c'
+ },
+ options: { server_sent_events: true }
+ )
+ ```
+
+ Remember that hardcoding your credentials in code is unsafe. It's preferable to use environment variables:
+
+ ```ruby
+ require 'ollama-ai'
+
+ client = Ollama.new(
+ credentials: {
+ address: 'http://localhost:11434',
+ bearer_token: ENV['OLLAMA_BEARER_TOKEN']
+ },
+ options: { server_sent_events: true }
+ )
+ ```
+
  ### Methods
 
  ```ruby
@@ -846,7 +874,7 @@ gem build ollama-ai.gemspec
 
  gem signin
 
- gem push ollama-ai-1.2.1.gem
+ gem push ollama-ai-1.3.0.gem
  ```
 
  ### Updating the README
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: ollama-ai
  version: !ruby/object:Gem::Version
- version: 1.2.1
+ version: 1.3.0
  platform: ruby
  authors:
  - gbaptista
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-04-13 00:00:00.000000000 Z
+ date: 2024-07-21 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: faraday
@@ -16,14 +16,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.9'
+ version: '2.10'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.9'
+ version: '2.10'
  - !ruby/object:Gem::Dependency
  name: faraday-typhoeus
  requirement: !ruby/object:Gem::Requirement
@@ -38,6 +38,26 @@ dependencies:
  - - "~>"
  - !ruby/object:Gem::Version
  version: '1.1'
+ - !ruby/object:Gem::Dependency
+ name: typhoeus
+ requirement: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.4'
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: 1.4.1
+ type: :runtime
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
+ requirements:
+ - - "~>"
+ - !ruby/object:Gem::Version
+ version: '1.4'
+ - - ">="
+ - !ruby/object:Gem::Version
+ version: 1.4.1
  description: A Ruby gem for interacting with Ollama's API that allows you to run open
  source AI LLMs (Large Language Models) locally.
  email: