ollama-ai 1.0.0 → 1.0.1

@@ -3,7 +3,7 @@
 require 'faraday'
 require 'json'
 
-require_relative '../ports/dsl/ollama-ai/errors'
+require_relative '../components/errors'
 
 module Ollama
   module Controllers
@@ -79,7 +79,7 @@ module Ollama
       url = "#{@address}#{path}"
 
       if !callback.nil? && !server_sent_events_enabled
-        raise BlockWithoutServerSentEventsError,
+        raise Errors::BlockWithoutServerSentEventsError,
               'You are trying to use a block without Server Sent Events (SSE) enabled.'
       end
 
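The SSE guard above now raises a namespaced error. A minimal sketch of the call pattern that trips it, assuming the client constructor and `generate` signature from the gem's README (the model name is illustrative):

    require 'ollama-ai'

    client = Ollama.new(
      credentials: { address: 'http://localhost:11434' },
      options: { server_sent_events: false }
    )

    # Passing a block while SSE is disabled raises
    # Ollama::Errors::BlockWithoutServerSentEventsError.
    client.generate({ model: 'llama2', prompt: 'Hi!' }) do |event, raw|
      puts event
    end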
@@ -125,7 +125,7 @@ module Ollama
 
       results.map { |result| result[:event] }
     rescue Faraday::Error => e
-      raise RequestError.new(e.message, request: e, payload:)
+      raise Errors::RequestError.new(e.message, request: e, payload:)
     end
 
     def safe_parse_json(raw)
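Faraday failures are likewise re-raised under the `Errors` namespace, keeping the original exception and the request payload. A hedged sketch of rescuing it from calling code, assuming the README's `chat` API (whether the wrapper exposes `request`/`payload` readers is inferred from the constructor keywords, not confirmed here):

    begin
      client.chat(
        { model: 'llama2', messages: [{ role: 'user', content: 'Hi!' }] }
      )
    rescue Ollama::Errors::RequestError => e
      # e wraps the underlying Faraday::Error; e.message carries its text.
      warn "Request failed: #{e.message}"
    end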
data/ollama-ai.gemspec CHANGED
@@ -29,7 +29,7 @@ Gem::Specification.new do |spec|
 
   spec.require_paths = ['ports/dsl']
 
-  spec.add_dependency 'faraday', '~> 2.8'
+  spec.add_dependency 'faraday', '~> 2.9'
 
   spec.metadata['rubygems_mfa_required'] = 'true'
 end
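The pessimistic constraint semantics here are standard RubyGems behavior: '~> 2.9' permits any 2.x release from 2.9 upward. The bumped line is equivalent to:

    # '~> 2.9' is shorthand for this pair of constraints:
    spec.add_dependency 'faraday', '>= 2.9', '< 3.0'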
data/static/gem.rb CHANGED
@@ -3,7 +3,7 @@
 module Ollama
   GEM = {
     name: 'ollama-ai',
-    version: '1.0.0',
+    version: '1.0.1',
     author: 'gbaptista',
     summary: 'Interact with Ollama API to run open source AI models locally.',
     description: "A Ruby gem for interacting with Ollama's API that allows you to run open source AI LLMs (Large Language Models) locally.",
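Assuming static/gem.rb is loaded with the library at runtime (the gemspec reads it for the release version, but runtime loading is an assumption here), the bump is observable directly:

    require 'ollama-ai'

    # Assumes Ollama::GEM is defined once the gem is loaded.
    puts Ollama::GEM[:version] # => '1.0.1'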