ollama-ruby 1.2.1 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 0b6a67a947af787d3a67180531d7afb1390a009adf38616b55f32bc6c9301942
-  data.tar.gz: e7fb09439a06ede0a240f42f64048e88ac2d295fa50a8d3bc59dfed3bdb98b3a
+  metadata.gz: e26fc8b8ae9847d35f0422d1b4d8915c496b3a0d96b8ac202b7928c0f9ae7bd0
+  data.tar.gz: 802709860c8611397faf70b4f4fa7af41a0bebc2c5a642d97163b6337e6326e1
 SHA512:
-  metadata.gz: acc5e14fca8df34795a2cca585ff56157023b14dbd53c10995bf1751a39d2f137dbb9c5a26bbb2c58dc7a2ac1402d56ceea37eaf744189a5fc191f3532a0e57b
-  data.tar.gz: b8f0357fa9734eed66dd7b9013df7f6e464d3a0af7bd00acd11552dfffb15951b18d12bd2ab7c2c45736a735f4ee4f9ae2900d96415cf30bf9ff6a401e4dd3f8
+  metadata.gz: 26c44972749476a2fe6af10c97f7c18482d0ecca058e4ef3b416ec7e019d0109d6f5d24f7266cb37ddb420f59cd2c939a21bed44ab5db0180b1988bc4a8350de
+  data.tar.gz: 6be99709d688a678a4d6bbb12501cb0d758a5e794f76cb8310ce315c7956c53af456e8148bf05962fd4a8801c9f17f06187d84631182d03609920870aa8a84d4
data/CHANGES.md CHANGED
@@ -1,5 +1,12 @@
 # Changes
 
+## 2025-07-06 v1.3.0
+
+* Added toggleable streaming in Markdown handler:
+  * Added conditional handler initialization
+  * Implemented toggleable streaming in Markdown handler
+  * Tested non-streaming (`stream: false`) behavior
+
 ## 2025-06-02 v1.2.1
 
 * Added thinking mode option to CLI:
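The `stream: false` option mentioned above can be passed to the Markdown handler when using the gem programmatically. A minimal sketch, assuming a local Ollama server and the gem's usual `generate` keywords (`model:`, `prompt:`); the model name and prompt are placeholders:

    require 'ollama'

    # With stream: false the handler prints each response it receives once,
    # instead of clearing the screen and re-rendering the accumulated text
    # for every streamed chunk.
    handler = Ollama::Handlers::Markdown.new(stream: false)

    client = Ollama::Client.new(base_url: 'http://localhost:11434')
    client.generate(model: 'llama3.1', prompt: 'Why is the sky blue?', &handler)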
data/bin/ollama_cli CHANGED
@@ -8,24 +8,26 @@ require 'tins/xt/secure_write'
 require 'json'
 require 'tmpdir'
 
-class ChatStart
-  include Ollama::Handlers::Concern
-
-  def initialize(output: $stdout)
-    super
-    @output.sync = true
-    @content = ''
-  end
+module Ollama::Handlers
+  class ChatStart
+    include Ollama::Handlers::Concern
+
+    def initialize(output: $stdout)
+      super
+      @output.sync = true
+      @content = ''
+    end
 
-  attr_reader :content
+    attr_reader :content
 
-  def call(response)
-    if content = response.response
-      @content << content
-      @output << content
+    def call(response)
+      if content = response.response
+        @content << content
+        @output << content
+      end
+      response.done and @output.puts
+      self
     end
-    response.done and @output.puts
-    self
   end
 end
 
@@ -115,8 +117,15 @@ if ENV['DEBUG'].to_i == 1
   EOT
 end
 
-handler = Object.const_get(opts[?H])
-handler == ChatStart and handler = handler.new
+handler = Ollama::Handlers.const_get(opts[?H])
+handler = case
+when handler == Ollama::Handlers::ChatStart
+  handler.new
+when handler == Ollama::Handlers::Markdown
+  handler.new(stream: !!opts[?S])
+else
+  handler
+end
 
 Client.new(base_url:, read_timeout: 120).generate(
   model:,
@@ -128,7 +137,7 @@ Client.new(base_url:, read_timeout: 120).generate(
   &handler
 )
 
-if handler.is_a?(ChatStart)
+if handler.is_a?(Ollama::Handlers::ChatStart)
   filename = File.join(Dir.tmpdir, 'chat_start_%u.json' % $$)
   File.secure_write(filename) do |out|
     JSON.dump(
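Net effect of the hunks above: the `-H` option is now resolved inside the `Ollama::Handlers` namespace, `ChatStart` is still instantiated eagerly, and the `Markdown` handler is built with `stream: !!opts[?S]`, i.e. it only streams when `-S` is also given. A hedged sketch of that lookup, using handler names that ship with the gem:

    # -H Markdown without -S yields a non-streaming Markdown handler.
    handler = Ollama::Handlers.const_get('Markdown')  # => Ollama::Handlers::Markdown
    handler = handler.new(stream: false)

    # Other bundled handlers, e.g. Print, are still passed on as bare classes.
    Ollama::Handlers.const_get('Print')               # => Ollama::Handlers::Print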
data/lib/ollama/handlers/markdown.rb CHANGED
@@ -5,17 +5,23 @@ class Ollama::Handlers::Markdown
   include Ollama::Handlers::Concern
   include Term::ANSIColor
 
-  def initialize(output: $stdout)
-    super
-    @output.sync = true
+  def initialize(output: $stdout, stream: true)
+    super(output:)
+    @stream = stream
+    @output.sync = @stream
     @content = ''
   end
 
   def call(response)
     if content = response.response || response.message&.content
-      @content << content
-      markdown_content = Kramdown::ANSI.parse(@content)
-      @output.print clear_screen, move_home, markdown_content
+      if @stream
+        @content << content
+        markdown_content = Kramdown::ANSI.parse(@content)
+        @output.print clear_screen, move_home, markdown_content
+      else
+        markdown_content = Kramdown::ANSI.parse(content)
+        @output.print markdown_content
+      end
     end
     self
   end
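For reference, a hedged sketch of driving the handler above directly, using a simple stand-in for the API response object (`Chunk` is not part of the gem; only the `response` and `message` readers matter to this handler):

    require 'ollama'

    # Minimal stand-in for an API response chunk.
    Chunk = Struct.new(:response, :message, :done)

    streaming = Ollama::Handlers::Markdown.new                 # stream: true is the default
    one_shot  = Ollama::Handlers::Markdown.new(stream: false)  # the new toggle

    # Streaming: each call re-parses everything received so far, clears the
    # screen and redraws it.
    streaming.call(Chunk.new("# Hello\n\n", nil, false))
    streaming.call(Chunk.new("Some *markdown* body.\n", nil, true))

    # Non-streaming: the given text is parsed and printed once, no clearing.
    one_shot.call(Chunk.new("# Hello\n\nSome *markdown* body.\n", nil, true))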
data/lib/ollama/version.rb CHANGED
@@ -1,6 +1,6 @@
 module Ollama
   # Ollama version
-  VERSION = '1.2.1'
+  VERSION = '1.3.0'
   VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
   VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
   VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama-ruby.gemspec CHANGED
@@ -1,9 +1,9 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama-ruby 1.2.1 ruby lib
+# stub: ollama-ruby 1.3.0 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "ollama-ruby".freeze
-  s.version = "1.2.1".freeze
+  s.version = "1.3.0".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
@@ -18,7 +18,7 @@ Gem::Specification.new do |s|
   s.licenses = ["MIT".freeze]
   s.rdoc_options = ["--title".freeze, "Ollama-ruby - Interacting with the Ollama API".freeze, "--main".freeze, "README.md".freeze]
   s.required_ruby_version = Gem::Requirement.new("~> 3.1".freeze)
-  s.rubygems_version = "3.6.7".freeze
+  s.rubygems_version = "3.6.9".freeze
   s.summary = "Interacting with the Ollama API".freeze
   s.test_files = ["spec/ollama/client/doc_spec.rb".freeze, "spec/ollama/client_spec.rb".freeze, "spec/ollama/commands/chat_spec.rb".freeze, "spec/ollama/commands/copy_spec.rb".freeze, "spec/ollama/commands/create_spec.rb".freeze, "spec/ollama/commands/delete_spec.rb".freeze, "spec/ollama/commands/embed_spec.rb".freeze, "spec/ollama/commands/embeddings_spec.rb".freeze, "spec/ollama/commands/generate_spec.rb".freeze, "spec/ollama/commands/ps_spec.rb".freeze, "spec/ollama/commands/pull_spec.rb".freeze, "spec/ollama/commands/push_spec.rb".freeze, "spec/ollama/commands/show_spec.rb".freeze, "spec/ollama/commands/tags_spec.rb".freeze, "spec/ollama/commands/version_spec.rb".freeze, "spec/ollama/handlers/collector_spec.rb".freeze, "spec/ollama/handlers/dump_json_spec.rb".freeze, "spec/ollama/handlers/dump_yaml_spec.rb".freeze, "spec/ollama/handlers/markdown_spec.rb".freeze, "spec/ollama/handlers/nop_spec.rb".freeze, "spec/ollama/handlers/print_spec.rb".freeze, "spec/ollama/handlers/progress_spec.rb".freeze, "spec/ollama/handlers/say_spec.rb".freeze, "spec/ollama/handlers/single_spec.rb".freeze, "spec/ollama/image_spec.rb".freeze, "spec/ollama/message_spec.rb".freeze, "spec/ollama/options_spec.rb".freeze, "spec/ollama/tool_spec.rb".freeze, "spec/spec_helper.rb".freeze]
 
data/spec/ollama/handlers/markdown_spec.rb CHANGED
@@ -41,4 +41,14 @@ RSpec.describe Ollama::Handlers::Markdown do
     response = double('response', response: nil, message: nil, done: true)
     markdown.call(response)
   end
+
+  it 'displays markdown response in one go when stream is false' do
+    output = double('output', :sync= => false)
+    expect(output).to receive(:print).with(ansi).once
+    markdown = described_class.new(output:, stream: false)
+    response = double('response', response: md, done: true)
+    markdown.call(response)
+    response = double('response', response: nil, message: nil, done: true)
+    markdown.call(response)
+  end
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: ollama-ruby
 version: !ruby/object:Gem::Version
-  version: 1.2.1
+  version: 1.3.0
 platform: ruby
 authors:
 - Florian Frank
@@ -362,7 +362,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.6.7
+rubygems_version: 3.6.9
 specification_version: 4
 summary: Interacting with the Ollama API
 test_files: