ollama_chat 0.0.25 → 0.0.27

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
data/lib/ollama_chat/switches.rb CHANGED
@@ -1,4 +1,28 @@
+ # A module that provides switch functionality for configuring application
+ # behavior.
+ #
+ # The Switches module encapsulates various toggle switches used throughout the
+ # OllamaChat application to control different features and settings such as
+ # streaming, thinking, markdown output, voice output, embedding, and location
+ # information. These switches allow users to dynamically enable or disable
+ # specific functionalities during a chat session.
+ #
+ # @example Toggling a switch on/off
+ # switch = OllamaChat::Switches::Switch.new(value: false, msg: { true => 'Enabled', false => 'Disabled' })
+ # switch.toggle # Turns the switch on
+ # switch.toggle # Turns the switch off
  module OllamaChat::Switches
+ # A module that provides switch state checking functionality.
+ #
+ # The CheckSwitch module adds methods for checking the boolean state of
+ # switches and displaying their current status. It's designed to be included
+ # in switch classes to provide consistent behavior for querying switch states
+ # and outputting status messages.
+ #
+ # @example Checking switch states
+ # switch = OllamaChat::Switches::Switch.new(value: true, msg: { true => 'On', false => 'Off' })
+ # switch.on? # Returns true
+ # switch.off? # Returns false
  module CheckSwitch
  extend Tins::Concern

@@ -23,6 +47,19 @@ module OllamaChat::Switches
  end
  end

+ # A switch class that manages boolean state with toggle and set
+ # functionality.
+ #
+ # The Switch class provides a simple way to manage boolean configuration
+ # options with methods to toggle, set, and query the current state. It
+ # includes messaging capabilities to provide feedback when the state changes.
+ #
+ # @example Creating and using a switch
+ # switch = Switch.new(value: false, msg: { true => 'Enabled', false => 'Disabled' })
+ # switch.toggle # Turns the switch on
+ # switch.value # Returns true
+ # switch.off? # Returns false
+ # switch.on? # Returns true
  class Switch
  # The initialize method sets up the switch with a default value and
  # message.
@@ -64,6 +101,18 @@ module OllamaChat::Switches
  include CheckSwitch
  end

+ # A switch class that manages a boolean state based on a proc value.
+ #
+ # The CombinedSwitch class provides a way to manage a boolean configuration
+ # option where the state is determined by evaluating a stored proc. This is
+ # useful for complex conditions that depend on multiple factors or dynamic
+ # values, such as combining multiple switch states into a single effective
+ # state.
+ #
+ # @example Checking if embedding is currently performed
+ # # When embedding_enabled is true and embedding_paused is false,
+ # # the combined switch will return true
+ # combined_switch.value # => true
  class CombinedSwitch
  # The initialize method sets up the switch with a value and message.
  #
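The @example for CombinedSwitch above only shows the query side. The following is a hedged Ruby sketch, not part of the diff, of how two documented Switch instances could feed a combined one; the keyword arguments of CombinedSwitch.new are an assumption based on the "value and message" wording of its initializer docs, and the feedback messages are illustrative.

    require 'ollama_chat'

    # Two plain switches, constructed as in the @example blocks above.
    embedding_enabled = OllamaChat::Switches::Switch.new(
      value: true,
      msg: { true => 'Embedding enabled.', false => 'Embedding disabled.' }
    )
    embedding_paused = OllamaChat::Switches::Switch.new(
      value: false,
      msg: { true => 'Embedding paused.', false => 'Embedding resumed.' }
    )

    # Assumed constructor signature: a proc as the value plus a message hash.
    embedding_performed = OllamaChat::Switches::CombinedSwitch.new(
      value: -> { embedding_enabled.on? && embedding_paused.off? },
      msg:   { true => 'Embedding is performed.', false => 'Embedding is not performed.' }
    )

    embedding_performed.value # => true, assuming the proc is evaluated on each query
    embedding_paused.toggle   # flips the paused switch (the msg hash supplies the feedback text)
    embedding_performed.value # => false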
data/lib/ollama_chat/utils/cache_fetcher.rb CHANGED
@@ -1,5 +1,20 @@
  require 'digest/md5'

+ # A cache fetcher implementation that handles caching of HTTP responses with
+ # content type metadata.
+ #
+ # This class provides a mechanism to store and retrieve cached HTTP responses,
+ # including their content types, using a key-based system. It is designed to
+ # work with various cache backends and ensures that both the response body and
+ # metadata are properly cached and retrieved for efficient subsequent requests.
+ #
+ # @example Using the CacheFetcher to cache and retrieve HTTP responses
+ # cache = Redis.new
+ # fetcher = OllamaChat::Utils::CacheFetcher.new(cache)
+ # fetcher.put('https://example.com', io)
+ # fetcher.get('https://example.com') do |cached_io|
+ # # Process cached content
+ # end
  class OllamaChat::Utils::CacheFetcher
  # The initialize method sets up the cache instance variable for the object.
  #
@@ -30,6 +45,7 @@ class OllamaChat::Utils::CacheFetcher
  io.extend(OllamaChat::Utils::Fetcher::HeaderExtension)
  io.content_type = content_type
  block.(io)
+ io
  end
  end

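A hedged round-trip sketch, not part of the diff, of the put/get API shown in the @example above. The added `io` at the end of the hunk means a cache hit now also yields the extended IO as the expression's value, which the Fetcher change further down uses to report the cached content type. Extending the stored IO with HeaderExtension before #put follows that module's own @example; whether #put needs any further metadata is not visible in this diff.

    require 'stringio'
    require 'redis'
    require 'mime-types'
    require 'ollama_chat'

    cache   = Redis.new                                   # backend as in the @example
    fetcher = OllamaChat::Utils::CacheFetcher.new(cache)

    # Assumption: the IO handed to #put carries its content type via HeaderExtension.
    io = StringIO.new('{"ok":true}')
    io.extend(OllamaChat::Utils::Fetcher::HeaderExtension)
    io.content_type = MIME::Types['application/json'].first

    fetcher.put('https://example.com/data.json', io)
    fetcher.get('https://example.com/data.json') do |cached_io|
      cached_io.content_type # restored from the cached metadata
      cached_io.read         # the cached body
    end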
data/lib/ollama_chat/utils/fetcher.rb CHANGED
@@ -5,7 +5,34 @@ require 'mime-types'
  require 'stringio'
  require 'ollama_chat/utils/cache_fetcher'

+ # A fetcher implementation that handles retrieval and caching of HTTP
+ # resources.
+ #
+ # This class provides functionality to fetch content from URLs, with support
+ # for caching responses and their metadata. It handles various content types
+ # and integrates with different cache backends to improve performance by
+ # avoiding redundant network requests.
+ #
+ # @example Fetching content from a URL with caching
+ # fetcher = OllamaChat::Utils::Fetcher.new(cache: redis_cache)
+ # fetcher.get('https://example.com/data.json') do |tmp|
+ # # Process the fetched content
+ # end
  class OllamaChat::Utils::Fetcher
+ # A module that extends IO objects with content type metadata and expiration
+ # tracking.
+ #
+ # This module provides a way to attach MIME content type information and
+ # cache expiration details to IO objects, enabling them to carry metadata
+ # about their source and caching behavior. It is primarily used by fetcher
+ # implementations to decorate response objects with additional context for
+ # processing and caching decisions.
+ #
+ # @example Extending an IO object with header metadata
+ # io = StringIO.new("content")
+ # io.extend(OllamaChat::Utils::Fetcher::HeaderExtension)
+ # io.content_type = MIME::Types['text/plain'].first
+ # io.ex = 3600
  module HeaderExtension
  # The content_type method accesses the content type attribute of the object.
  #
@@ -30,6 +57,18 @@ class OllamaChat::Utils::Fetcher
  end
  end

+ # A custom error class raised when retrying HTTP requests without streaming.
+ #
+ # This exception is specifically used in the Fetcher class to indicate that
+ # an HTTP request should be retried using a non-streaming approach when a
+ # streaming attempt fails or is not supported.
+ #
+ # @example Handling the RetryWithoutStreaming error
+ # begin
+ # fetcher.get('https://example.com')
+ # rescue RetryWithoutStreaming
+ # # Handle retry with non-streaming method
+ # end
  class RetryWithoutStreaming < StandardError; end

  # The get method retrieves content from a URL, using caching when available.
@@ -51,8 +90,10 @@ class OllamaChat::Utils::Fetcher
  def self.get(url, headers: {}, **options, &block)
  cache = options.delete(:cache) and
  cache = OllamaChat::Utils::CacheFetcher.new(cache)
- cache and infobar.puts "Getting #{url.to_s.inspect} from cache."
+ cache and infobar.puts "Getting #{url.to_s.inspect} via cache"
  if result = cache&.get(url, &block)
+ content_type = result&.content_type || 'unknown'
+ infobar.puts "…hit, found #{content_type} content in cache."
  return result
  else
  new(**options).send(:get, url, headers:) do |tmp|
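To make the changed log flow above concrete, here is a hedged sketch, not part of the diff, of calling the class-level get with a cache backend. Redis as the backend and the URL are assumptions; the quoted messages are the ones added in this hunk, and the claim that a miss populates the cache follows the class documentation rather than code shown here.

    require 'redis'
    require 'ollama_chat'

    cache = Redis.new

    2.times do
      # With a cache configured, every call first prints
      # "Getting \"https://example.com/data.json\" via cache".
      # The first call misses, fetches over HTTP and (per the class docs) stores the
      # result; the second call hits and additionally prints
      # "…hit, found <content type> content in cache.", the type coming from the IO
      # returned by CacheFetcher#get.
      OllamaChat::Utils::Fetcher.get('https://example.com/data.json', cache: cache) do |tmp|
        tmp.read
      end
    end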
data/lib/ollama_chat/utils.rb CHANGED
@@ -1,3 +1,11 @@
+ # A module that provides utility classes and methods for the OllamaChat
+ # application.
+ #
+ # The Utils module serves as a namespace for various helper components that
+ # support the core functionality of OllamaChat. It contains implementations for
+ # caching, interactive selection, content fetching, and file argument handling
+ # that are used throughout the application to provide robust and user-friendly
+ # features.
  module OllamaChat::Utils
  end

data/lib/ollama_chat/version.rb CHANGED
@@ -1,6 +1,6 @@
  module OllamaChat
  # OllamaChat version
- VERSION = '0.0.25'
+ VERSION = '0.0.27'
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/lib/ollama_chat/vim.rb CHANGED
@@ -1,6 +1,12 @@
  require 'tempfile'
  require 'pathname'

+ # A class that provides functionality for inserting text into Vim buffers via
+ # remote communication.
+ #
+ # @example
+ # vim = OllamaChat::Vim.new("MY_SERVER")
+ # vim.insert("Hello, Vim!")
  class OllamaChat::Vim
  # Initializes a new Vim server connection
  #
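A brief usage note, not from the diff: "remote communication" here refers to Vim's client/server mode, so a named server has to exist before the class can talk to it. The shell command below is standard Vim; the Ruby lines repeat the @example above.

    # In one terminal, start a Vim instance registered under a server name:
    #   vim --servername MY_SERVER
    # Then, from Ruby, target that running instance:
    require 'ollama_chat'

    vim = OllamaChat::Vim.new('MY_SERVER')
    vim.insert('Hello, Vim!') # inserts the text into the connected Vim buffer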
data/lib/ollama_chat/web_searching.rb CHANGED
@@ -1,3 +1,14 @@
+ # A module that provides web search functionality for OllamaChat.
+ #
+ # The WebSearching module encapsulates the logic for performing web searches
+ # using configured search engines. It handles query construction, location
+ # information integration, and delegates to engine-specific implementations for
+ # retrieving search results. The module supports multiple search engines
+ # including SearxNG and DuckDuckGo, making it flexible for different deployment
+ # scenarios and privacy preferences.
+ #
+ # @example Performing a web search
+ # chat.search_web('ruby programming tutorials', 5)
  module OllamaChat::WebSearching
  # The search_web method performs a web search using the configured search
  # engine.
data/lib/ollama_chat.rb CHANGED
@@ -1,3 +1,12 @@
+ # The main module namespace for the OllamaChat application.
+ #
+ # This module serves as the root namespace for all components of the OllamaChat
+ # Ruby gem, providing access to core classes, utilities, and configuration
+ # management for interacting with Ollama language models through a terminal
+ # interface.
+ #
+ # @example Accessing the main module
+ # OllamaChat::VERSION # => "0.0.25"
  module OllamaChat
  end

data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
  # -*- encoding: utf-8 -*-
- # stub: ollama_chat 0.0.25 ruby lib
+ # stub: ollama_chat 0.0.27 ruby lib

  Gem::Specification.new do |s|
  s.name = "ollama_chat".freeze
- s.version = "0.0.25".freeze
+ s.version = "0.0.27".freeze

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
@@ -13,7 +13,7 @@ Gem::Specification.new do |s|
  s.email = "flori@ping.de".freeze
  s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
  s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
- s.files = [".all_images.yml".freeze, ".contexts/full.rb".freeze, ".contexts/info.rb".freeze, ".contexts/lib.rb".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
+ s.files = ["CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
  s.homepage = "https://github.com/flori/ollama_chat".freeze
  s.licenses = ["MIT".freeze]
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
@@ -24,7 +24,7 @@ Gem::Specification.new do |s|

  s.specification_version = 4

- s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 2.0".freeze])
+ s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 2.2".freeze])
  s.add_development_dependency(%q<all_images>.freeze, ["~> 0.6".freeze])
  s.add_development_dependency(%q<rspec>.freeze, ["~> 3.2".freeze])
  s.add_development_dependency(%q<kramdown>.freeze, ["~> 2.0".freeze])
data/spec/ollama_chat/chat_spec.rb CHANGED
@@ -1,6 +1,6 @@
  require 'spec_helper'

- describe OllamaChat::Chat do
+ describe OllamaChat::Chat, protect_env: true do
  let :argv do
  %w[ -C test ]
  end
@@ -77,6 +77,16 @@ describe OllamaChat::Chat do
  expect(chat.handle_input("/clobber")).to eq :next
  end

+ it 'returns :next when input is "/last"' do
+ expect(chat.messages).to receive(:show_last)
+ expect(chat.handle_input("/last")).to eq :next
+ end
+
+ it 'returns :next when input is "/last\s+(\d+)"' do
+ expect(chat.messages).to receive(:show_last).with(2)
+ expect(chat.handle_input("/last 2")).to eq :next
+ end
+
  it 'returns :next when input is "/drop(?:\s+(\d*))?"' do
  expect(chat.messages).to receive(:drop).with(?2)
  expect(chat.messages).to receive(:show_last)
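The two new examples pin down a /last command: bare "/last" delegates to MessageList#show_last, and "/last N" passes the count through. Below is a hedged sketch of one way that dispatch could look; the real handler in lib/ollama_chat/chat.rb is not part of this diff, and `input` and `messages` are stand-ins for the handler's own context.

    # Hypothetical excerpt of an input handler; returning :next continues the chat loop.
    case input
    when %r{\A/last(?:\s+(\d+))?\z}
      $1 ? messages.show_last($1.to_i) : messages.show_last
      :next
    end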
data/spec/ollama_chat/kramdown_ansi_spec.rb CHANGED
@@ -5,10 +5,10 @@ describe OllamaChat::KramdownANSI do
  double('Chat').extend(described_class)
  end

- describe '#configure_kramdown_ansi_styles' do
+ describe '#configure_kramdown_ansi_styles', protect_env: true do
  it 'can be configured via env var' do
- allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES').and_return(true)
- allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_STYLES').and_return(false)
+ ENV['KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES'] = '{}'
+ ENV.delete('KRAMDOWN_ANSI_STYLES')

  styles = { bold: '1' }
  expect(Kramdown::ANSI::Styles).to receive(:from_env_var).
@@ -19,8 +19,8 @@ describe OllamaChat::KramdownANSI do
  end

  it 'has a default configuration' do
- allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES').and_return(false)
- allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_STYLES').and_return(false)
+ ENV.delete('KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES')
+ ENV.delete('KRAMDOWN_ANSI_STYLES')

  expect(chat.configure_kramdown_ansi_styles).to be_a(Hash)
  end
data/spec/ollama_chat/message_list_spec.rb CHANGED
@@ -39,7 +39,7 @@ describe OllamaChat::MessageList do
  expect(list.size).to eq 1
  list.clear
  expect(list.size).to eq 1
- list << Ollama::Message.new(role: 'user', content: 'world')
+ list << Ollama::Message.new(role: 'user', content: 'world')
  expect(list.size).to eq 2
  list.clear
  expect(list.size).to eq 1
@@ -47,7 +47,7 @@ describe OllamaChat::MessageList do

  it 'can be added to' do
  expect(list.size).to eq 1
- list << Ollama::Message.new(role: 'user', content: 'world')
+ list << Ollama::Message.new(role: 'user', content: 'world')
  expect(list.size).to eq 2
  end

@@ -71,8 +71,31 @@ describe OllamaChat::MessageList do
  FileUtils.rm_f 'tmp/test-conversation.json'
  end

+ describe "#last" do
+ it "returns the last message when there are multiple messages" do
+ list = described_class.new(chat)
+ list << Ollama::Message.new(role: 'system', content: 'hello')
+ list << Ollama::Message.new(role: 'user', content: 'First message')
+ list << Ollama::Message.new(role: 'assistant', content: 'Second message')
+
+ expect(list.last.content).to eq('Second message')
+ end
+
+ it "returns the last message when there is only one message" do
+ list = described_class.new(chat)
+ list << Ollama::Message.new(role: 'system', content: 'hello')
+
+ expect(list.last.content).to eq('hello')
+ end
+
+ it "returns nil when there are no messages" do
+ list = described_class.new(chat)
+
+ expect(list.last).to be_nil
+ end
+ end

- describe '.show_last' do
+ describe '#show_last' do
  it 'shows nothing when there are no messages' do
  empty_list = described_class.new(chat)
  expect { empty_list.show_last }.not_to raise_error
@@ -83,7 +106,7 @@ describe OllamaChat::MessageList do
  list = described_class.new(chat)
  allow(chat).to receive(:think).and_return(double(on?: false))
  allow(chat).to receive(:markdown).and_return(double(on?: false))
- list << Ollama::Message.new(role: 'assistant', content: 'hello')
+ list << Ollama::Message.new(role: 'assistant', content: 'hello')
  expect(STDOUT).to receive(:puts).
  with("📨 \e[1m\e[38;5;111massistant\e[0m\e[0m:\nhello\n")
  expect(list.show_last).to be_a described_class
@@ -91,10 +114,22 @@ describe OllamaChat::MessageList do

  it 'shows nothing when the last message is by the user' do
  list = described_class.new(chat)
- list << Ollama::Message.new(role: 'user', content: 'world')
+ list << Ollama::Message.new(role: 'user', content: 'world')
  expect { list.show_last }.not_to raise_error
  expect(list.show_last).to be nil
  end
+
+ it "shows last N messages when N is larger than available messages" do
+ allow(chat).to receive(:think).and_return(double(on?: false))
+ allow(chat).to receive(:markdown).and_return(double(on?: false))
+ list = described_class.new(chat)
+ list << Ollama::Message.new(role: 'system', content: 'hello')
+ list << Ollama::Message.new(role: 'user', content: 'First message')
+ list << Ollama::Message.new(role: 'assistant', content: 'Second message')
+
+ expect(STDOUT).to receive(:puts).with(/Second message/)
+ expect(list.show_last(23)).to eq(list)
+ end
  end

  context 'without pager' do
@@ -117,7 +152,7 @@ describe OllamaChat::MessageList do
  and_return(double(on?: true)).at_least(:once)
  expect(chat).to receive(:think).
  and_return(double(on?: false)).at_least(:once)
- list << Ollama::Message.new(role: 'user', content: 'world')
+ list << Ollama::Message.new(role: 'user', content: 'world')
  expect(STDOUT).to receive(:puts).
  with(
  "📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n" \
@@ -158,7 +193,7 @@ describe OllamaChat::MessageList do
  and_return(double(on?: true)).at_least(:once)
  expect(chat).to receive(:think).
  and_return(double(on?: false)).at_least(:once)
- list << Ollama::Message.new(role: 'user', content: 'world')
+ list << Ollama::Message.new(role: 'user', content: 'world')
  list.list_conversation
  end
  end
@@ -195,9 +230,9 @@ describe OllamaChat::MessageList do
  expect(list.size).to eq 1
  expect(list.drop(1)).to eq 0
  expect(list.size).to eq 1
- list << Ollama::Message.new(role: 'user', content: 'world')
+ list << Ollama::Message.new(role: 'user', content: 'world')
  expect(list.size).to eq 2
- list << Ollama::Message.new(role: 'assistant', content: 'hi')
+ list << Ollama::Message.new(role: 'assistant', content: 'hi')
  expect(list.size).to eq 3
  expect(list.drop(1)).to eq 1
  expect(list.size).to eq 1
@@ -227,7 +262,7 @@ describe OllamaChat::MessageList do

  it 'can be converted int an Ollama::Message array' do
  expect(chat).to receive(:location).and_return(double(on?: false))
- list << Ollama::Message.new(role: 'user', content: 'world')
+ list << Ollama::Message.new(role: 'user', content: 'world')
  expect(list.to_ary.map(&:as_json)).to eq [
  Ollama::Message.new(role: 'system', content: 'hello').as_json,
  Ollama::Message.new(role: 'user', content: 'world').as_json,
@@ -236,7 +271,7 @@ describe OllamaChat::MessageList do

  it 'can be converted int an Ollama::Message array with location' do
  expect(chat).to receive(:location).and_return(double(on?: true))
- list << Ollama::Message.new(role: 'user', content: 'world')
+ list << Ollama::Message.new(role: 'user', content: 'world')
  first = list.to_ary.first
  expect(first.role).to eq 'system'
  expect(first.content).to match(
@@ -246,7 +281,7 @@ describe OllamaChat::MessageList do
  it 'can be converted int an Ollama::Message array with location without a system prompt' do
  expect(chat).to receive(:location).and_return(double(on?: true))
  list = described_class.new(chat).tap do |list|
- list << Ollama::Message.new(role: 'user', content: 'hello')
+ list << Ollama::Message.new(role: 'user', content: 'hello')
  list << Ollama::Message.new(role: 'assistant', content: 'world')
  end
  first = list.to_ary.first
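The new examples above document MessageList#last and the optional count argument to #show_last. A compact, hedged summary of that API as the specs exercise it; `chat` stands in for whatever object the list is attached to.

    list = OllamaChat::MessageList.new(chat)
    list << Ollama::Message.new(role: 'system', content: 'hello')
    list << Ollama::Message.new(role: 'user', content: 'First message')
    list << Ollama::Message.new(role: 'assistant', content: 'Second message')

    list.last.content # => "Second message"; nil when the list is empty
    list.show_last    # prints the last message unless it was written by the user
    list.show_last(2) # prints up to the last two messages; a count larger than the
                      # list size simply shows what is there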
data/spec/spec_helper.rb CHANGED
@@ -12,44 +12,138 @@ require 'ollama_chat'

  ComplexConfig::Provider.deep_freeze = false

- def asset(name)
- File.join(__dir__, 'assets', name)
- end
+ # A module that provides helper methods for asset management within the
+ # application.
+ #
+ # The AssetHelpers module encapsulates functionality related to handling and
+ # processing application assets, such as CSS, JavaScript, and image files. It
+ # offers utilities for managing asset paths, generating URLs, and performing
+ # operations on assets during the application's runtime.
+ module AssetHelpers
+ # The asset method constructs and returns the full path to an asset file.
+ #
+ # This method takes a filename argument and combines it with the assets directory
+ # located within the same directory as the calling file, returning the
+ # complete path to that asset.
+ #
+ # @param name [String] the name of the asset file
+ #
+ # @return [String] the full path to the asset file
+ def asset(name)
+ File.join(__dir__, 'assets', name)
+ end

- def asset_content(name)
- File.read(File.join(__dir__, 'assets', name))
- end
+ # Reads and returns the content of an asset file from the assets directory.
+ #
+ # @param name [String] the name of the asset file to read
+ #
+ # @return [String] the content of the asset file as a string
+ def asset_content(name)
+ File.read(File.join(__dir__, 'assets', name))
+ end

- def asset_io(name, &block)
- io = File.new(File.join(__dir__, 'assets', name))
- if block
- begin
- block.call(io)
- ensure
- io.close
+ # The asset_io method retrieves an IO object for a specified asset file.
+ #
+ # This method constructs the path to an asset file within the assets directory
+ # and returns an IO object representing that file. If a block is provided, it
+ # yields the IO object to the block and ensures the file is properly closed
+ # after the block executes.
+ #
+ # @param name [ String ] the name of the asset file to retrieve
+ #
+ # @yield [ io ] yields the IO object for the asset file to the provided block
+ #
+ # @return [ File, nil ] returns the IO object for the asset file, or nil if a
+ # block is provided and the block does not return a value
+ def asset_io(name, &block)
+ io = File.new(File.join(__dir__, 'assets', name))
+ if block
+ begin
+ block.call(io)
+ ensure
+ io.close
+ end
+ else
+ io
  end
- else
- io
+ end
+
+ # The asset_json method reads and parses a JSON asset file.
+ #
+ # This method retrieves an asset by name, reads its contents from the
+ # filesystem, and then parses the resulting string as JSON, returning the
+ # parsed data structure.
+ #
+ # @param name [String] the name of the asset to retrieve and parse
+ #
+ # @return [Object] the parsed JSON data structure, typically a Hash or Array
+ def asset_json(name)
+ JSON(JSON(File.read(asset(name))))
  end
  end

- def asset_json(name)
- JSON(JSON(File.read(asset(name))))
+ # A module that provides functionality for stubbing Ollama server responses.
+ #
+ # The StubOllamaServer module enables developers to simulate Ollama API
+ # interactions in test environments by intercepting requests and returning
+ # predefined responses. This allows for faster, more reliable testing without
+ # requiring external service calls.
+ module StubOllamaServer
+ # The connect_to_ollama_server method establishes a connection to an Ollama
+ # server.
+ #
+ # This method sets up stubbed HTTP requests to simulate responses from an
+ # Ollama server, including API tags, show, and version endpoints. It can
+ # optionally instantiate a chat session after setting up the stubs.
+ #
+ # @param instantiate [Boolean] whether to instantiate a chat session after setting up stubs
+ def connect_to_ollama_server(instantiate: true)
+ before do
+ stub_request(:get, %r(/api/tags\z)).
+ to_return(status: 200, body: asset_json('api_tags.json'))
+ stub_request(:post, %r(/api/show\z)).
+ to_return(status: 200, body: asset_json('api_show.json'))
+ stub_request(:get, %r(/api/version\z)).
+ to_return(status: 200, body: asset_json('api_version.json'))
+ instantiate and chat
+ end
+ end
  end

- def connect_to_ollama_server(instantiate: true)
- before do
- stub_request(:get, %r(/api/tags\z)).
- to_return(status: 200, body: asset_json('api_tags.json'))
- stub_request(:post, %r(/api/show\z)).
- to_return(status: 200, body: asset_json('api_show.json'))
- stub_request(:get, %r(/api/version\z)).
- to_return(status: 200, body: asset_json('api_version.json'))
- instantiate and chat
+ # A module that provides functionality for protecting environment variables during tests.
+ #
+ # This module ensures that environment variable changes made during test execution
+ # are automatically restored to their original values after the test completes.
+ # It is designed to prevent side effects between tests that modify environment
+ # variables, maintaining a clean testing environment.
+ module ProtectEnvVars
+ # The apply method creates a lambda that protects environment variables
+ # during test execution.
+ #
+ # @return [Proc] a lambda that wraps test execution with environment variable
+ # preservation
+ def self.apply
+ -> example do
+ if example.metadata[:protect_env]
+ begin
+ stored_env = ENV.to_h
+ example.run
+ ensure
+ ENV.replace(stored_env)
+ end
+ else
+ example.run
+ end
+ end
+ end
  end
  end

  RSpec.configure do |config|
+ config.include AssetHelpers
+ config.extend StubOllamaServer
+
+ config.around(&ProtectEnvVars.apply)
+
  config.before(:suite) do
  infobar.show = nil
  end
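To close the loop on the protect_env: true metadata used in the chat and kramdown_ansi specs above, here is a hedged usage sketch, not part of the diff, of how an example group opts into the ENV protection installed by config.around(&ProtectEnvVars.apply); the describe text and variable names are illustrative.

    RSpec.describe 'something environment dependent', protect_env: true do
      it 'may mutate ENV freely' do
        ENV['KRAMDOWN_ANSI_STYLES'] = '{}' # only visible inside this example
        expect(ENV['KRAMDOWN_ANSI_STYLES']).to eq '{}'
      end
      # The around hook snapshots ENV before the example and calls ENV.replace afterwards,
      # so changes made here never leak into other examples.
    end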