ollama_chat 0.0.25 → 0.0.26

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,3 +1,20 @@
1
+ # A module that provides functionality for managing Ollama models, including
2
+ # checking model availability, pulling models from remote servers, and handling
3
+ # model presence verification.
4
+ #
5
+ # This module encapsulates the logic for interacting with Ollama models,
6
+ # ensuring that required models are available locally before attempting to use
7
+ # them in chat sessions. It handles both local model verification and remote
8
+ # model retrieval when necessary.
9
+ #
10
+ # @example Checking if a model is present
11
+ # chat.model_present?('llama3.1')
12
+ #
13
+ # @example Pulling a model from a remote server
14
+ # chat.pull_model_from_remote('mistral')
15
+ #
16
+ # @example Ensuring a model is available locally
17
+ # chat.pull_model_unless_present('phi3', {})
1
18
  module OllamaChat::ModelHandling
2
19
  # The model_present? method checks if the specified Ollama model is available.
3
20
  #
@@ -1,12 +1,31 @@
1
1
  require 'pathname'
2
2
 
3
+ # A configuration class for managing OllamaChat settings and file paths.
4
+ #
5
+ # This class handles the initialization and management of configuration files
6
+ # for the OllamaChat application. It provides methods for setting up default
7
+ # configurations, determining appropriate file paths for config and cache
8
+ # directories, and managing the loading and creation of configuration files
9
+ # based on XDG standards.
10
+ #
11
+ # @example Initializing with a custom configuration file
12
+ # config = OllamaChat::OllamaChatConfig.new('/path/to/custom/config.yml')
13
+ #
14
+ # @example Accessing default configuration paths
15
+ # config = OllamaChat::OllamaChatConfig.new
16
+ # config.default_config_path # => Path to the default configuration file
17
+ # config.config_dir_path # => Path to the configuration directory
18
+ # config.cache_dir_path # => Path to the cache directory
19
+ # config.database_path # => Path to the documents database file
3
20
  class OllamaChat::OllamaChatConfig
4
21
  include ComplexConfig
5
22
  include FileUtils
6
23
 
24
+ # Path to the default config
7
25
  DEFAULT_CONFIG_PATH = Pathname.new(__FILE__).dirname.
8
26
  join('ollama_chat_config/default_config.yml')
9
27
 
28
+ # Content of the default config
10
29
  DEFAULT_CONFIG = File.read(DEFAULT_CONFIG_PATH)
11
30
 
12
31
  # The initialize method sets up the configuration file path and ensures the
@@ -1,3 +1,16 @@
1
+ # A module that provides content parsing functionality for OllamaChat.
2
+ #
3
+ # The Parsing module encapsulates methods for processing various types of input
4
+ # sources including HTML, XML, CSV, RSS, Atom, PDF, and Postscript documents.
5
+ # It handles content extraction and conversion into standardized text formats
6
+ # suitable for chat interactions. The module supports different document
7
+ # policies for handling imported or embedded content and provides utilities for
8
+ # parsing structured data from multiple source types.
9
+ #
10
+ # @example Processing different document types
11
+ # chat.parse_source(html_io) # Converts HTML to markdown
12
+ # chat.parse_source(pdf_io) # Extracts text from PDF files
13
+ # chat.parse_source(csv_io) # Formats CSV data into readable strings
1
14
  module OllamaChat::Parsing
2
15
  # The parse_source method processes different types of input sources and
3
16
  # converts them into a standardized text representation.
@@ -1,3 +1,25 @@
1
+ # A module that provides server socket functionality for OllamaChat
2
+ #
3
+ # The ServerSocket module encapsulates the logic for creating and managing Unix
4
+ # domain socket servers that enable external processes to send input to running
5
+ # ollama_chat sessions. It supports both simple message transmission and
6
+ # bidirectional communication with response handling, allowing for integration
7
+ # with tools like ollama_chat_send.
8
+ #
9
+ # @example Sending a message to a running chat session
10
+ # OllamaChat::ServerSocket.send_to_server_socket(
11
+ # "Hello from external process",
12
+ # config: chat_config,
13
+ # type: :socket_input
14
+ # )
15
+ #
16
+ # @example Sending a message and waiting for a response
17
+ # response = OllamaChat::ServerSocket.send_to_server_socket(
18
+ # "What is the answer?",
19
+ # config: chat_config,
20
+ # type: :socket_input_with_response,
21
+ # parse: true
22
+ # )
1
23
  module OllamaChat::ServerSocket
2
24
  class << self
3
25
  # The send_to_server_socket method transmits a message to a Unix domain
@@ -56,6 +78,11 @@ module OllamaChat::ServerSocket
56
78
  end
57
79
  end
58
80
 
81
+ # The server_socket_message accessor method provides read and write access to
82
+ # the server socket message instance variable.
83
+ #
84
+ # @return [ Object, nil ] the current server socket message object or nil if
85
+ # not set
59
86
  attr_accessor :server_socket_message
60
87
 
61
88
  # Initializes the server socket to receive messages from the Ollama Chat
@@ -1,3 +1,28 @@
1
+ # A module that provides functionality for fetching and processing various
2
+ # types of content sources.
3
+ #
4
+ # The SourceFetching module encapsulates methods for retrieving content from
5
+ # different source types including URLs, file paths, and shell commands. It
6
+ # handles the logic for determining the appropriate fetching method based on
7
+ # the source identifier and processes the retrieved content through specialized
8
+ # parsers depending on the content type. The module also manages image
9
+ # handling, document importing, summarizing, and embedding operations while
10
+ # providing error handling and debugging capabilities.
11
+ #
12
+ # @example Fetching content from a URL
13
+ # chat.fetch_source('https://example.com/document.html') do |source_io|
14
+ # # Process the fetched content
15
+ # end
16
+ #
17
+ # @example Importing a local file
18
+ # chat.fetch_source('/path/to/local/file.txt') do |source_io|
19
+ # # Process the imported file content
20
+ # end
21
+ #
22
+ # @example Executing a shell command
23
+ # chat.fetch_source('!ls -la') do |source_io|
24
+ # # Process the command output
25
+ # end
1
26
  module OllamaChat::SourceFetching
2
27
  # The http_options method prepares HTTP options for requests based on
3
28
  # configuration settings.
@@ -1,4 +1,28 @@
1
+ # A module that provides switch functionality for configuring application
2
+ # behavior.
3
+ #
4
+ # The Switches module encapsulates various toggle switches used throughout the
5
+ # OllamaChat application to control different features and settings such as
6
+ # streaming, thinking, markdown output, voice output, embedding, and location
7
+ # information. These switches allow users to dynamically enable or disable
8
+ # specific functionalities during a chat session.
9
+ #
10
+ # @example Toggling a switch on/off
11
+ # switch = OllamaChat::Switches::Switch.new(value: false, msg: { true => 'Enabled', false => 'Disabled' })
12
+ # switch.toggle # Turns the switch on
13
+ # switch.toggle # Turns the switch off
1
14
  module OllamaChat::Switches
15
+ # A module that provides switch state checking functionality.
16
+ #
17
+ # The CheckSwitch module adds methods for checking the boolean state of
18
+ # switches and displaying their current status. It's designed to be included
19
+ # in switch classes to provide consistent behavior for querying switch states
20
+ # and outputting status messages.
21
+ #
22
+ # @example Checking switch states
23
+ # switch = OllamaChat::Switches::Switch.new(value: true, msg: { true => 'On', false => 'Off' })
24
+ # switch.on? # Returns true
25
+ # switch.off? # Returns false
2
26
  module CheckSwitch
3
27
  extend Tins::Concern
4
28
 
@@ -23,6 +47,19 @@ module OllamaChat::Switches
23
47
  end
24
48
  end
25
49
 
50
+ # A switch class that manages boolean state with toggle and set
51
+ # functionality.
52
+ #
53
+ # The Switch class provides a simple way to manage boolean configuration
54
+ # options with methods to toggle, set, and query the current state. It
55
+ # includes messaging capabilities to provide feedback when the state changes.
56
+ #
57
+ # @example Creating and using a switch
58
+ # switch = Switch.new(value: false, msg: { true => 'Enabled', false => 'Disabled' })
59
+ # switch.toggle # Turns the switch on
60
+ # switch.value # Returns true
61
+ # switch.off? # Returns false
62
+ # switch.on? # Returns true
26
63
  class Switch
27
64
  # The initialize method sets up the switch with a default value and
28
65
  # message.
@@ -64,6 +101,18 @@ module OllamaChat::Switches
64
101
  include CheckSwitch
65
102
  end
66
103
 
104
+ # A switch class that manages a boolean state based on a proc value.
105
+ #
106
+ # The CombinedSwitch class provides a way to manage a boolean configuration
107
+ # option where the state is determined by evaluating a stored proc. This is
108
+ # useful for complex conditions that depend on multiple factors or dynamic
109
+ # values, such as combining multiple switch states into a single effective
110
+ # state.
111
+ #
112
+ # @example Checking if embedding is currently performed
113
+ # # When embedding_enabled is true and embedding_paused is false,
114
+ # # the combined switch will return true
115
+ # combined_switch.value # => true
67
116
  class CombinedSwitch
68
117
  # The initialize method sets up the switch with a value and message.
69
118
  #
@@ -1,5 +1,20 @@
1
1
  require 'digest/md5'
2
2
 
3
+ # A cache fetcher implementation that handles caching of HTTP responses with
4
+ # content type metadata.
5
+ #
6
+ # This class provides a mechanism to store and retrieve cached HTTP responses,
7
+ # including their content types, using a key-based system. It is designed to
8
+ # work with various cache backends and ensures that both the response body and
9
+ # metadata are properly cached and retrieved for efficient subsequent requests.
10
+ #
11
+ # @example Using the CacheFetcher to cache and retrieve HTTP responses
12
+ # cache = Redis.new
13
+ # fetcher = OllamaChat::Utils::CacheFetcher.new(cache)
14
+ # fetcher.put('https://example.com', io)
15
+ # fetcher.get('https://example.com') do |cached_io|
16
+ # # Process cached content
17
+ # end
3
18
  class OllamaChat::Utils::CacheFetcher
4
19
  # The initialize method sets up the cache instance variable for the object.
5
20
  #
@@ -5,7 +5,34 @@ require 'mime-types'
5
5
  require 'stringio'
6
6
  require 'ollama_chat/utils/cache_fetcher'
7
7
 
8
+ # A fetcher implementation that handles retrieval and caching of HTTP
9
+ # resources.
10
+ #
11
+ # This class provides functionality to fetch content from URLs, with support
12
+ # for caching responses and their metadata. It handles various content types
13
+ # and integrates with different cache backends to improve performance by
14
+ # avoiding redundant network requests.
15
+ #
16
+ # @example Fetching content from a URL with caching
17
+ # fetcher = OllamaChat::Utils::Fetcher.new(cache: redis_cache)
18
+ # fetcher.get('https://example.com/data.json') do |tmp|
19
+ # # Process the fetched content
20
+ # end
8
21
  class OllamaChat::Utils::Fetcher
22
+ # A module that extends IO objects with content type metadata and expiration
23
+ # tracking.
24
+ #
25
+ # This module provides a way to attach MIME content type information and
26
+ # cache expiration details to IO objects, enabling them to carry metadata
27
+ # about their source and caching behavior. It is primarily used by fetcher
28
+ # implementations to decorate response objects with additional context for
29
+ # processing and caching decisions.
30
+ #
31
+ # @example Extending an IO object with header metadata
32
+ # io = StringIO.new("content")
33
+ # io.extend(OllamaChat::Utils::Fetcher::HeaderExtension)
34
+ # io.content_type = MIME::Types['text/plain'].first
35
+ # io.ex = 3600
9
36
  module HeaderExtension
10
37
  # The content_type method accesses the content type attribute of the object.
11
38
  #
@@ -30,6 +57,18 @@ class OllamaChat::Utils::Fetcher
30
57
  end
31
58
  end
32
59
 
60
+ # A custom error class raised when retrying HTTP requests without streaming.
61
+ #
62
+ # This exception is specifically used in the Fetcher class to indicate that
63
+ # an HTTP request should be retried using a non-streaming approach when a
64
+ # streaming attempt fails or is not supported.
65
+ #
66
+ # @example Handling the RetryWithoutStreaming error
67
+ # begin
68
+ # fetcher.get('https://example.com')
69
+ # rescue RetryWithoutStreaming
70
+ # # Handle retry with non-streaming method
71
+ # end
33
72
  class RetryWithoutStreaming < StandardError; end
34
73
 
35
74
  # The get method retrieves content from a URL, using caching when available.
@@ -51,8 +90,9 @@ class OllamaChat::Utils::Fetcher
51
90
  def self.get(url, headers: {}, **options, &block)
52
91
  cache = options.delete(:cache) and
53
92
  cache = OllamaChat::Utils::CacheFetcher.new(cache)
54
- cache and infobar.puts "Getting #{url.to_s.inspect} from cache."
93
+ cache and infobar.puts "Getting #{url.to_s.inspect} via cache"
55
94
  if result = cache&.get(url, &block)
95
+ infobar.puts "…hit, found#{result.content_type} content in cache."
56
96
  return result
57
97
  else
58
98
  new(**options).send(:get, url, headers:) do |tmp|
@@ -1,3 +1,11 @@
1
+ # A module that provides utility classes and methods for the OllamaChat
2
+ # application.
3
+ #
4
+ # The Utils module serves as a namespace for various helper components that
5
+ # support the core functionality of OllamaChat. It contains implementations for
6
+ # caching, interactive selection, content fetching, and file argument handling
7
+ # that are used throughout the application to provide robust and user-friendly
8
+ # features.
1
9
  module OllamaChat::Utils
2
10
  end
3
11
 
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.25'
3
+ VERSION = '0.0.26'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
@@ -1,6 +1,12 @@
1
1
  require 'tempfile'
2
2
  require 'pathname'
3
3
 
4
+ # A class that provides functionality for inserting text into Vim buffers via
5
+ # remote communication.
6
+ #
7
+ # @example
8
+ # vim = OllamaChat::Vim.new("MY_SERVER")
9
+ # vim.insert("Hello, Vim!")
4
10
  class OllamaChat::Vim
5
11
  # Initializes a new Vim server connection
6
12
  #
@@ -1,3 +1,14 @@
1
+ # A module that provides web search functionality for OllamaChat.
2
+ #
3
+ # The WebSearching module encapsulates the logic for performing web searches
4
+ # using configured search engines. It handles query construction, location
5
+ # information integration, and delegates to engine-specific implementations for
6
+ # retrieving search results. The module supports multiple search engines
7
+ # including SearxNG and DuckDuckGo, making it flexible for different deployment
8
+ # scenarios and privacy preferences.
9
+ #
10
+ # @example Performing a web search
11
+ # chat.search_web('ruby programming tutorials', 5)
1
12
  module OllamaChat::WebSearching
2
13
  # The search_web method performs a web search using the configured search
3
14
  # engine.
data/lib/ollama_chat.rb CHANGED
@@ -1,3 +1,12 @@
1
+ # The main module namespace for the OllamaChat application.
2
+ #
3
+ # This module serves as the root namespace for all components of the OllamaChat
4
+ # Ruby gem, providing access to core classes, utilities, and configuration
5
+ # management for interacting with Ollama language models through a terminal
6
+ # interface.
7
+ #
8
+ # @example Accessing the main module
9
+ # OllamaChat::VERSION # => "0.0.26"
1
10
  module OllamaChat
2
11
  end
3
12
 
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.25 ruby lib
2
+ # stub: ollama_chat 0.0.26 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.25".freeze
6
+ s.version = "0.0.26".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
@@ -13,7 +13,7 @@ Gem::Specification.new do |s|
13
13
  s.email = "flori@ping.de".freeze
14
14
  s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
15
15
  s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
- s.files = [".all_images.yml".freeze, ".contexts/full.rb".freeze, ".contexts/info.rb".freeze, ".contexts/lib.rb".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, 
"spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
16
+ s.files = [".all_images.yml".freeze, ".contexts/code_comment.rb".freeze, ".contexts/full.rb".freeze, ".contexts/info.rb".freeze, ".contexts/lib.rb".freeze, ".contexts/yard.md".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, 
"spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
17
17
  s.homepage = "https://github.com/flori/ollama_chat".freeze
18
18
  s.licenses = ["MIT".freeze]
19
19
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
@@ -24,7 +24,7 @@ Gem::Specification.new do |s|
24
24
 
25
25
  s.specification_version = 4
26
26
 
27
- s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 2.0".freeze])
27
+ s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 2.1".freeze])
28
28
  s.add_development_dependency(%q<all_images>.freeze, ["~> 0.6".freeze])
29
29
  s.add_development_dependency(%q<rspec>.freeze, ["~> 3.2".freeze])
30
30
  s.add_development_dependency(%q<kramdown>.freeze, ["~> 2.0".freeze])
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- describe OllamaChat::Chat do
3
+ describe OllamaChat::Chat, protect_env: true do
4
4
  let :argv do
5
5
  %w[ -C test ]
6
6
  end
@@ -77,6 +77,16 @@ describe OllamaChat::Chat do
77
77
  expect(chat.handle_input("/clobber")).to eq :next
78
78
  end
79
79
 
80
+ it 'returns :next when input is "/last"' do
81
+ expect(chat.messages).to receive(:show_last)
82
+ expect(chat.handle_input("/last")).to eq :next
83
+ end
84
+
85
+ it 'returns :next when input is "/last\s+(\d+)"' do
86
+ expect(chat.messages).to receive(:show_last).with(2)
87
+ expect(chat.handle_input("/last 2")).to eq :next
88
+ end
89
+
80
90
  it 'returns :next when input is "/drop(?:\s+(\d*))?"' do
81
91
  expect(chat.messages).to receive(:drop).with(?2)
82
92
  expect(chat.messages).to receive(:show_last)
@@ -5,10 +5,10 @@ describe OllamaChat::KramdownANSI do
5
5
  double('Chat').extend(described_class)
6
6
  end
7
7
 
8
- describe '#configure_kramdown_ansi_styles' do
8
+ describe '#configure_kramdown_ansi_styles', protect_env: true do
9
9
  it 'can be configured via env var' do
10
- allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES').and_return(true)
11
- allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_STYLES').and_return(false)
10
+ ENV['KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES'] = '{}'
11
+ ENV.delete('KRAMDOWN_ANSI_STYLES')
12
12
 
13
13
  styles = { bold: '1' }
14
14
  expect(Kramdown::ANSI::Styles).to receive(:from_env_var).
@@ -19,8 +19,8 @@ describe OllamaChat::KramdownANSI do
19
19
  end
20
20
 
21
21
  it 'has a default configuration' do
22
- allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES').and_return(false)
23
- allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_STYLES').and_return(false)
22
+ ENV.delete('KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES')
23
+ ENV.delete('KRAMDOWN_ANSI_STYLES')
24
24
 
25
25
  expect(chat.configure_kramdown_ansi_styles).to be_a(Hash)
26
26
  end
@@ -39,7 +39,7 @@ describe OllamaChat::MessageList do
39
39
  expect(list.size).to eq 1
40
40
  list.clear
41
41
  expect(list.size).to eq 1
42
- list << Ollama::Message.new(role: 'user', content: 'world')
42
+ list << Ollama::Message.new(role: 'user', content: 'world')
43
43
  expect(list.size).to eq 2
44
44
  list.clear
45
45
  expect(list.size).to eq 1
@@ -47,7 +47,7 @@ describe OllamaChat::MessageList do
47
47
 
48
48
  it 'can be added to' do
49
49
  expect(list.size).to eq 1
50
- list << Ollama::Message.new(role: 'user', content: 'world')
50
+ list << Ollama::Message.new(role: 'user', content: 'world')
51
51
  expect(list.size).to eq 2
52
52
  end
53
53
 
@@ -71,8 +71,31 @@ describe OllamaChat::MessageList do
71
71
  FileUtils.rm_f 'tmp/test-conversation.json'
72
72
  end
73
73
 
74
+ describe "#last" do
75
+ it "returns the last message when there are multiple messages" do
76
+ list = described_class.new(chat)
77
+ list << Ollama::Message.new(role: 'system', content: 'hello')
78
+ list << Ollama::Message.new(role: 'user', content: 'First message')
79
+ list << Ollama::Message.new(role: 'assistant', content: 'Second message')
80
+
81
+ expect(list.last.content).to eq('Second message')
82
+ end
83
+
84
+ it "returns the last message when there is only one message" do
85
+ list = described_class.new(chat)
86
+ list << Ollama::Message.new(role: 'system', content: 'hello')
87
+
88
+ expect(list.last.content).to eq('hello')
89
+ end
90
+
91
+ it "returns nil when there are no messages" do
92
+ list = described_class.new(chat)
93
+
94
+ expect(list.last).to be_nil
95
+ end
96
+ end
74
97
 
75
- describe '.show_last' do
98
+ describe '#show_last' do
76
99
  it 'shows nothing when there are no messages' do
77
100
  empty_list = described_class.new(chat)
78
101
  expect { empty_list.show_last }.not_to raise_error
@@ -83,7 +106,7 @@ describe OllamaChat::MessageList do
83
106
  list = described_class.new(chat)
84
107
  allow(chat).to receive(:think).and_return(double(on?: false))
85
108
  allow(chat).to receive(:markdown).and_return(double(on?: false))
86
- list << Ollama::Message.new(role: 'assistant', content: 'hello')
109
+ list << Ollama::Message.new(role: 'assistant', content: 'hello')
87
110
  expect(STDOUT).to receive(:puts).
88
111
  with("📨 \e[1m\e[38;5;111massistant\e[0m\e[0m:\nhello\n")
89
112
  expect(list.show_last).to be_a described_class
@@ -91,10 +114,22 @@ describe OllamaChat::MessageList do
91
114
 
92
115
  it 'shows nothing when the last message is by the user' do
93
116
  list = described_class.new(chat)
94
- list << Ollama::Message.new(role: 'user', content: 'world')
117
+ list << Ollama::Message.new(role: 'user', content: 'world')
95
118
  expect { list.show_last }.not_to raise_error
96
119
  expect(list.show_last).to be nil
97
120
  end
121
+
122
+ it "shows last N messages when N is larger than available messages" do
123
+ allow(chat).to receive(:think).and_return(double(on?: false))
124
+ allow(chat).to receive(:markdown).and_return(double(on?: false))
125
+ list = described_class.new(chat)
126
+ list << Ollama::Message.new(role: 'system', content: 'hello')
127
+ list << Ollama::Message.new(role: 'user', content: 'First message')
128
+ list << Ollama::Message.new(role: 'assistant', content: 'Second message')
129
+
130
+ expect(STDOUT).to receive(:puts).with(/Second message/)
131
+ expect(list.show_last(23)).to eq(list)
132
+ end
98
133
  end
99
134
 
100
135
  context 'without pager' do
@@ -117,7 +152,7 @@ describe OllamaChat::MessageList do
117
152
  and_return(double(on?: true)).at_least(:once)
118
153
  expect(chat).to receive(:think).
119
154
  and_return(double(on?: false)).at_least(:once)
120
- list << Ollama::Message.new(role: 'user', content: 'world')
155
+ list << Ollama::Message.new(role: 'user', content: 'world')
121
156
  expect(STDOUT).to receive(:puts).
122
157
  with(
123
158
  "📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n" \
@@ -158,7 +193,7 @@ describe OllamaChat::MessageList do
158
193
  and_return(double(on?: true)).at_least(:once)
159
194
  expect(chat).to receive(:think).
160
195
  and_return(double(on?: false)).at_least(:once)
161
- list << Ollama::Message.new(role: 'user', content: 'world')
196
+ list << Ollama::Message.new(role: 'user', content: 'world')
162
197
  list.list_conversation
163
198
  end
164
199
  end
@@ -195,9 +230,9 @@ describe OllamaChat::MessageList do
195
230
  expect(list.size).to eq 1
196
231
  expect(list.drop(1)).to eq 0
197
232
  expect(list.size).to eq 1
198
- list << Ollama::Message.new(role: 'user', content: 'world')
233
+ list << Ollama::Message.new(role: 'user', content: 'world')
199
234
  expect(list.size).to eq 2
200
- list << Ollama::Message.new(role: 'assistant', content: 'hi')
235
+ list << Ollama::Message.new(role: 'assistant', content: 'hi')
201
236
  expect(list.size).to eq 3
202
237
  expect(list.drop(1)).to eq 1
203
238
  expect(list.size).to eq 1
@@ -227,7 +262,7 @@ describe OllamaChat::MessageList do
227
262
 
228
263
  it 'can be converted int an Ollama::Message array' do
229
264
  expect(chat).to receive(:location).and_return(double(on?: false))
230
- list << Ollama::Message.new(role: 'user', content: 'world')
265
+ list << Ollama::Message.new(role: 'user', content: 'world')
231
266
  expect(list.to_ary.map(&:as_json)).to eq [
232
267
  Ollama::Message.new(role: 'system', content: 'hello').as_json,
233
268
  Ollama::Message.new(role: 'user', content: 'world').as_json,
@@ -236,7 +271,7 @@ describe OllamaChat::MessageList do
236
271
 
237
272
  it 'can be converted int an Ollama::Message array with location' do
238
273
  expect(chat).to receive(:location).and_return(double(on?: true))
239
- list << Ollama::Message.new(role: 'user', content: 'world')
274
+ list << Ollama::Message.new(role: 'user', content: 'world')
240
275
  first = list.to_ary.first
241
276
  expect(first.role).to eq 'system'
242
277
  expect(first.content).to match(
@@ -246,7 +281,7 @@ describe OllamaChat::MessageList do
246
281
  it 'can be converted int an Ollama::Message array with location without a system prompt' do
247
282
  expect(chat).to receive(:location).and_return(double(on?: true))
248
283
  list = described_class.new(chat).tap do |list|
249
- list << Ollama::Message.new(role: 'user', content: 'hello')
284
+ list << Ollama::Message.new(role: 'user', content: 'hello')
250
285
  list << Ollama::Message.new(role: 'assistant', content: 'world')
251
286
  end
252
287
  first = list.to_ary.first