ollama_chat 0.0.25 → 0.0.27

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: a08d4184578d406a8e5d02823cbd5fc0c1999591f86f715b99637b9a263ba7f4
- data.tar.gz: 2cdbb7c09d8de378fc58b292e4d2e29ba6849f3b9c3f1bb742ba658ec6b997e0
+ metadata.gz: c2155f3ea224b97ac3a471848b53530b60308b685b21804a23138ce31e752891
+ data.tar.gz: f96db1b34a1d1a95bad394d6c0ac0ba43f7bc1c481344aa7d0d19565c4889149
  SHA512:
- metadata.gz: da10d3bd76363f8856a0e35dfe927084374025c3e87a9e531d02ba07bcaa214e057a649ab9c3c46057947e651f8ca616344a1de30bcecc1d64fe4e8c13df3893
- data.tar.gz: c2944a6a328e5a1833d4becfe9842dbdd6b4d223bb36903b22254914d22ec19eadcc7942f0e4ba0d6a11d0fccfb4d41f3bf5624019901fb594e652aeacd70c21
+ metadata.gz: 6dfc776afc77a3cfdb3b5ab6d6d4d41e188345c15d4e8341495c00e90d4c03aefef4ac01af1d8ffae4b1a20deb11c8cfe3f063e8d4d11c53aaa8253fd66d5466
+ data.tar.gz: aee0b2f2fe0a17af19d09384df6dca08a3848fb6ebea981d71f93f1e8dc56858e6e293619b1755ea97aaefd2f7174925d50387cb5b9782c976ef477270d4e373
data/CHANGES.md CHANGED
@@ -1,5 +1,27 @@
  # Changes

+ ## 2025-09-05 v0.0.27
+
+ - Enhanced cache hit notifications to properly handle content type with
+ fallback to 'unknown'
+ - Modified `OllamaChat::Utils::CacheFetcher` to return `io` for proper content
+ type propagation
+
+ ## 2025-08-27 v0.0.26
+
+ - Enhanced `/last` command to support numeric argument, allowing users to
+ specify the number of messages to display
+ - Configured tests to protect environment variables by using `protect_env:
+ true` option and direct `ENV` manipulation
+ - Refactored spec helper with modularized `AssetHelpers`, `StubOllamaServer`,
+ and `ProtectEnvVars` modules for better organization
+ - Improved code clarity and added comprehensive documentation across multiple
+ modules including `OllamaChat`, `Chat`, `MessageList`, and others
+ - Added detailed class-level documentation for `OllamaChatConfig` with examples
+ - Included documentation for the `Parsing`, `Vim`, `MessageFormat`,
+ `KramdownANSI`, `Information`, `UserAgent`, and `History` modules
+ - Improved cache hit message formatting and wording for better user experience
+
  ## 2025-08-18 v0.0.25

  - Integrated `context_spook` gem as development dependency
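
Note: the v0.0.27 entries above mention that `OllamaChat::Utils::CacheFetcher` now returns `io` so the content type of a cache hit can propagate to the notification, with a fallback to 'unknown'. The snippet below is only a minimal, self-contained sketch of that pattern with a hypothetical fetcher and cache shape; the gem's actual CacheFetcher is not part of this diff.

    require 'stringio'

    # Hypothetical stand-in for a cache fetcher that hands the cached IO back
    # to its caller so the stored content type can propagate.
    class CacheFetcher
      def initialize(cache)
        @cache = cache # assumed shape: { url => [ content_type, body ] }
      end

      def get(url)
        content_type, body = @cache[url]
        body or return
        io = StringIO.new(body)
        io.extend(Module.new { attr_accessor :content_type })
        io.content_type = content_type || 'unknown' # fall back to 'unknown'
        yield io if block_given?
        io # returning io lets the caller report the cache hit's content type
      end
    end

    fetcher = CacheFetcher.new('https://example.com' => [ 'text/html', '<html></html>' ])
    io      = fetcher.get('https://example.com') { |i| i.read }
    puts "cache hit: content type #{io.content_type}" # => cache hit: content type text/html
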
data/README.md CHANGED
@@ -153,7 +153,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
  /stream toggle stream output
  /location toggle location submission
  /voice [change] toggle voice output or change the voice
- /last show the last system/assistant message
+ /last [n] show the last n / 1 system/assistant message
  /list [n] list the last n / all conversation exchanges
  /clear [what] clear what=messages|links|history|tags|all
  /clobber clear the conversation, links, and collection
@@ -177,6 +177,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
  /load filename load conversation messages
  /output filename save last response to filename
  /pipe command write last response to command's stdin
+ /vim insert the last message into a vim server
  /quit to quit
  /help to view this help
  ```
data/Rakefile CHANGED
@@ -21,7 +21,11 @@ GemHadar do

  test_dir 'spec'
  ignore '.*.sw[pon]', 'pkg', 'Gemfile.lock', '.AppleDouble', '.bundle',
- '.yardoc', 'tags', 'corpus', 'coverage', '/config/searxng/*'
+ '.yardoc', 'doc', 'tags', 'corpus', 'coverage', '/config/searxng/*',
+ '.starscope.db', 'cscope.out'
+ package_ignore '.all_images.yml', '.tool-versions', '.gitignore', 'VERSION',
+ '.rspec', '.github', *FileList['.contexts/*'], '.envrc'
+

  readme 'README.md'

@@ -17,6 +17,20 @@ require 'xdg'
  require 'socket'
  require 'shellwords'

+ # A chat client for interacting with Ollama models through a terminal
+ # interface.
+ #
+ # The Chat class provides a complete command-line interface for chatting with
+ # language models via the Ollama API. It handles configuration management,
+ # message history, document processing, web searching, and various interactive
+ # features including voice output, markdown rendering, and embedding
+ # capabilities.
+ #
+ # @example Initializing a chat session
+ # chat = OllamaChat::Chat.new(argv: ['-m', 'llama3.1'])
+ #
+ # @example Starting an interactive chat
+ # chat.start
  class OllamaChat::Chat
  include Tins::GO
  include Term::ANSIColor
@@ -114,7 +128,6 @@ class OllamaChat::Chat
  # @return [Ollama::Client] the configured Ollama API client
  attr_reader :ollama

-
  # Returns the documents set for this object, initializing it lazily if needed.
  #
  # The documents set is memoized, meaning it will only be created once per
@@ -205,11 +218,12 @@ class OllamaChat::Chat
  end
  :next
  when %r(^/list(?:\s+(\d*))?$)
- last = 2 * $1.to_i if $1
- messages.list_conversation(last)
+ n = 2 * $1.to_i if $1
+ messages.list_conversation(n)
  :next
- when %r(^/last$)
- messages.show_last
+ when %r(^/last(?:\s+(\d*))?$)
+ n = $1.to_i if $1
+ messages.show_last(n)
  :next
  when %r(^/clear(?:\s+(messages|links|history|tags|all))?$)
  clean($1)
@@ -661,7 +675,6 @@ class OllamaChat::Chat
  #
  # @see fetch_source
  # @see embed_source
- # @see documents.clear
  def add_documents_from_argv(document_list)
  if document_list.any?(&:empty?)
  STDOUT.puts "Clearing collection #{bold{documents.collection}}."
@@ -1,20 +1,38 @@
+ # Module for handling document caching and retrieval using embedding
+ # similarity.
+ #
+ # This module provides methods to configure cache backends and manage document
+ # storage with semantic search capabilities. It integrates with Documentrix's
+ # document management system to enable efficient storage, retrieval, and
+ # similarity-based searching of documents using vector embeddings.
  module OllamaChat::DocumentCache
- # The document_cache_class method returns the cache class specified in the
- # configuration.
+ # Retrieves the cache class specified in the configuration.
  #
- # @return [ Class ] the cache class defined by the config.cache setting
+ # This method resolves the cache class name from the application's
+ # configuration to dynamically load the appropriate cache implementation.
+ #
+ # @return [Class] The cache class referenced by the configuration's cache
+ # setting.
+ # @raise [NameError] If the configured cache class name does not correspond
+ # to an existing constant.
  def document_cache_class
  Object.const_get(config.cache)
  end

- # The configure_cache method determines the appropriate cache class to use
- # for document storage.
- # It checks if the -M option was specified to use MemoryCache, otherwise it
- # attempts to use the configured cache class.
- # If an error occurs during this process, it falls back to using MemoryCache
- # and reports the error.
+ # Configures and returns the appropriate cache class based on command-line
+ # options.
+ #
+ # Determines which cache implementation to use based on command-line flags: -
+ # If the `-M` flag is set, uses {Documentrix::Documents::MemoryCache} -
+ # Otherwise, resolves and returns the cache class specified in configuration
+ #
+ # Falls back to {Documentrix::Documents::MemoryCache} if configuration
+ # resolution fails.
  #
- # @return [ Class ] the selected cache class to be used for document caching
+ # @return [Class] The selected cache class for document storage and
+ # retrieval.
+ # @raise [StandardError] If there is an error resolving the configured cache
+ # class, logs the error to standard error and falls back to MemoryCache.
  def configure_cache
  if @opts[?M]
  Documentrix::Documents::MemoryCache
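
Note: the hunk above is cut off inside `configure_cache`. Per the documentation being added, the method prefers `Documentrix::Documents::MemoryCache` when `-M` is given, otherwise resolves the configured cache class, and falls back to MemoryCache if resolution fails. The snippet below is a self-contained sketch of that fallback shape using stand-in constants, not the gem's actual code.

    # Stand-in cache classes so the sketch runs on its own.
    module MemoryCache; end
    module RedisCache; end

    def configure_cache(opts, configured_class_name)
      if opts['M']
        MemoryCache
      else
        Object.const_get(configured_class_name)
      end
    rescue NameError => e
      warn "Caught #{e.class}: #{e} => Falling back to MemoryCache."
      MemoryCache
    end

    p configure_cache({ 'M' => true }, 'RedisCache') # -M flag wins        => MemoryCache
    p configure_cache({}, 'RedisCache')              # configured class    => RedisCache
    p configure_cache({}, 'NoSuchCache')             # resolution failure  => MemoryCache
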
@@ -1,3 +1,15 @@
+ # A class that handles chat responses and manages the flow of conversation
+ # between the user and Ollama models.
+ #
+ # This class is responsible for processing Ollama API responses, updating
+ # message history, displaying formatted output to the terminal, and managing
+ # voice synthesis for spoken responses. It acts as a handler for streaming
+ # responses and ensures proper formatting and display of both regular content
+ # and thinking annotations.
+ #
+ # @example Processing a chat response
+ # follow_chat = OllamaChat::FollowChat.new(chat: chat_instance, messages: message_list)
+ # follow_chat.call(response)
  class OllamaChat::FollowChat
  include Ollama
  include Ollama::Handlers::Concern
@@ -6,10 +18,13 @@ class OllamaChat::FollowChat

  # Initializes a new instance of OllamaChat::FollowChat.
  #
- # @param [OllamaChat::Chat] chat The chat object, which represents the conversation context.
- # @param [#to_a] messages A collection of message objects, representing the conversation history.
+ # @param [OllamaChat::Chat] chat The chat object, which represents the
+ # conversation context.
+ # @param [#to_a] messages A collection of message objects, representing the
+ # conversation history.
  # @param [String] voice (optional) to speek with if any.
- # @param [IO] output (optional) The output stream where terminal output should be printed. Defaults to STDOUT.
+ # @param [IO] output (optional) The output stream where terminal output
+ # should be printed. Defaults to STDOUT.
  #
  # @return [OllamaChat::FollowChat] A new instance of OllamaChat::FollowChat.
  def initialize(chat:, messages:, voice: nil, output: STDOUT)
@@ -23,7 +38,8 @@ class OllamaChat::FollowChat

  # Returns the conversation history (an array of message objects).
  #
- # @return [OllamaChat::MessageList<Ollama::Message>] The array of messages in the conversation.
+ # @return [OllamaChat::MessageList<Ollama::Message>] The array of messages in
+ # the conversation.
  attr_reader :messages

  # Invokes the chat flow based on the provided Ollama server response.
@@ -32,15 +48,18 @@ class OllamaChat::FollowChat
  # about the user input and the assistant's response.
  #
  # If the response indicates an assistant message, this method:
- # 1. Ensures that an assistant response exists in the message history (if not already present).
- # 2. Updates the last message with the new content and thinking (if applicable).
+ # 1. Ensures that an assistant response exists in the message history (if
+ # not already present).
+ # 2. Updates the last message with the new content and thinking (if
+ # applicable).
  # 3. Displays the formatted terminal output for the user.
  # 4. Outputs the voice response (if configured).
  #
  # Regardless of whether an assistant message is present, this method also
  # outputs evaluation statistics (if applicable).
  #
- # @param [Ollama::Response] response The parsed JSON response from the Ollama server.
+ # @param [Ollama::Response] response The parsed JSON response from the Ollama
+ # server.
  #
  # @return [OllamaChat::FollowChat] The current instance for method chaining.
  def call(response)
@@ -1,29 +1,77 @@
+ # A module that provides history management functionality for OllamaChat
+ # sessions.
+ #
+ # The History module encapsulates methods for initializing, saving, and
+ # clearing command-line history within the OllamaChat application. It handles
+ # persistence of user input history to a file and ensures that chat sessions
+ # can maintain state across invocations by loading previous command histories.
+ #
+ # @example Initializing chat history
+ # chat.init_chat_history
+ #
+ # @example Saving chat history
+ # chat.save_history
+ #
+ # @example Clearing chat history
+ # chat.clear_history
  module OllamaChat::History
- # Returns the full path of the chat history filename based on the
- # configuration.
+ # The chat_history_filename method constructs and returns the full file path
+ # for the chat history file.
+ #
+ # This method takes the configured chat history filename from the
+ # configuration and expands it to an absolute path using File.expand_path.
+ # This ensures that the returned path is fully qualified and can be used
+ # reliably for reading from or writing to the chat history file.
+ #
+ # @return [String] the absolute file path to the chat history file as
+ # specified in the configuration
  def chat_history_filename
  File.expand_path(config.chat_history_filename)
  end

- # Initializes the chat history by loading it from a file if it exists, and
- # then loads the history into Readline::HISTORY.
+ # The init_chat_history method initializes the chat session by loading
+ # previously saved command history from a file.
+ #
+ # This method checks for the existence of a chat history file and, if found,
+ # loads its contents into the Readline::HISTORY array. It clears the current
+ # history and replaces it with the saved history data. Any errors during the
+ # loading process are caught and logged as warnings, but do not interrupt the
+ # execution flow.
  def init_chat_history
  if File.exist?(chat_history_filename)
  File.open(chat_history_filename, ?r) do |history|
  history_data = JSON.load(history)
- clear_history
+ Readline::HISTORY.clear
  Readline::HISTORY.push(*history_data)
  end
  end
+ rescue => e
+ warn "Caught #{e.class} while loading #{chat_history_filename.inspect}: #{e}"
  end

- # Saves the current chat history to a file in JSON format.
+ # The save_history method persists the current command history to a file.
+ #
+ # This method serializes the Readline::HISTORY array into JSON format and
+ # writes it to the chat history filename. It handles potential errors during
+ # the write operation by catching exceptions and issuing a warning message.
  def save_history
  File.secure_write(chat_history_filename, JSON.dump(Readline::HISTORY))
+ rescue => e
+ warn "Caught #{e.class} while saving #{chat_history_filename.inspect}: #{e}"
  end

- # Clears all entries from Readline::HISTORY.
+ # The clear_history method clears the Readline history array and ensures that
+ # the chat history is saved afterwards.
+ #
+ # This method removes all entries from the Readline::HISTORY array,
+ # effectively clearing the command history maintained by the readline
+ # library. It then calls save_history to persist this cleared state to the
+ # configured history file. The method uses an ensure block to guarantee that
+ # save_history is called even if an exception occurs during the clearing
+ # process.
  def clear_history
  Readline::HISTORY.clear
+ ensure
+ save_history
  end
  end
@@ -1,3 +1,20 @@
+ # A module that provides information and user agent functionality for
+ # OllamaChat
+ #
+ # The Information module encapsulates methods for managing application
+ # identification, displaying version and configuration details, and handling
+ # command-line interface help messages. It includes user agent capabilities for
+ # HTTP requests and provides comprehensive information display features for
+ # chat sessions.
+ #
+ # @example Displaying application information
+ # chat.info
+ #
+ # @example Showing version details
+ # chat.version
+ #
+ # @example Displaying usage help
+ # chat.usage
  module OllamaChat::Information
  extend Tins::Concern

@@ -6,6 +23,20 @@ module OllamaChat::Information
  extend UserAgent
  end

+ # A module that provides user agent functionality for identifying the
+ # application.
+ #
+ # This module encapsulates methods for determining the application name and
+ # constructing a standardized user agent string that includes the application
+ # name and version. It is designed to be included in classes that need to
+ # provide identification information for HTTP requests or other
+ # communications.
+ #
+ # @example Accessing the program name
+ # UserAgent.progname # => "ollama_chat"
+ #
+ # @example Generating a user agent string
+ # UserAgent.user_agent # => "ollama_chat/0.0.25"
  module UserAgent
  # The progname method returns the name of the application.
  #
@@ -92,7 +123,7 @@ module OllamaChat::Information
  /stream toggle stream output
  /location toggle location submission
  /voice [change] toggle voice output or change the voice
- /last show the last system/assistant message
+ /last [n] show the last n / 1 system/assistant message
  /list [n] list the last n / all conversation exchanges
  /clear [what] clear what=messages|links|history|tags|all
  /clobber clear the conversation, links, and collection
@@ -1,3 +1,15 @@
+ # A module that provides Kramdown::ANSI styling configuration and parsing
+ # functionality for OllamaChat.
+ #
+ # This module handles the setup and application of ANSI styles for markdown
+ # rendering, allowing for customizable terminal output formatting. It manages
+ # the configuration of ANSI styles either from environment variables or falls
+ # back to default settings, and provides methods to parse content with the
+ # configured styling.
+ #
+ # @example Configuring custom ANSI styles via environment variable
+ # Set KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES to a JSON object containing style
+ # definitions for customizing markdown output formatting in the terminal.
  module OllamaChat::KramdownANSI
  # The configure_kramdown_ansi_styles method sets up ANSI styling for
  # Kramdown::ANSI output by checking for specific environment variables and
@@ -1,3 +1,19 @@
+ # A module that provides formatting functionality for chat messages.
+ #
+ # The MessageFormat module encapsulates methods for determining message icons
+ # based on whether images are present, and for conditionally annotating content
+ # with thinking or talk indicators. It supports customizable formatting of
+ # message text for display in terminal interfaces.
+ #
+ # @example Using message_type to determine icon based on images
+ # message_type([]) # => "📨"
+ # message_type(["image"]) # => "📸"
+ #
+ # @example Annotating content with thinking indicator
+ # think_annotate { "Thinking..." } # => "💭\nThinking...\n" (when think is enabled)
+ #
+ # @example Annotating content with talk indicator
+ # talk_annotate { "Speaking..." } # => "💬\nSpeaking...\n" (when think is enabled)
  module OllamaChat::MessageFormat
  # The message_type method determines the appropriate message icon based on
  # whether images are present.
@@ -1,3 +1,33 @@
+ # A collection class for managing chat messages with support for system
+ # prompts, paged output, and conversation history.
+
+ # This class provides functionality for storing, retrieving, and displaying
+ # chat messages in a structured manner. It handles system prompts separately
+ # from regular user and assistant messages, supports pagination for displaying
+ # conversations, and offers methods for manipulating message history including
+ # clearing, loading, saving, and dropping exchanges. The class integrates with
+ # Kramdown::ANSI for formatted output and supports location information in
+ # system messages.
+
+ # @example Creating a new message list
+ # chat = OllamaChat::Chat.new
+ # messages = OllamaChat::MessageList.new(chat)
+ #
+ # @example Adding messages to the list
+ # messages << Ollama::Message.new(role: 'user', content: 'Hello')
+ # messages << Ollama::Message.new(role: 'assistant', content: 'Hi there!')
+ #
+ # @example Displaying conversation history
+ # messages.list_conversation(5) # Shows last 5 exchanges
+ #
+ # @example Clearing messages
+ # messages.clear # Removes all non-system messages
+ #
+ # @example Loading a saved conversation
+ # messages.load_conversation('conversation.json')
+ #
+ # @example Saving current conversation
+ # messages.save_conversation('my_conversation.json')
  class OllamaChat::MessageList
  include Term::ANSIColor
  include OllamaChat::MessageFormat
@@ -11,8 +41,20 @@ class OllamaChat::MessageList
  @messages = []
  end

+ # The system attribute reader returns the system prompt for the chat session.
+ #
+ # @attr_reader [ String, nil ] the current system prompt content or nil if not set
  attr_reader :system

+ # The messages attribute reader returns the messages set for this object,
+ # initializing it lazily if needed.
+ #
+ # The messages set is memoized, meaning it will only be created once per
+ # object instance and subsequent calls will return the same
+ # OllamaChat::MessageList instance.
+ #
+ # @attr_reader [OllamaChat::MessageList] A MessageList object containing all
+ # messages associated with this instance
  attr_reader :messages

  # Returns the number of messages stored in the message list.
@@ -111,11 +153,15 @@ class OllamaChat::MessageList
  # from the user. It uses a pager for output and returns the instance itself.
  #
  # @return [ OllamaChat::MessageList ] returns the instance of the class
- def show_last
- message = last
- !message || message.role == 'user' and return
+ def show_last(n = nil)
+ n ||= 1
+ messages = @messages.reject { |message| message.role == 'user' }
+ n = n.clamp(0..messages.size)
+ n <= 0 and return
  use_pager do |output|
- output.puts message_text_for(message)
+ messages[-n..-1].to_a.each do |message|
+ output.puts message_text_for(message)
+ end
  end
  self
  end
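
Note: to make the effect of the new `show_last(n)` visible, here is a small, self-contained rerun of its filtering and clamping logic with simplified stand-in messages (not the gem's message objects).

    Message = Struct.new(:role, :content)

    history = [
      Message.new('system', 'You are a helpful assistant.'),
      Message.new('user', 'Hi'),
      Message.new('assistant', 'Hello!'),
      Message.new('user', 'What changed in 0.0.26?'),
      Message.new('assistant', '/last now accepts a numeric argument.')
    ]

    n = 10                                             # e.g. the user typed /last 10
    non_user = history.reject { |m| m.role == 'user' } # user messages are skipped
    n = n.clamp(0..non_user.size)                      # clamped to what exists (3 here)
    puts non_user[-n..-1].map(&:content)               # prints the last 3 non-user messages
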
@@ -277,7 +323,8 @@ class OllamaChat::MessageList
  # If the output would exceed the terminal's line capacity, it pipes the content
  # through an appropriate pager command (like 'less' or 'more').
  #
- # @param block [Proc] A block that yields an IO object to write output to
+ # @yield A block that yields an IO object to write output to
+ # @yieldparam [IO] the IO object to write to
  def use_pager
  command = determine_pager_command
  output_buffer = StringIO.new
@@ -1,3 +1,15 @@
+ # A module that provides output functionality for chat messages.
+ #
+ # This module encapsulates methods for piping assistant responses to command
+ # standard input and writing assistant responses to files. It handles the
+ # mechanics of sending output to external processes or saving content to disk
+ # while providing appropriate error handling and user feedback.
+ #
+ # @example Piping a response to a command
+ # chat.pipe('cat > output.txt')
+ #
+ # @example Writing a response to a file
+ # chat.output('response.txt')
  module OllamaChat::MessageOutput
  # The pipe method forwards the last assistant message to a command's standard
  # input.
@@ -1,3 +1,20 @@
+ # A module that provides functionality for managing Ollama models, including
+ # checking model availability, pulling models from remote servers, and handling
+ # model presence verification.
+ #
+ # This module encapsulates the logic for interacting with Ollama models,
+ # ensuring that required models are available locally before attempting to use
+ # them in chat sessions. It handles both local model verification and remote
+ # model retrieval when necessary.
+ #
+ # @example Checking if a model is present
+ # chat.model_present?('llama3.1')
+ #
+ # @example Pulling a model from a remote server
+ # chat.pull_model_from_remote('mistral')
+ #
+ # @example Ensuring a model is available locally
+ # chat.pull_model_unless_present('phi3', {})
  module OllamaChat::ModelHandling
  # The model_present? method checks if the specified Ollama model is available.
  #
@@ -1,12 +1,31 @@
  require 'pathname'

+ # A configuration class for managing OllamaChat settings and file paths.
+ #
+ # This class handles the initialization and management of configuration files
+ # for the OllamaChat application. It provides methods for setting up default
+ # configurations, determining appropriate file paths for config and cache
+ # directories, and managing the loading and creation of configuration files
+ # based on XDG standards.
+ #
+ # @example Initializing with a custom configuration file
+ # config = OllamaChat::OllamaChatConfig.new('/path/to/custom/config.yml')
+ #
+ # @example Accessing default configuration paths
+ # config = OllamaChat::OllamaChatConfig.new
+ # config.default_config_path # => Path to the default configuration file
+ # config.config_dir_path # => Path to the configuration directory
+ # config.cache_dir_path # => Path to the cache directory
+ # config.database_path # => Path to the documents database file
  class OllamaChat::OllamaChatConfig
  include ComplexConfig
  include FileUtils

+ # Path to the default config
  DEFAULT_CONFIG_PATH = Pathname.new(__FILE__).dirname.
  join('ollama_chat_config/default_config.yml')

+ # Content of the default config
  DEFAULT_CONFIG = File.read(DEFAULT_CONFIG_PATH)

  # The initialize method sets up the configuration file path and ensures the
@@ -1,3 +1,16 @@
+ # A module that provides content parsing functionality for OllamaChat.
+ #
+ # The Parsing module encapsulates methods for processing various types of input
+ # sources including HTML, XML, CSV, RSS, Atom, PDF, and Postscript documents.
+ # It handles content extraction and conversion into standardized text formats
+ # suitable for chat interactions. The module supports different document
+ # policies for handling imported or embedded content and provides utilities for
+ # parsing structured data from multiple source types.
+ #
+ # @example Processing different document types
+ # chat.parse_source(html_io) # Converts HTML to markdown
+ # chat.parse_source(pdf_io) # Extracts text from PDF files
+ # chat.parse_source(csv_io) # Formats CSV data into readable strings
  module OllamaChat::Parsing
  # The parse_source method processes different types of input sources and
  # converts them into a standardized text representation.
@@ -1,3 +1,25 @@
+ # A module that provides server socket functionality for OllamaChat
+ #
+ # The ServerSocket module encapsulates the logic for creating and managing Unix
+ # domain socket servers that enable external processes to send input to running
+ # ollama_chat sessions. It supports both simple message transmission and
+ # bidirectional communication with response handling, allowing for integration
+ # with tools like ollama_chat_send.
+ #
+ # @example Sending a message to a running chat session
+ # OllamaChat::ServerSocket.send_to_server_socket(
+ # "Hello from external process",
+ # config: chat_config,
+ # type: :socket_input
+ # )
+ #
+ # @example Sending a message and waiting for a response
+ # response = OllamaChat::ServerSocket.send_to_server_socket(
+ # "What is the answer?",
+ # config: chat_config,
+ # type: :socket_input_with_response,
+ # parse: true
+ # )
  module OllamaChat::ServerSocket
  class << self
  # The send_to_server_socket method transmits a message to a Unix domain
@@ -56,6 +78,11 @@ module OllamaChat::ServerSocket
  end
  end

+ # The server_socket_message accessor method provides read and write access to
+ # the server socket message instance variable.
+ #
+ # @return [ Object, nil ] the current server socket message object or nil if
+ # not set
  attr_accessor :server_socket_message

  # Initializes the server socket to receive messages from the Ollama Chat
@@ -1,3 +1,28 @@
+ # A module that provides functionality for fetching and processing various
+ # types of content sources.
+ #
+ # The SourceFetching module encapsulates methods for retrieving content from
+ # different source types including URLs, file paths, and shell commands. It
+ # handles the logic for determining the appropriate fetching method based on
+ # the source identifier and processes the retrieved content through specialized
+ # parsers depending on the content type. The module also manages image
+ # handling, document importing, summarizing, and embedding operations while
+ # providing error handling and debugging capabilities.
+ #
+ # @example Fetching content from a URL
+ # chat.fetch_source('https://example.com/document.html') do |source_io|
+ # # Process the fetched content
+ # end
+ #
+ # @example Importing a local file
+ # chat.fetch_source('/path/to/local/file.txt') do |source_io|
+ # # Process the imported file content
+ # end
+ #
+ # @example Executing a shell command
+ # chat.fetch_source('!ls -la') do |source_io|
+ # # Process the command output
+ # end
  module OllamaChat::SourceFetching
  # The http_options method prepares HTTP options for requests based on
  # configuration settings.