ollama_chat 0.0.56 → 0.0.58

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/CHANGES.md +46 -0
  3. data/Rakefile +2 -1
  4. data/docker-compose.yml +1 -1
  5. data/lib/ollama_chat/chat.rb +47 -20
  6. data/lib/ollama_chat/clipboard.rb +8 -7
  7. data/lib/ollama_chat/conversation.rb +2 -2
  8. data/lib/ollama_chat/dialog.rb +2 -41
  9. data/lib/ollama_chat/document_cache.rb +8 -7
  10. data/lib/ollama_chat/env_config.rb +9 -0
  11. data/lib/ollama_chat/follow_chat.rb +8 -8
  12. data/lib/ollama_chat/history.rb +1 -1
  13. data/lib/ollama_chat/information.rb +5 -7
  14. data/lib/ollama_chat/kramdown_ansi.rb +2 -2
  15. data/lib/ollama_chat/message_list.rb +7 -5
  16. data/lib/ollama_chat/message_output.rb +8 -5
  17. data/lib/ollama_chat/model_handling.rb +2 -4
  18. data/lib/ollama_chat/ollama_chat_config/default_config.yml +3 -2
  19. data/lib/ollama_chat/ollama_chat_config.rb +2 -2
  20. data/lib/ollama_chat/parsing.rb +6 -5
  21. data/lib/ollama_chat/server_socket.rb +4 -4
  22. data/lib/ollama_chat/source_fetching.rb +8 -7
  23. data/lib/ollama_chat/state_selectors.rb +146 -0
  24. data/lib/ollama_chat/switches.rb +7 -9
  25. data/lib/ollama_chat/think_control.rb +11 -39
  26. data/lib/ollama_chat/utils/fetcher.rb +1 -1
  27. data/lib/ollama_chat/version.rb +1 -1
  28. data/lib/ollama_chat/web_searching.rb +3 -3
  29. data/lib/ollama_chat.rb +1 -0
  30. data/ollama_chat.gemspec +6 -5
  31. data/spec/ollama_chat/chat_spec.rb +5 -9
  32. data/spec/ollama_chat/clipboard_spec.rb +1 -1
  33. data/spec/ollama_chat/information_spec.rb +1 -1
  34. data/spec/ollama_chat/input_content_spec.rb +1 -1
  35. data/spec/ollama_chat/message_editing_spec.rb +1 -1
  36. data/spec/ollama_chat/message_output_spec.rb +1 -1
  37. data/spec/ollama_chat/model_handling_spec.rb +1 -1
  38. data/spec/ollama_chat/parsing_spec.rb +6 -6
  39. data/spec/ollama_chat/source_fetching_spec.rb +1 -3
  40. data/spec/ollama_chat/state_selectors_spec.rb +193 -0
  41. data/spec/ollama_chat/think_control_spec.rb +41 -101
  42. data/spec/ollama_chat/web_searching_spec.rb +1 -1
  43. data/spec/spec_helper.rb +6 -2
  44. metadata +19 -1
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 683c0bf5fdb8756030c0c5b69767d95f8ae852094a0048cb61b4cf7166f6a045
- data.tar.gz: 8e7f9022cfd1e9c32f987b76196222d346100cd5ceaa6d59e6dc5c30257e5dbb
+ metadata.gz: e977681ee5ceb8267b4c2d9f2ad7f9c22ac168791a14cf12a6ae5d8696d58c24
+ data.tar.gz: f5277d49ba8ccd5af55f83e742dfcb5b9818395151478547f183873669024d09
  SHA512:
- metadata.gz: 854af0ea07ced05e3f2f36741dd98592a2f6968e3307662e10c14bcb9a10b2168a1dde962de32f44e73719d177547cbccd8eee76c0554878a0de7ce2e1811772
- data.tar.gz: 86b71f7b0748be19d0f772213b1a21ea8c7f838296bcd48348076ef39372e0a0f07db20c59a1c1b56f8a7592d100993a7ffa6b6507b4febbbca2977f0375baff
+ metadata.gz: c39142b8c05b16591d285f66d80dbbda12e20058a8fb5b533e219c029bc08e66ade15e5c87a90228a9598929f0444e71b1fdf6cfe18abb4426a423f177716086
+ data.tar.gz: 734d78b497080fce515f6fa326396cb031f0540b18d565cdd324eeb4f1d16010840afd1177c55ac0cf24f7dc49b7101d108d17cf499de5d75b1daf54559ef7a3
data/CHANGES.md CHANGED
@@ -1,5 +1,51 @@
  # Changes

+ ## 2026-02-02 v0.0.58
+
+ - Updated Redis image to valkey **9.0.1** in docker-compose.yml
+ - Added `errors.lst` to `.gitignore` and updated packaging to ignore this file
+ - Added `utils` gem as a development dependency in `Rakefile` and
+   `ollama_chat.gemspec`
+ - Enhanced documentation consistency with standardized leading spaces for doc
+   comment continuation lines
+ - Standardized parameter and return value descriptions for methods to align
+   with YARD documentation standards
+ - Ensured all method signatures and descriptions comply with YARD documentation
+   standards
+
+ ## 2026-01-21 v0.0.57
+
+ - Introduce `OllamaChat::StateSelectors` module with `StateSelector` class for
+   managing configurable states
+ - Replace simple string-based document policy and think mode with
+   `StateSelector` objects
+ - Add `allow_empty` parameter to `StateSelector#initialize` method to allow
+   empty states in voice output
+ - Update `StateSelector#selected=` to conditionally validate states based on
+   `allow_empty?`
+ - Refactor voice handling to use `StateSelector` by replacing `@current_voice`
+   with `@voices` `StateSelector`
+ - Update `FollowChat` to use `@voices.selected` instead of `@current_voice` for
+   voice selection
+ - Simplify `change_voice` method in `dialog.rb` to delegate to `@voices.choose`
+ - Update voice display in `information.rb` to use `@voices.show` instead of raw
+   voice name
+ - Update configuration format to support nested `think` settings with `mode`
+   and `loud` sub-properties
+ - Modify command handlers to use `document_policy.choose` and
+   `think_mode.choose` instead of legacy methods
+ - Update `OllamaChat::Chat` initialization to use `setup_state_selectors`
+   method
+ - Refactor `OllamaChat::ThinkControl` to use new state selector system
+ - Update `OllamaChat::Parsing` to reference `@document_policy.selected` instead
+   of `@document_policy`
+ - Update default configuration file to use format with nested think settings
+ - Add proper `attr_reader` for `document_policy` and `think_mode` state
+   selectors
+ - Update help text to reference new state selector system
+ - Update `OllamaChat::Switches` to handle nested think configuration
+ - Add `OllamaChat::StateSelectors` to required files in `lib/ollama_chat.rb`
+
  ## 2026-01-17 v0.0.56

  - Updated `context_spook` dependency from version **1.4** to **1.5**
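
The `StateSelector` API introduced in v0.0.57 can be pictured with a small, self-contained sketch. This is a hypothetical illustration based only on the method names listed above (`selected`, `selected=`, `allow_empty`, `choose`, `show`); the class name `StateSelectorSketch`, its constructor arguments, and the `enabled` think-mode value are assumptions made for the example, not the gem's actual `OllamaChat::StateSelectors::StateSelector` implementation, and the interactive chooser is simplified to a direct assignment:

    # Hypothetical sketch of a StateSelector-like object; not the gem's code.
    class StateSelectorSketch
      def initialize(name, states:, selected: nil, allow_empty: false)
        @name        = name
        @states      = states
        @allow_empty = allow_empty
        self.selected = selected
      end

      # Whether an empty/nil state is acceptable (the changelog mentions this
      # for voice output).
      def allow_empty?
        @allow_empty
      end

      # The currently selected state, compared in the chat code as e.g.
      # `document_policy.selected == 'embedding'`.
      attr_reader :selected

      # Assign a new state, validating it unless empty states are allowed.
      def selected=(value)
        if value.nil? || value.to_s.empty?
          raise ArgumentError, "#{@name}: empty state not allowed" unless allow_empty?
          @selected = nil
        elsif @states.include?(value)
          @selected = value
        else
          raise ArgumentError, "#{@name}: unknown state #{value.inspect}"
        end
      end

      # Stand-in for the interactive chooser behind /document_policy and /think;
      # here it takes the value directly instead of presenting a menu.
      def choose(value)
        self.selected = value
        self
      end

      # Print the current state, roughly what think_mode.show or @voices.show
      # do in the info output.
      def show
        puts "#{@name} is #{selected || 'n/a'}."
      end
    end

    document_policy = StateSelectorSketch.new(
      'document policy',
      states:   %w[ importing embedding summarizing ignoring ],
      selected: 'importing'
    )
    document_policy.choose('embedding')
    document_policy.show   # document policy is embedding.

    think_mode = StateSelectorSketch.new(
      'think mode',
      states:   %w[ enabled disabled ],
      selected: 'disabled'
    )
    think_mode.selected = 'enabled'
    think_mode.show        # think mode is enabled.

In the released code the selectors back the `/document_policy` and `/think` commands (`document_policy.choose`, `think_mode.choose`) and the `info` output, as the chat.rb and information.rb diffs below show.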
data/Rakefile CHANGED
@@ -22,7 +22,7 @@ GemHadar do
  test_dir 'spec'
  ignore '.*.sw[pon]', 'pkg', 'Gemfile.lock', '.AppleDouble', '.bundle',
  '.yardoc', 'doc', 'tags', 'corpus', 'coverage', '/config/searxng/*',
- '.starscope.db', 'cscope.out'
+ '.starscope.db', 'cscope.out', 'errors.lst'
  package_ignore '.all_images.yml', '.tool-versions', '.gitignore', 'VERSION',
  '.rspec', '.github', '.contexts', '.envrc', '.yardopts'

@@ -66,6 +66,7 @@ GemHadar do
  development_dependency 'debug'
  development_dependency 'simplecov'
  development_dependency 'context_spook'
+ development_dependency 'utils'

  licenses << 'MIT'

data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
  services:
  redis:
  container_name: redis
- image: valkey/valkey:8.1.5-alpine
+ image: valkey/valkey:9.0.1-alpine
  restart: unless-stopped
  ports: [ "127.0.0.1:9736:6379" ]
  volumes:
data/lib/ollama_chat/chat.rb CHANGED
@@ -36,6 +36,7 @@ class OllamaChat::Chat
  include Term::ANSIColor
  include OllamaChat::DocumentCache
  include OllamaChat::Switches
+ include OllamaChat::StateSelectors
  include OllamaChat::ModelHandling
  include OllamaChat::Parsing
  include OllamaChat::SourceFetching
@@ -78,7 +79,7 @@ class OllamaChat::Chat
  #
  # @param argv [Array<String>] Command-line arguments to parse (defaults to ARGV.dup)
  #
- # @raise [ArgumentError] If the Ollama API version is less than 0.9.0, indicating
+ # @raise [RuntimeError] If the Ollama API version is less than 0.9.0, indicating
  # incompatibility with required API features
  def initialize(argv: ARGV.dup)
  @opts = go 'f:u:m:s:c:C:D:MESVh', argv
@@ -88,14 +89,10 @@ class OllamaChat::Chat
  @ollama_chat_config = OllamaChat::OllamaChatConfig.new(@opts[?f])
  self.config = @ollama_chat_config.config
  setup_switches(config)
- @ollama = connect_ollama
- if server_version.version < '0.9.0'.version
- raise ArgumentError, 'require ollama API version 0.9.0 or higher'
- end
- @document_policy = config.document_policy
+ setup_state_selectors(config)
+ connect_ollama
  @model = choose_model(@opts[?m], config.model.name)
  @model_options = Ollama::Options[config.model.options]
- @think = config.think
  model_system = pull_model_unless_present(@model, @model_options)
  embedding_enabled.set(config.embedding.enabled && !@opts[?E])
  if @opts[?c]
@@ -111,7 +108,6 @@ class OllamaChat::Chat
  end
  @documents = setup_documents
  @cache = setup_cache
- @current_voice = config.voice.default
  @images = []
  @kramdown_ansi_styles = configure_kramdown_ansi_styles
  init_chat_history
@@ -120,6 +116,18 @@ class OllamaChat::Chat
  fix_config(e)
  end

+ # The document_policy reader returns the document policy selector for the chat session.
+ #
+ # @return [ OllamaChat::StateSelector ] the document policy selector object
+ # that manages the policy for handling document references in user text
+ attr_reader :document_policy
+
+ # The think_mode reader returns the think mode selector for the chat session.
+ #
+ # @return [ OllamaChat::StateSelector ] the think mode selector object
+ # that manages the thinking mode setting for the Ollama model interactions
+ attr_reader :think_mode
+
  # The debug method accesses the debug configuration setting.
  #
  # @return [TrueClass, FalseClass] the current debug mode status
@@ -207,7 +215,7 @@ class OllamaChat::Chat
  case content
  when %r(^/reconnect)
  STDERR.print green { "Reconnecting to ollama #{base_url.to_s.inspect}…" }
- @ollama = connect_ollama
+ connect_ollama
  STDERR.puts green { " Done." }
  :next
  when %r(^/copy$)
@@ -304,10 +312,10 @@ class OllamaChat::Chat
  info
  :next
  when %r(^/document_policy$)
- choose_document_policy
+ document_policy.choose
  :next
  when %r(^/think$)
- choose_think_mode
+ think_mode.choose
  :next
  when %r(^/think_loud$)
  think_loud.toggle
@@ -385,9 +393,9 @@ class OllamaChat::Chat
  # the specified number of URLs. The processing approach varies based on the current
  # document policy and embedding status:
  #
- # - **Embedding mode**: When `@document_policy == 'embedding'` AND `@embedding.on?` is true,
+ # - **Embedding mode**: When `document_policy.selected == 'embedding'` AND `@embedding.on?` is true,
  # each result is embedded and the query is interpolated into the `web_embed` prompt.
- # - **Summarizing mode**: When `@document_policy == 'summarizing'`,
+ # - **Summarizing mode**: When `document_policy.selected == 'summarizing'`,
  # each result is summarized and both query and results are interpolated into the
  # `web_summarize` prompt.
  # - **Importing mode**: For all other cases, each result is imported and both query and
@@ -403,11 +411,11 @@ class OllamaChat::Chat
  # web('3', 'ruby programming tutorials')
  #
  # @example Web search with embedding policy
- # # With @document_policy == 'embedding' and @embedding.on?
+ # # With document_policy.selected == 'embedding' and @embedding.on?
  # # Processes results through embedding pipeline
  #
  # @example Web search with summarizing policy
- # # With @document_policy == 'summarizing'
+ # # With document_policy.selected == 'summarizing'
  # # Processes results through summarization pipeline
  #
  # @see #search_web
@@ -417,13 +425,13 @@ class OllamaChat::Chat
  # @see #summarize
  def web(count, query)
  urls = search_web(query, count.to_i) or return :next
- if @document_policy == 'embedding' && @embedding.on?
+ if document_policy.selected == 'embedding' && @embedding.on?
  prompt = config.prompts.web_embed
  urls.each do |url|
  fetch_source(url) { |url_io| embed_source(url_io, url) }
  end
  prompt.named_placeholders_interpolate({query:})
- elsif @document_policy == 'summarizing'
+ elsif document_policy.selected == 'summarizing'
  prompt = config.prompts.web_import
  results = urls.each_with_object('') do |url, content|
  summarize(url).full? do |c|
@@ -617,7 +625,7 @@ class OllamaChat::Chat
  handler = OllamaChat::FollowChat.new(
  chat: self,
  messages:,
- voice: (@current_voice if voice.on?)
+ voice: (@voices.selected if voice.on?)
  )
  begin
  retried = false
@@ -633,7 +641,7 @@ class OllamaChat::Chat
  if think? && !retried
  STDOUT.puts "#{bold('Error')}: in think mode, switch thinking off and retry."
  sleep 1
- @think = false
+ think_mode.selected = 'disabled'
  retried = true
  retry
  else
@@ -677,12 +685,27 @@ class OllamaChat::Chat

  private

+ # The base_url method returns the Ollama server URL from command-line options
+ # or environment configuration.
+ #
+ # @return [String] the base URL used for connecting to the Ollama API
  def base_url
  @opts[?u] || OllamaChat::EnvConfig::OLLAMA::URL
  end

+ # The connect_ollama method establishes a connection to the Ollama API server.
+ #
+ # This method initializes a new Ollama::Client instance with configured timeouts
+ # and connection parameters, then verifies that the connected server meets
+ # the minimum required API version (0.9.0). It sets the @ollama instance
+ # variable to the configured client and stores the server version in @server_version.
+ #
+ # @return [Ollama::Client] the configured Ollama client instance
+ # @raise [RuntimeError] if the connected Ollama server API version is less
+ # than 0.9.0
  def connect_ollama
- Ollama::Client.new(
+ @server_version = nil
+ @ollama = Ollama::Client.new(
  connect_timeout: config.timeouts.connect_timeout?,
  read_timeout: config.timeouts.read_timeout?,
  write_timeout: config.timeouts.write_timeout?,
@@ -690,6 +713,10 @@ class OllamaChat::Chat
  debug: ,
  user_agent:
  )
+ if server_version.version < '0.9.0'.version
+ raise 'require ollama API version 0.9.0 or higher'
+ end
+ @ollama
  end

  # The setup_documents method initializes the document processing pipeline by
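
The version gate that `connect_ollama` now enforces (and that `initialize` previously performed with an `ArgumentError`) is a semantic-version comparison followed by a bare `raise`, which is why the documented exception class changed to `RuntimeError`. A standalone sketch of the same check using Ruby's built-in `Gem::Version`; the gem itself uses the `String#version` helper visible in the diff, and the method name below is hypothetical:

    # Minimal sketch of the minimum-API-version gate, assuming the server
    # version arrives as a string such as "0.9.5".
    MINIMUM_OLLAMA_API_VERSION = Gem::Version.new('0.9.0')

    def ensure_supported_ollama_version!(server_version)
      if Gem::Version.new(server_version) < MINIMUM_OLLAMA_API_VERSION
        # A bare raise with a string message produces a RuntimeError.
        raise 'require ollama API version 0.9.0 or higher'
      end
      server_version
    end

    ensure_supported_ollama_version!('0.9.5')   # => "0.9.5"
    # ensure_supported_ollama_version!('0.8.0') # would raise RuntimeError

Because `/reconnect` now simply calls `connect_ollama`, the same check is re-applied on every reconnection.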
data/lib/ollama_chat/clipboard.rb CHANGED
@@ -13,10 +13,11 @@
  module OllamaChat::Clipboard
  # Copy the last assistant's message to the system clipboard.
  #
- # This method checks if there is a last message from an assistant in the `@messages`
- # array and copies its content to the clipboard using the specified command from `config.copy`.
- # If no assistant response is available or the clipboard command is not found, appropriate
- # error messages are displayed.
+ # This method checks if there is a last message from an assistant in the
+ # `@messages` array and copies its content to the clipboard using the
+ # specified command from `config.copy`.
+ # If no assistant response is available or the clipboard command is not
+ # found, appropriate error messages are displayed.
  #
  # @return [NilClass] Always returns nil.
  def copy_to_clipboard
@@ -38,9 +39,9 @@ module OllamaChat::Clipboard

  # Paste content from the input.
  #
- # Prompts the user to paste their content and then press C-d (Ctrl+D) to terminate
- # input. Reads all lines from standard input until Ctrl+D is pressed and returns
- # the pasted content as a string.
+ # Prompts the user to paste their content and then press C-d (Ctrl+D) to
+ # terminate input. Reads all lines from standard input until Ctrl+D is
+ # pressed and returns the pasted content as a string.
  #
  # @return [String] The pasted content entered by the user.
  def paste_from_input
data/lib/ollama_chat/conversation.rb CHANGED
@@ -19,7 +19,7 @@ module OllamaChat::Conversation
  # format.
  #
  # @param filename [String] The path to the file where the conversation should
- # be saved
+ # be saved
  #
  # @example Save conversation with explicit filename
  # chat.save_conversation('conversations/2023-10-15_my_session.json')
@@ -43,7 +43,7 @@ module OllamaChat::Conversation
  # for confirmation.
  #
  # @param filename [String] The path to the file containing the conversation
- # to load
+ # to load
  #
  # @example Load a conversation from a specific file
  # chat.load_conversation('conversations/2023-10-15_my_session.json')
data/lib/ollama_chat/dialog.rb CHANGED
@@ -12,9 +12,6 @@
  #
  # @example Changing the system prompt
  # chat.change_system_prompt('default_prompt', system: '?sherlock')
- #
- # @example Choosing a document policy
- # chat.choose_document_policy
  module OllamaChat::Dialog
  # The model_with_size method formats a model's size for display
  # by creating a formatted string that includes the model name and its size
@@ -104,41 +101,6 @@ module OllamaChat::Dialog
  info
  end

- # The document_policy method sets the policy for handling document imports.
- #
- # @param value [ String ] the document policy to be set
- attr_writer :document_policy
-
- # The choose_document_policy method presents a menu to select a document policy.
- # It allows the user to choose from importing, embedding, summarizing, or
- # ignoring documents.
- # The method displays available policies and sets the selected policy as the
- # current document policy.
- # If no valid policy is found, it defaults to the first option.
- # After selection, it outputs the chosen policy and displays the current
- # configuration information.
- def choose_document_policy
- policies = %w[ importing embedding summarizing ignoring ].sort
- current = if policies.index(@document_policy)
- @document_policy
- elsif policies.index(config.document_policy)
- config.document_policy
- else
- policies.first
- end
- policies.unshift('[EXIT]')
- policy = OllamaChat::Utils::Chooser.choose(policies)
- case policy
- when nil, '[EXIT]'
- STDOUT.puts "Exiting chooser."
- policy = current
- end
- self.document_policy = policy
- ensure
- STDOUT.puts "Using document policy #{bold{@document_policy}}."
- info
- end
-
  # The change_system_prompt method allows the user to select or enter a new
  # system prompt for the chat session.
  # It provides an interactive chooser when multiple prompts match the given
@@ -147,7 +109,7 @@ module OllamaChat::Dialog
  #
  # @param default [ String ] the default system prompt to fall back to
  # @param system [ String ] the system prompt identifier or pattern to
- # search for
+ # search for
  def change_system_prompt(default, system: nil)
  selector = case system
  when /\A\?(.+)\z/
@@ -204,8 +166,7 @@ module OllamaChat::Dialog
  #
  # @return [ String ] the full name of the chosen voice
  def change_voice
- chosen = OllamaChat::Utils::Chooser.choose(config.voice.list)
- @current_voice = chosen.full? || config.voice.default
+ @voices.choose
  end

  # The message_list method creates and returns a new MessageList instance
data/lib/ollama_chat/document_cache.rb CHANGED
@@ -12,9 +12,9 @@ module OllamaChat::DocumentCache
  # configuration to dynamically load the appropriate cache implementation.
  #
  # @return [Class] The cache class referenced by the configuration's cache
- # setting.
+ # setting.
  # @raise [NameError] If the configured cache class name does not correspond
- # to an existing constant.
+ # to an existing constant.
  def document_cache_class
  Object.const_get(config.cache)
  end
@@ -22,17 +22,18 @@ module OllamaChat::DocumentCache
  # Configures and returns the appropriate cache class based on command-line
  # options.
  #
- # Determines which cache implementation to use based on command-line flags: -
- # If the `-M` flag is set, uses {Documentrix::Documents::MemoryCache} -
- # Otherwise, resolves and returns the cache class specified in configuration
+ # Determines which cache implementation to use based on command-line flags:
+ # - If the `-M` flag is set, uses {Documentrix::Documents::MemoryCache}
+ # - Otherwise, resolves and returns the cache class specified in
+ # configuration
  #
  # Falls back to {Documentrix::Documents::MemoryCache} if configuration
  # resolution fails.
  #
  # @return [Class] The selected cache class for document storage and
- # retrieval.
+ # retrieval.
  # @raise [StandardError] If there is an error resolving the configured cache
- # class, logs the error to standard error and falls back to MemoryCache.
+ # class, logs the error to standard error and falls back to MemoryCache.
  def configure_cache
  if @opts[?M]
  Documentrix::Documents::MemoryCache
data/lib/ollama_chat/env_config.rb CHANGED
@@ -2,6 +2,15 @@ require 'const_conf'
  require 'pathname'

  module OllamaChat
+ # Environment configuration module for OllamaChat
+ #
+ # This module provides a structured way to manage environment variables and
+ # configuration settings for the OllamaChat application. It uses the
+ # ConstConf library to define and manage configuration parameters with
+ # default values, descriptions, and decoding logic.
+ #
+ # The module organizes configuration into logical sections including general
+ # settings, Ollama-specific configurations, and chat-specific options.
  module EnvConfig
  include ConstConf

data/lib/ollama_chat/follow_chat.rb CHANGED
@@ -19,12 +19,12 @@ class OllamaChat::FollowChat
  # Initializes a new instance of OllamaChat::FollowChat.
  #
  # @param [OllamaChat::Chat] chat The chat object, which represents the
- # conversation context.
+ # conversation context.
  # @param [#to_a] messages A collection of message objects, representing the
- # conversation history.
+ # conversation history.
  # @param [String] voice (optional) to speek with if any.
  # @param [IO] output (optional) The output stream where terminal output
- # should be printed. Defaults to STDOUT.
+ # should be printed. Defaults to STDOUT.
  #
  # @return [OllamaChat::FollowChat] A new instance of OllamaChat::FollowChat.
  def initialize(chat:, messages:, voice: nil, output: STDOUT)
@@ -39,7 +39,7 @@ class OllamaChat::FollowChat
  # Returns the conversation history (an array of message objects).
  #
  # @return [OllamaChat::MessageList<Ollama::Message>] The array of messages in
- # the conversation.
+ # the conversation.
  attr_reader :messages

  # Invokes the chat flow based on the provided Ollama server response.
@@ -59,7 +59,7 @@ class OllamaChat::FollowChat
  # outputs evaluation statistics (if applicable).
  #
  # @param [Ollama::Response] response The parsed JSON response from the Ollama
- # server.
+ # server.
  #
  # @return [OllamaChat::FollowChat] The current instance for method chaining.
  def call(response)
@@ -128,7 +128,7 @@ class OllamaChat::FollowChat
  # last message if thinking is enabled and thinking content is present.
  #
  # @param response [ Object ] the response object containing message content
- # and thinking
+ # and thinking
  def update_last_message(response)
  @messages.last.content << response.message&.content
  if @chat.think_loud? and response_thinking = response.message&.thinking.full?
@@ -212,8 +212,8 @@ class OllamaChat::FollowChat
  # @param response [ Object ] the response object containing evaluation metrics
  #
  # @return [ String ] a formatted string with statistical information about
- # the evaluation process including durations, counts, and rates, styled with
- # colors and formatting
+ # the evaluation process including durations, counts, and rates, styled
+ # with colors and formatting
  def eval_stats(response)
  eval_duration = response.eval_duration / 1e9
  prompt_eval_duration = response.prompt_eval_duration / 1e9
data/lib/ollama_chat/history.rb CHANGED
@@ -24,7 +24,7 @@ module OllamaChat::History
  # reliably for reading from or writing to the chat history file.
  #
  # @return [String] the absolute file path to the chat history file as
- # specified in the configuration
+ # specified in the configuration
  def chat_history_filename
  File.expand_path(OllamaChat::EnvConfig::OLLAMA::CHAT::HISTORY)
  end
data/lib/ollama_chat/information.rb CHANGED
@@ -80,7 +80,7 @@ module OllamaChat::Information
  # configurations, embedding settings, and various operational switches.
  #
  # @return [ nil ] This method does not return a value; it outputs information
- # directly to standard output.
+ # directly to standard output.
  def info
  STDOUT.puts "Running ollama_chat version: #{bold(OllamaChat::VERSION)}"
  STDOUT.puts "Connected to ollama server version: #{bold(server_version)} on: #{bold(server_url)}"
@@ -99,15 +99,13 @@ module OllamaChat::Information
  end
  markdown.show
  stream.show
- think_show
+ think_mode.show
  think_loud.show
  location.show
  voice.show
- if @voice.on?
- STDOUT.puts " Using voice #{bold{@current_voice}} to speak."
- end
+ @voice.on? and @voices.show
  STDOUT.puts "Documents database cache is #{@documents.nil? ? 'n/a' : bold{@documents.cache.class}}"
- STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
+ STDOUT.puts "Document policy for references in user text: #{bold{document_policy}}"
  STDOUT.puts "Currently selected search engine is #{bold(search_engine)}."
  STDOUT.puts "Conversation length: #{bold(@messages.size.to_s)} message(s)."
  nil
@@ -144,7 +142,7 @@ module OllamaChat::Information
  /summarize [n] source summarize the source's content in n words
  /embedding toggle embedding paused or not
  /embed source embed the source's content
- /web [n] query query web & for n(=1) results (policy: #@document_policy)
+ /web [n] query query web & for n(=1) results (policy: #{document_policy})
  /links [clear] display (or clear) links used in the chat
  /save filename store conversation messages
  /load filename load conversation messages
data/lib/ollama_chat/kramdown_ansi.rb CHANGED
@@ -16,7 +16,7 @@ module OllamaChat::KramdownANSI
  # falling back to default styles.
  #
  # @return [ Hash ] a hash of ANSI styles configured either from environment
- # variables or using default settings
+ # variables or using default settings
  def configure_kramdown_ansi_styles
  if json = OllamaChat::EnvConfig::KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES?
  Kramdown::ANSI::Styles.from_json(json).ansi_styles
@@ -37,7 +37,7 @@ module OllamaChat::KramdownANSI
  # If nil, returns an empty string.
  #
  # @return [ String ] the content formatted with ANSI escape sequences
- # according to the configured styles
+ # according to the configured styles
  def kramdown_ansi_parse(content)
  content.nil? and return ''
  Kramdown::ANSI.parse(content, ansi_styles: @kramdown_ansi_styles)
data/lib/ollama_chat/message_list.rb CHANGED
@@ -35,7 +35,7 @@ class OllamaChat::MessageList
  # The initialize method sets up the message list for an OllamaChat session.
  #
  # @param chat [ OllamaChat::Chat ] the chat object that this message list
- # belongs to
+ # belongs to
  def initialize(chat)
  @chat = chat
  @messages = []
@@ -54,7 +54,7 @@ class OllamaChat::MessageList
  # OllamaChat::MessageList instance.
  #
  # @attr_reader [OllamaChat::MessageList] A MessageList object containing all
- # messages associated with this instance
+ # messages associated with this instance
  attr_reader :messages

  # Returns the number of messages stored in the message list.
@@ -202,9 +202,11 @@ class OllamaChat::MessageList

  # Sets the system prompt for the chat session.
  #
- # @param system [String, nil] The new system prompt. If `nil` or `false`, clears the system prompt.
+ # @param system [String, nil] The new system prompt. If `nil` or `false`,
+ # clears the system prompt.
  #
- # @return [OllamaChat::MessageList] Returns `self` to allow chaining of method calls.
+ # @return [OllamaChat::MessageList] Returns `self` to allow chaining of
+ # method calls.
  #
  # @note This method:
  # - Removes all existing system prompts from the message list
@@ -252,7 +254,7 @@ class OllamaChat::MessageList
  # curent location, time, and unit preferences.
  #
  # @return [Array] An array of Ollama::Message objects representing the
- # messages in the list.
+ # messages in the list.
  def to_ary
  location = at_location.full?
  add_system = !!location
data/lib/ollama_chat/message_output.rb CHANGED
@@ -18,7 +18,7 @@ module OllamaChat::MessageOutput
  #
  # @return [ OllamaChat::Chat ] returns self
  # @return [ nil ] returns nil if the command is not provided or if there is
- # no assistant message
+ # no assistant message
  def pipe(cmd)
  cmd.present? or return
  if message = @messages.last and message.role == 'assistant'
@@ -44,7 +44,7 @@ module OllamaChat::MessageOutput
  # The output method writes the last assistant message to a file.
  #
  # @param filename [ String ] the path to the file where the last assistant
- # message should be written
+ # message should be written
  #
  # @return [ OllamaChat::Chat ] returns self
  def output(filename)
@@ -72,11 +72,14 @@ module OllamaChat::MessageOutput
  # doesn't exist, the method returns early without writing. Otherwise, it
  # opens the file in write mode and writes the message content to it.
  #
- # @param filename [ String ] the path to the file where the content should be written
- # @param message [ Ollama::Message ] the message object containing the content to write
+ # @param filename [ String ] the path to the file where the content should be
+ # written
+ # @param message [ Ollama::Message ] the message object containing the
+ # content to write
  #
  # @return [ TrueClass ] returns true if the file was successfully written
- # @return [ nil ] returns nil if the user chose not to overwrite or if an error occurred
+ # @return [ nil ] returns nil if the user chose not to overwrite or if an
+ # error occurred
  def attempt_to_write_file(filename, message)
  path = Pathname.new(filename.to_s).expand_path
  if !path.exist? ||
data/lib/ollama_chat/model_handling.rb CHANGED
@@ -21,7 +21,7 @@ module OllamaChat::ModelHandling
  # @param model [ String ] the name of the Ollama model
  #
  # @return [ String, FalseClass ] the system prompt if the model is present,
- # false otherwise
+ # false otherwise
  def model_present?(model)
  ollama.show(model:) { return _1.system.to_s }
  rescue Ollama::Errors::NotFoundError
@@ -32,8 +32,6 @@ module OllamaChat::ModelHandling
  # remote server if it is not found locally.
  #
  # @param model [ String ] the name of the model to be pulled
- #
- # @return [ nil ]
  def pull_model_from_remote(model)
  STDOUT.puts "Model #{bold{model}} not found locally, attempting to pull it from remote now…"
  ollama.pull(model:)
@@ -54,7 +52,7 @@ module OllamaChat::ModelHandling
  # @param options [ Hash ] Options for the pull_model_from_remote method.
  #
  # @return [ String, FalseClass ] the system prompt if the model and it are
- # present, false otherwise.
+ # present, false otherwise.
  def pull_model_unless_present(model, options)
  if system = model_present?(model)
  return system.full?