ollama_chat 0.0.52 → 0.0.54

This diff shows the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: a6ecb3e69adc69e9ce8c09eac40af1fb1078dd85d9b7edf33eb5c0cf33027a3c
- data.tar.gz: cbfc1d833db9239d5d354fd9388bc48247c2dfa3e1ea92500a08f3f85e5dce9b
+ metadata.gz: f0a01656cc0ad8d063570956971ee34ebf94929e90feb3c5f8207879d8367a12
+ data.tar.gz: 87ce3880db6bfcbdaf049618a8922536bdcbdd20b15366187612fb0e33594fa0
  SHA512:
- metadata.gz: 91fd323ab11c5b9b44787163978c8862310054939b591ff9566a182783d20cda4768ee0aa1c55b90b30a3a7e475e1df33f0f2c81f2c1e4b99b757daead3c18fe
- data.tar.gz: 57b4ca9f70ed945a947a099d4323f909a74bb8b6f97e4925af06692552962124cb0244e2b1208cd414b6022ef68b164fbe64b0dee1e87568c2ca4cbec4fe3013
+ metadata.gz: c53b7fa09b117940de484fda33145e8f68ef7340da1e392e335f7aca5952c11ecbcad30a32e78551df1ff7972b20d7519a58a64dd808b5e9fecb309445ab12f8
+ data.tar.gz: c83bb2ae1c82af0aa66a99262c94d0fc7060807d3333d6ffd261bd558d0d338502419ca6ddf3a8b9b31f5a7ef48dc2c15ad6bd42d227a86290506bab12f3f7de
data/CHANGES.md CHANGED
@@ -1,5 +1,35 @@
  # Changes

+ ## 2026-01-08 v0.0.54
+
+ ### New Features
+
+ - Added `/revise_last` command for editing the last message
+ - Implemented `OllamaChat::MessageEditing` module with editor integration
+ - Enhanced message editing with proper error handling and user feedback
+
+ ### Improvements
+
+ - Improved `InputContent` module with better error handling (replaced `$?` with
+ direct `system` return)
+ - Updated `choose_filename` to use `_1` parameter for Ruby idioms
+ - Added comprehensive test suite for `InputContent` module with **100%** line
+ coverage
+
+ ## 2026-01-07 v0.0.53
+
+ - Added `/compose` command functionality to compose content using an external
+ editor
+ - Introduced `OllamaChat::EnvConfig::EDITOR?` and
+ `OllamaChat::EnvConfig::EDITOR!` methods for editor configuration access
+ - Implemented `compose` method in `InputContent` module using `Tempfile` for
+ temporary file handling
+ - Added `EDITOR` configuration with default value of `vim` or `vi` if available
+ - Updated help text to include the new `/compose` command
+ - Added graceful error handling for editor failures, returning `nil` and
+ printing error to `STDERR`
+ - Required `tempfile` gem for temporary file handling functionality
+
  ## 2026-01-06 v0.0.52

  - Enabled verbose context generation to provide real-time feedback during
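
A side note on the v0.0.54 item above about replacing $? with the direct system return value: the short sketch below is illustrative only, not code from the gem, and shows the Ruby behaviour that change relies on.

    # Kernel#system returns true when the command exits with status 0,
    # false on a non-zero exit status, and nil when the command could not
    # be started at all, so the return value can be branched on directly
    # instead of inspecting the $? global afterwards.
    if system('vim', 'notes.txt')
      puts 'editor exited successfully'
    else
      warn 'editor failed or could not be started'
    end
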
data/README.md CHANGED
@@ -180,6 +180,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
  /links [clear] display (or clear) links used in the chat
  /save filename store conversation messages
  /load filename load conversation messages
+ /compose compose content using an EDITOR
  /input [pattern] select and read content from a file (default: **/*)
  /context [pattern...] collect context with glob patterns
  /output filename save last response to filename
data/lib/ollama_chat/chat.rb CHANGED
@@ -51,6 +51,7 @@ class OllamaChat::Chat
  include OllamaChat::KramdownANSI
  include OllamaChat::Conversation
  include OllamaChat::InputContent
+ include OllamaChat::MessageEditing

  # Initializes a new OllamaChat::Chat instance with the given command-line
  # arguments.
@@ -338,6 +339,11 @@ class OllamaChat::Chat
  arg and patterns = arg.scan(/(\S+)/).flatten
  @parse_content = false
  context_spook(patterns) or :next
+ when %r(^/compose$)
+ compose or :next
+ when %r(^/revise_last$)
+ revise_last
+ :next
  when %r(^/save\s+(.+)$)
  save_conversation($1)
  :next
data/lib/ollama_chat/env_config.rb CHANGED
@@ -30,6 +30,18 @@ module OllamaChat
  end
  end

+ EDITOR = set do
+ description 'Editor to use'
+
+ default do
+ if editor = %w[ vim vi ].find { `which #{_1}`.full?(:chomp) }
+ editor
+ else
+ warn 'Need an editor command configured via env var "EDITOR"'
+ end
+ end
+ end
+
  DIFF_TOOL = set do
  description 'Diff tool to apply changes with'

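
The EDITOR default block above resolves a fallback editor via `which`; the following is a rough plain-Ruby equivalent of that lookup, illustrative only and ignoring the set/default DSL and the full?(:chomp) helper the actual code uses.

    # Pick the first of vim/vi that `which` can locate on PATH and warn
    # when neither is found, mirroring the fallback described above.
    editor = %w[vim vi].find do |cmd|
      path = `which #{cmd}`.chomp
      !path.empty?
    end
    warn 'Need an editor command configured via env var "EDITOR"' unless editor
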
data/lib/ollama_chat/information.rb CHANGED
@@ -147,6 +147,7 @@ module OllamaChat::Information
  /links [clear] display (or clear) links used in the chat
  /save filename store conversation messages
  /load filename load conversation messages
+ /compose compose content using an EDITOR
  /input [pattern] select and read content from a file (default: **/*)
  /context [pattern...] collect context with glob patterns
  /output filename save last response to filename
data/lib/ollama_chat/input_content.rb CHANGED
@@ -1,3 +1,5 @@
+ require 'tempfile'
+
  # A module that provides input content processing functionality for OllamaChat.
  #
  # The InputContent module encapsulates methods for reading and returning
@@ -34,7 +36,7 @@ module OllamaChat::InputContent
  #
  # @return [ String, nil ] the path to the selected file or nil if no file was chosen
  def choose_filename(pattern)
- files = Dir.glob(pattern).select { File.file?(it) }
+ files = Dir.glob(pattern).select { File.file?(_1) }
  files.unshift('[EXIT]')
  case chosen = OllamaChat::Utils::Chooser.choose(files)
  when '[EXIT]', nil
@@ -88,4 +90,30 @@ module OllamaChat::InputContent
  end
  end
  end
+
+ # The compose method opens an editor to compose content.
+ #
+ # This method checks for a configured editor and opens a temporary file in
+ # that editor for the user to compose content. Upon successful editing, it
+ # reads the content from the temporary file and returns it. If the editor
+ # fails or no editor is configured, appropriate error messages are displayed
+ # and nil is returned.
+ #
+ # @return [ String, nil ] the composed content if successful, nil otherwise
+ def compose
+ unless editor = OllamaChat::EnvConfig::EDITOR?
+ STDERR.puts "Editor reqired for compose, set env var "\
+ "#{OllamaChat::EnvConfig::EDITOR!.env_var.inspect}."
+ return
+ end
+ Tempfile.open do |tmp|
+ result = system %{#{editor} #{tmp.path.inspect}}
+ if result
+ return File.read(tmp.path)
+ else
+ STDERR.puts "Editor failed to edit #{tmp.path.inspect}."
+ end
+ end
+ nil
+ end
  end
data/lib/ollama_chat/message_editing.rb ADDED
@@ -0,0 +1,47 @@
+ # A module that provides message editing functionality for OllamaChat.
+ #
+ # The MessageEditing module encapsulates methods for modifying existing chat
+ # messages using an external editor. It allows users to edit the last message
+ # in the conversation, whether it's a system prompt, user message, or assistant
+ # response.
+ module OllamaChat::MessageEditing
+ # The revise_last method opens the last message in an external editor for
+ # modification.
+ #
+ # This method retrieves the last message from the conversation, writes its
+ # content to a temporary file, opens that file in the configured editor,
+ # and then updates the message with the edited content upon successful
+ # completion.
+ #
+ # @return [String, nil] the edited content if successful, nil otherwise
+ def revise_last
+ if message = @messages.last
+ unless editor = OllamaChat::EnvConfig::EDITOR?
+ STDERR.puts "Editor required for revise, set env var " \
+ "#{OllamaChat::EnvConfig::EDITOR!.env_var.inspect}."
+ return
+ end
+
+ Tempfile.open do |tmp|
+ tmp.write(message.content)
+ tmp.flush
+
+ result = system %{#{editor} #{tmp.path.inspect}}
+
+ if result
+ new_content = File.read(tmp.path)
+ old_message = @messages.messages.pop.as_json
+ old_message[:content] = new_content
+ @messages << Ollama::Message.from_hash(old_message)
+ STDOUT.puts "Message edited and updated."
+ return new_content
+ else
+ STDERR.puts "Editor failed to edit message."
+ end
+ end
+ else
+ STDERR.puts "No message available to revise."
+ end
+ nil
+ end
+ end
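
Both compose (above) and revise_last here use block-form Tempfile.open, which closes the temporary file once the block returns; the minimal sketch below is illustrative only and shows why the edited content has to be read back inside the block.

    require 'tempfile'

    # The tempfile is yielded to the block and closed when the block ends,
    # so writing, flushing (to make the content visible to a separate
    # editor process), and reading back all happen before the block returns.
    Tempfile.open('example') do |tmp|
      tmp.write("draft\n")
      tmp.flush
      puts File.read(tmp.path)   # => draft
    end
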
data/lib/ollama_chat/version.rb CHANGED
@@ -1,6 +1,6 @@
  module OllamaChat
  # OllamaChat version
- VERSION = '0.0.52'
+ VERSION = '0.0.54'
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/lib/ollama_chat.rb CHANGED
@@ -37,5 +37,6 @@ require 'ollama_chat/server_socket'
  require 'ollama_chat/kramdown_ansi'
  require 'ollama_chat/conversation'
  require 'ollama_chat/input_content'
+ require 'ollama_chat/message_editing'
  require 'ollama_chat/env_config'
  require 'ollama_chat/chat'
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
  # -*- encoding: utf-8 -*-
- # stub: ollama_chat 0.0.52 ruby lib
+ # stub: ollama_chat 0.0.54 ruby lib

  Gem::Specification.new do |s|
  s.name = "ollama_chat".freeze
- s.version = "0.0.52".freeze
+ s.version = "0.0.54".freeze

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
@@ -12,15 +12,15 @@ Gem::Specification.new do |s|
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
  s.email = "flori@ping.de".freeze
  s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
- s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
- s.files = [".utilsrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/example_with_quote.html".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
+ s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_editing.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
+ s.files = [".utilsrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_editing.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/example_with_quote.html".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/input_content_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_editing_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
  s.homepage = "https://github.com/flori/ollama_chat".freeze
  s.licenses = ["MIT".freeze]
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 3.2".freeze)
  s.rubygems_version = "4.0.2".freeze
  s.summary = "A command-line interface (CLI) for interacting with an Ollama AI model.".freeze
- s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
+ s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/input_content_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_editing_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
 
  s.specification_version = 4
 
data/spec/ollama_chat/input_content_spec.rb ADDED
@@ -0,0 +1,127 @@
+ require 'spec_helper'
+
+ describe OllamaChat::InputContent do
+ let :chat do
+ OllamaChat::Chat.new
+ end
+
+ connect_to_ollama_server
+
+ describe '#input' do
+ it 'can read content from a selected file' do
+ # Mock the file selection process
+ expect(chat).to receive(:choose_filename).with('**/*').
+ and_return('spec/assets/example.rb')
+
+ # Test that it returns the file content
+ result = chat.input(nil)
+ expect(result).to include('puts "Hello World!"')
+ end
+
+ it 'returns nil when no file is selected' do
+ expect(chat).to receive(:choose_filename).with('**/*').and_return(nil)
+ expect(chat.input(nil)).to be_nil
+ end
+
+ it 'can read content with specific pattern' do
+ expect(chat).to receive(:choose_filename).with('spec/assets/*').
+ and_return('spec/assets/example.rb')
+ result = chat.input('spec/assets/*')
+ expect(result).to include('puts "Hello World!"')
+ end
+ end
+
+ describe '#choose_filename' do
+ it 'can select a file from matching patterns' do
+ # Test with a pattern that matches existing files
+ files = Dir.glob('spec/assets/*')
+ expect(files).to_not be_empty
+
+ # Mock the selection process
+ expect(OllamaChat::Utils::Chooser).to receive(:choose).
+ with(files.unshift('[EXIT]')).and_return(files[1])
+
+ result = chat.choose_filename('spec/assets/*')
+ expect(result).to eq files[1]
+ end
+
+ it 'returns nil when user exits selection' do
+ expect(OllamaChat::Utils::Chooser).to receive(:choose).and_return(nil)
+ expect(chat.choose_filename('spec/assets/*')).to be_nil
+ end
+
+ it 'returns nil when user chooses exit' do
+ expect(OllamaChat::Utils::Chooser).to receive(:choose).and_return('[EXIT]')
+ expect(chat.choose_filename('spec/assets/*')).to be_nil
+ end
+ end
+
+ describe '#context_spook' do
+ it 'can collect context with patterns' do
+ # Test with specific patterns
+ patterns = ['spec/assets/example.rb']
+ expect(ContextSpook).to receive(:generate_context).with(hash_including(verbose: true))
+
+ # This should not raise an error, but return nil since we're not mocking the full implementation
+ expect { chat.context_spook(patterns) }.not_to raise_error
+ end
+
+ it 'can load default context when no patterns provided' do
+ # Mock finding a context definition file
+ filename = '.contexts/code_comment.rb'
+ expect(chat).to receive(:choose_filename).with('.contexts/*.rb').
+ and_return(filename)
+
+ expect(ContextSpook).to receive(:generate_context).
+ with(filename, hash_including(verbose: true))
+
+ # This should not raise an error
+ expect { chat.context_spook(nil) }.not_to raise_error
+ end
+
+ it 'returns nil when no context file is found' do
+ expect(chat).to receive(:choose_filename).with('.contexts/*.rb').
+ and_return(nil)
+ expect(chat.context_spook(nil)).to be_nil
+ end
+ end
+
+ describe '#compose' do
+ it 'can open editor to compose content' do
+ # Mock editor configuration
+ const_conf_as('OllamaChat::EnvConfig::EDITOR' => '/usr/bin/vim')
+
+ # Mock Tempfile behavior
+ tmp_double = double('tmp', path: '/tmp/test')
+ expect(Tempfile).to receive(:open).and_yield(tmp_double)
+
+ # Mock system call to simulate successful editor execution
+ expect(chat).to receive(:system).with('/usr/bin/vim "/tmp/test"').and_return(true)
+
+ # Mock file reading to return content
+ expect(File).to receive(:read).with('/tmp/test').and_return('composed content')
+
+ result = chat.compose
+ expect(result).to eq 'composed content'
+ end
+
+ it 'handles missing editor gracefully' do
+ const_conf_as('OllamaChat::EnvConfig::EDITOR' => nil)
+
+ expect(STDERR).to receive(:puts).with(/Editor reqired for compose/)
+ expect(chat.compose).to be_nil
+ end
+
+ it 'handles editor failure' do
+ const_conf_as('OllamaChat::EnvConfig::EDITOR' => '/usr/bin/vim')
+
+ tmp_double = double('tmp', path: '/tmp/test')
+ expect(Tempfile).to receive(:open).and_yield(tmp_double)
+
+ expect(chat).to receive(:system).with('/usr/bin/vim "/tmp/test"').and_return(false)
+
+ expect(STDERR).to receive(:puts).with(/Editor failed to edit/)
+ expect(chat.compose).to be_nil
+ end
+ end
+ end
data/spec/ollama_chat/message_editing_spec.rb ADDED
@@ -0,0 +1,63 @@
+ require 'spec_helper'
+
+ describe OllamaChat::MessageEditing do
+ let :chat do
+ OllamaChat::Chat.new
+ end
+
+ connect_to_ollama_server
+
+ describe '#revise_last' do
+ it 'can revise the last message' do
+ # First add a message to work with
+ chat.messages << Ollama::Message.new(role: 'assistant', content: 'original content')
+
+ const_conf_as('OllamaChat::EnvConfig::EDITOR' => '/usr/bin/vim')
+
+ # Mock Tempfile behavior to simulate editor interaction
+ tmp_double = double('tmp', write: true, flush: true, path: '/tmp/test')
+ expect(Tempfile).to receive(:open).and_yield(tmp_double)
+
+ # Mock system call to simulate successful editor execution
+ expect(chat).to receive(:system).with('/usr/bin/vim "/tmp/test"').and_return(true)
+
+ # Mock file reading to return edited content
+ expect(File).to receive(:read).with('/tmp/test').and_return('edited content')
+
+ # The method should return the edited content
+ expect(chat.revise_last).to eq 'edited content'
+ end
+
+ it 'handles missing last message' do
+ expect(STDERR).to receive(:puts).with(/No message available to revise/)
+ expect(chat.revise_last).to be_nil
+ end
+
+ it 'handles missing editor gracefully' do
+ chat.messages << Ollama::Message.new(role: 'assistant', content: 'original content')
+
+ const_conf_as('OllamaChat::EnvConfig::EDITOR' => nil)
+
+ expect(STDERR).to receive(:puts).with(/Editor required for revise/)
+ expect(chat.revise_last).to be_nil
+ end
+
+ it 'handles no messages to revise' do
+ # Clear messages array
+ chat.instance_variable_get(:@messages).clear
+
+ expect(STDERR).to receive(:puts).with(/No message available to revise/)
+ expect(chat.revise_last).to be_nil
+ end
+
+ it 'handles editor failure' do
+ const_conf_as('OllamaChat::EnvConfig::EDITOR' => '/usr/bin/vim')
+ chat.messages << Ollama::Message.new(role: 'assistant', content: 'original content')
+ tmp_double = double('tmp', write: true, flush: true, path: '/tmp/test')
+ expect(Tempfile).to receive(:open).and_yield(tmp_double)
+ expect(chat).to receive(:system).with('/usr/bin/vim "/tmp/test"').and_return(false)
+ expect(STDERR).to receive(:puts).with(/Editor failed to edit message/)
+ expect(chat.revise_last).to be_nil
+ end
+ end
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: ollama_chat
  version: !ruby/object:Gem::Version
- version: 0.0.52
+ version: 0.0.54
  platform: ruby
  authors:
  - Florian Frank
@@ -420,6 +420,7 @@ extra_rdoc_files:
  - lib/ollama_chat/information.rb
  - lib/ollama_chat/input_content.rb
  - lib/ollama_chat/kramdown_ansi.rb
+ - lib/ollama_chat/message_editing.rb
  - lib/ollama_chat/message_format.rb
  - lib/ollama_chat/message_list.rb
  - lib/ollama_chat/message_output.rb
@@ -461,6 +462,7 @@ files:
  - lib/ollama_chat/information.rb
  - lib/ollama_chat/input_content.rb
  - lib/ollama_chat/kramdown_ansi.rb
+ - lib/ollama_chat/message_editing.rb
  - lib/ollama_chat/message_format.rb
  - lib/ollama_chat/message_list.rb
  - lib/ollama_chat/message_output.rb
@@ -504,7 +506,9 @@ files:
  - spec/ollama_chat/clipboard_spec.rb
  - spec/ollama_chat/follow_chat_spec.rb
  - spec/ollama_chat/information_spec.rb
+ - spec/ollama_chat/input_content_spec.rb
  - spec/ollama_chat/kramdown_ansi_spec.rb
+ - spec/ollama_chat/message_editing_spec.rb
  - spec/ollama_chat/message_list_spec.rb
  - spec/ollama_chat/message_output_spec.rb
  - spec/ollama_chat/model_handling_spec.rb
@@ -550,7 +554,9 @@ test_files:
  - spec/ollama_chat/clipboard_spec.rb
  - spec/ollama_chat/follow_chat_spec.rb
  - spec/ollama_chat/information_spec.rb
+ - spec/ollama_chat/input_content_spec.rb
  - spec/ollama_chat/kramdown_ansi_spec.rb
+ - spec/ollama_chat/message_editing_spec.rb
  - spec/ollama_chat/message_list_spec.rb
  - spec/ollama_chat/message_output_spec.rb
  - spec/ollama_chat/model_handling_spec.rb