ollama_chat 0.0.53 → 0.0.54

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 30525dbd5fcb884ff5a7a978f59e06c77b051a16007938f3eb12770b869aa6f2
- data.tar.gz: 906f22197039173a58bc242b9ff7108842e9d5ec453cf2a77adb589352367fb1
+ metadata.gz: f0a01656cc0ad8d063570956971ee34ebf94929e90feb3c5f8207879d8367a12
+ data.tar.gz: 87ce3880db6bfcbdaf049618a8922536bdcbdd20b15366187612fb0e33594fa0
  SHA512:
- metadata.gz: e8be84b4c4e4902cdab557f81b8fdf9729fa5ec7c54c585aa8c8a3f2a76585fcc6e6344f454af579f24676252bf5dd1234664b3304f6086f9bdb9c5ecf31b92a
- data.tar.gz: '0827fa2a548405d5e9208fa0c181220dd7955360882dfa3d7311756aa611594f544ac4fedb89bf9a1f0ef90be98fd952ff61bbe336749715f610a0eec9215a20'
+ metadata.gz: c53b7fa09b117940de484fda33145e8f68ef7340da1e392e335f7aca5952c11ecbcad30a32e78551df1ff7972b20d7519a58a64dd808b5e9fecb309445ab12f8
+ data.tar.gz: c83bb2ae1c82af0aa66a99262c94d0fc7060807d3333d6ffd261bd558d0d338502419ca6ddf3a8b9b31f5a7ef48dc2c15ad6bd42d227a86290506bab12f3f7de
data/CHANGES.md CHANGED
@@ -1,5 +1,21 @@
  # Changes

+ ## 2026-01-08 v0.0.54
+
+ ### New Features
+
+ - Added `/revise_last` command for editing the last message
+ - Implemented `OllamaChat::MessageEditing` module with editor integration
+ - Enhanced message editing with proper error handling and user feedback
+
+ ### Improvements
+
+ - Improved `InputContent` module with better error handling (replaced `$?` with
+   direct `system` return)
+ - Updated `choose_filename` to use `_1` parameter for Ruby idioms
+ - Added comprehensive test suite for `InputContent` module with **100%** line
+   coverage
+
  ## 2026-01-07 v0.0.53

  - Added `/compose` command functionality to compose content using an external
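A note on the `choose_filename` change listed above: the block now uses Ruby's numbered parameter `_1` instead of the implicit `it` parameter. Both name the single block argument, but `it` only became an implicit block parameter in Ruby 3.4, while `_1` has worked since Ruby 2.7, so `_1` matches the gemspec's `>= 3.2` Ruby requirement. A minimal side-by-side sketch (the `_1` form is the one the diff below switches to):

    files = Dir.glob(pattern).select { File.file?(_1) }  # numbered parameter, Ruby >= 2.7
    files = Dir.glob(pattern).select { File.file?(it) }  # implicit `it`, Ruby >= 3.4 only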
data/lib/ollama_chat/chat.rb CHANGED
@@ -51,6 +51,7 @@ class OllamaChat::Chat
  include OllamaChat::KramdownANSI
  include OllamaChat::Conversation
  include OllamaChat::InputContent
+ include OllamaChat::MessageEditing

  # Initializes a new OllamaChat::Chat instance with the given command-line
  # arguments.
@@ -340,6 +341,9 @@ class OllamaChat::Chat
  context_spook(patterns) or :next
  when %r(^/compose$)
  compose or :next
+ when %r(^/revise_last$)
+ revise_last
+ :next
  when %r(^/save\s+(.+)$)
  save_conversation($1)
  :next
data/lib/ollama_chat/input_content.rb CHANGED
@@ -36,7 +36,7 @@ module OllamaChat::InputContent
  #
  # @return [ String, nil ] the path to the selected file or nil if no file was chosen
  def choose_filename(pattern)
- files = Dir.glob(pattern).select { File.file?(it) }
+ files = Dir.glob(pattern).select { File.file?(_1) }
  files.unshift('[EXIT]')
  case chosen = OllamaChat::Utils::Chooser.choose(files)
  when '[EXIT]', nil
@@ -107,8 +107,8 @@ module OllamaChat::InputContent
  return
  end
  Tempfile.open do |tmp|
- system %{#{editor} #{tmp.path.inspect}}
- if $?.success?
+ result = system %{#{editor} #{tmp.path.inspect}}
+ if result
  return File.read(tmp.path)
  else
  STDERR.puts "Editor failed to edit #{tmp.path.inspect}."
data/lib/ollama_chat/message_editing.rb ADDED
@@ -0,0 +1,47 @@
+ # A module that provides message editing functionality for OllamaChat.
+ #
+ # The MessageEditing module encapsulates methods for modifying existing chat
+ # messages using an external editor. It allows users to edit the last message
+ # in the conversation, whether it's a system prompt, user message, or assistant
+ # response.
+ module OllamaChat::MessageEditing
+   # The revise_last method opens the last message in an external editor for
+   # modification.
+   #
+   # This method retrieves the last message from the conversation, writes its
+   # content to a temporary file, opens that file in the configured editor,
+   # and then updates the message with the edited content upon successful
+   # completion.
+   #
+   # @return [String, nil] the edited content if successful, nil otherwise
+   def revise_last
+     if message = @messages.last
+       unless editor = OllamaChat::EnvConfig::EDITOR?
+         STDERR.puts "Editor required for revise, set env var " \
+           "#{OllamaChat::EnvConfig::EDITOR!.env_var.inspect}."
+         return
+       end
+
+       Tempfile.open do |tmp|
+         tmp.write(message.content)
+         tmp.flush
+
+         result = system %{#{editor} #{tmp.path.inspect}}
+
+         if result
+           new_content = File.read(tmp.path)
+           old_message = @messages.messages.pop.as_json
+           old_message[:content] = new_content
+           @messages << Ollama::Message.from_hash(old_message)
+           STDOUT.puts "Message edited and updated."
+           return new_content
+         else
+           STDERR.puts "Editor failed to edit message."
+         end
+       end
+     else
+       STDERR.puts "No message available to revise."
+     end
+     nil
+   end
+ end
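Both `compose` (in `InputContent`) and the new `revise_last` are built around the same tempfile round trip: write the current text, flush it so the editor sees it, launch the editor via `system`, and read the file back only when the editor exits successfully. Reduced to its core, and with `edit_in_editor` as a hypothetical stand-alone helper rather than anything shipped in the gem, the pattern looks roughly like this:

    require 'tempfile'

    # Hypothetical helper sketching the edit-via-external-editor round trip.
    def edit_in_editor(editor, initial = "")
      Tempfile.open do |tmp|
        tmp.write(initial)
        tmp.flush                                    # ensure the editor sees the content
        if system(%{#{editor} #{tmp.path.inspect}})  # true only on exit status 0
          File.read(tmp.path)                        # return the edited text
        end
      end                                            # Tempfile.open returns the block's value
    end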
data/lib/ollama_chat/version.rb CHANGED
@@ -1,6 +1,6 @@
  module OllamaChat
  # OllamaChat version
- VERSION = '0.0.53'
+ VERSION = '0.0.54'
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/lib/ollama_chat.rb CHANGED
@@ -37,5 +37,6 @@ require 'ollama_chat/server_socket'
  require 'ollama_chat/kramdown_ansi'
  require 'ollama_chat/conversation'
  require 'ollama_chat/input_content'
+ require 'ollama_chat/message_editing'
  require 'ollama_chat/env_config'
  require 'ollama_chat/chat'
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
  # -*- encoding: utf-8 -*-
- # stub: ollama_chat 0.0.53 ruby lib
+ # stub: ollama_chat 0.0.54 ruby lib

  Gem::Specification.new do |s|
  s.name = "ollama_chat".freeze
- s.version = "0.0.53".freeze
+ s.version = "0.0.54".freeze

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
@@ -12,15 +12,15 @@ Gem::Specification.new do |s|
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
  s.email = "flori@ping.de".freeze
  s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
- s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
- s.files = [".utilsrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/example_with_quote.html".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
+ s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_editing.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
+ s.files = [".utilsrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_editing.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/example_with_quote.html".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/input_content_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_editing_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
  s.homepage = "https://github.com/flori/ollama_chat".freeze
  s.licenses = ["MIT".freeze]
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 3.2".freeze)
  s.rubygems_version = "4.0.2".freeze
  s.summary = "A command-line interface (CLI) for interacting with an Ollama AI model.".freeze
- s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
+ s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/input_content_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_editing_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]

  s.specification_version = 4

data/spec/ollama_chat/input_content_spec.rb ADDED
@@ -0,0 +1,127 @@
+ require 'spec_helper'
+
+ describe OllamaChat::InputContent do
+   let :chat do
+     OllamaChat::Chat.new
+   end
+
+   connect_to_ollama_server
+
+   describe '#input' do
+     it 'can read content from a selected file' do
+       # Mock the file selection process
+       expect(chat).to receive(:choose_filename).with('**/*').
+         and_return('spec/assets/example.rb')
+
+       # Test that it returns the file content
+       result = chat.input(nil)
+       expect(result).to include('puts "Hello World!"')
+     end
+
+     it 'returns nil when no file is selected' do
+       expect(chat).to receive(:choose_filename).with('**/*').and_return(nil)
+       expect(chat.input(nil)).to be_nil
+     end
+
+     it 'can read content with specific pattern' do
+       expect(chat).to receive(:choose_filename).with('spec/assets/*').
+         and_return('spec/assets/example.rb')
+       result = chat.input('spec/assets/*')
+       expect(result).to include('puts "Hello World!"')
+     end
+   end
+
+   describe '#choose_filename' do
+     it 'can select a file from matching patterns' do
+       # Test with a pattern that matches existing files
+       files = Dir.glob('spec/assets/*')
+       expect(files).to_not be_empty
+
+       # Mock the selection process
+       expect(OllamaChat::Utils::Chooser).to receive(:choose).
+         with(files.unshift('[EXIT]')).and_return(files[1])
+
+       result = chat.choose_filename('spec/assets/*')
+       expect(result).to eq files[1]
+     end
+
+     it 'returns nil when user exits selection' do
+       expect(OllamaChat::Utils::Chooser).to receive(:choose).and_return(nil)
+       expect(chat.choose_filename('spec/assets/*')).to be_nil
+     end
+
+     it 'returns nil when user chooses exit' do
+       expect(OllamaChat::Utils::Chooser).to receive(:choose).and_return('[EXIT]')
+       expect(chat.choose_filename('spec/assets/*')).to be_nil
+     end
+   end
+
+   describe '#context_spook' do
+     it 'can collect context with patterns' do
+       # Test with specific patterns
+       patterns = ['spec/assets/example.rb']
+       expect(ContextSpook).to receive(:generate_context).with(hash_including(verbose: true))
+
+       # This should not raise an error, but return nil since we're not mocking the full implementation
+       expect { chat.context_spook(patterns) }.not_to raise_error
+     end
+
+     it 'can load default context when no patterns provided' do
+       # Mock finding a context definition file
+       filename = '.contexts/code_comment.rb'
+       expect(chat).to receive(:choose_filename).with('.contexts/*.rb').
+         and_return(filename)
+
+       expect(ContextSpook).to receive(:generate_context).
+         with(filename, hash_including(verbose: true))
+
+       # This should not raise an error
+       expect { chat.context_spook(nil) }.not_to raise_error
+     end
+
+     it 'returns nil when no context file is found' do
+       expect(chat).to receive(:choose_filename).with('.contexts/*.rb').
+         and_return(nil)
+       expect(chat.context_spook(nil)).to be_nil
+     end
+   end
+
+   describe '#compose' do
+     it 'can open editor to compose content' do
+       # Mock editor configuration
+       const_conf_as('OllamaChat::EnvConfig::EDITOR' => '/usr/bin/vim')
+
+       # Mock Tempfile behavior
+       tmp_double = double('tmp', path: '/tmp/test')
+       expect(Tempfile).to receive(:open).and_yield(tmp_double)
+
+       # Mock system call to simulate successful editor execution
+       expect(chat).to receive(:system).with('/usr/bin/vim "/tmp/test"').and_return(true)
+
+       # Mock file reading to return content
+       expect(File).to receive(:read).with('/tmp/test').and_return('composed content')
+
+       result = chat.compose
+       expect(result).to eq 'composed content'
+     end
+
+     it 'handles missing editor gracefully' do
+       const_conf_as('OllamaChat::EnvConfig::EDITOR' => nil)
+
+       expect(STDERR).to receive(:puts).with(/Editor reqired for compose/)
+       expect(chat.compose).to be_nil
+     end
+
+     it 'handles editor failure' do
+       const_conf_as('OllamaChat::EnvConfig::EDITOR' => '/usr/bin/vim')
+
+       tmp_double = double('tmp', path: '/tmp/test')
+       expect(Tempfile).to receive(:open).and_yield(tmp_double)
+
+       expect(chat).to receive(:system).with('/usr/bin/vim "/tmp/test"').and_return(false)
+
+       expect(STDERR).to receive(:puts).with(/Editor failed to edit/)
+       expect(chat.compose).to be_nil
+     end
+   end
+ end
data/spec/ollama_chat/message_editing_spec.rb ADDED
@@ -0,0 +1,63 @@
+ require 'spec_helper'
+
+ describe OllamaChat::MessageEditing do
+   let :chat do
+     OllamaChat::Chat.new
+   end
+
+   connect_to_ollama_server
+
+   describe '#revise_last' do
+     it 'can revise the last message' do
+       # First add a message to work with
+       chat.messages << Ollama::Message.new(role: 'assistant', content: 'original content')
+
+       const_conf_as('OllamaChat::EnvConfig::EDITOR' => '/usr/bin/vim')
+
+       # Mock Tempfile behavior to simulate editor interaction
+       tmp_double = double('tmp', write: true, flush: true, path: '/tmp/test')
+       expect(Tempfile).to receive(:open).and_yield(tmp_double)
+
+       # Mock system call to simulate successful editor execution
+       expect(chat).to receive(:system).with('/usr/bin/vim "/tmp/test"').and_return(true)
+
+       # Mock file reading to return edited content
+       expect(File).to receive(:read).with('/tmp/test').and_return('edited content')
+
+       # The method should return the edited content
+       expect(chat.revise_last).to eq 'edited content'
+     end
+
+     it 'handles missing last message' do
+       expect(STDERR).to receive(:puts).with(/No message available to revise/)
+       expect(chat.revise_last).to be_nil
+     end
+
+     it 'handles missing editor gracefully' do
+       chat.messages << Ollama::Message.new(role: 'assistant', content: 'original content')
+
+       const_conf_as('OllamaChat::EnvConfig::EDITOR' => nil)
+
+       expect(STDERR).to receive(:puts).with(/Editor required for revise/)
+       expect(chat.revise_last).to be_nil
+     end
+
+     it 'handles no messages to revise' do
+       # Clear messages array
+       chat.instance_variable_get(:@messages).clear
+
+       expect(STDERR).to receive(:puts).with(/No message available to revise/)
+       expect(chat.revise_last).to be_nil
+     end
+
+     it 'handles editor failure' do
+       const_conf_as('OllamaChat::EnvConfig::EDITOR' => '/usr/bin/vim')
+       chat.messages << Ollama::Message.new(role: 'assistant', content: 'original content')
+       tmp_double = double('tmp', write: true, flush: true, path: '/tmp/test')
+       expect(Tempfile).to receive(:open).and_yield(tmp_double)
+       expect(chat).to receive(:system).with('/usr/bin/vim "/tmp/test"').and_return(false)
+       expect(STDERR).to receive(:puts).with(/Editor failed to edit message/)
+       expect(chat.revise_last).to be_nil
+     end
+   end
+ end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: ollama_chat
  version: !ruby/object:Gem::Version
- version: 0.0.53
+ version: 0.0.54
  platform: ruby
  authors:
  - Florian Frank
@@ -420,6 +420,7 @@ extra_rdoc_files:
  - lib/ollama_chat/information.rb
  - lib/ollama_chat/input_content.rb
  - lib/ollama_chat/kramdown_ansi.rb
+ - lib/ollama_chat/message_editing.rb
  - lib/ollama_chat/message_format.rb
  - lib/ollama_chat/message_list.rb
  - lib/ollama_chat/message_output.rb
@@ -461,6 +462,7 @@ files:
  - lib/ollama_chat/information.rb
  - lib/ollama_chat/input_content.rb
  - lib/ollama_chat/kramdown_ansi.rb
+ - lib/ollama_chat/message_editing.rb
  - lib/ollama_chat/message_format.rb
  - lib/ollama_chat/message_list.rb
  - lib/ollama_chat/message_output.rb
@@ -504,7 +506,9 @@ files:
  - spec/ollama_chat/clipboard_spec.rb
  - spec/ollama_chat/follow_chat_spec.rb
  - spec/ollama_chat/information_spec.rb
+ - spec/ollama_chat/input_content_spec.rb
  - spec/ollama_chat/kramdown_ansi_spec.rb
+ - spec/ollama_chat/message_editing_spec.rb
  - spec/ollama_chat/message_list_spec.rb
  - spec/ollama_chat/message_output_spec.rb
  - spec/ollama_chat/model_handling_spec.rb
@@ -550,7 +554,9 @@ test_files:
  - spec/ollama_chat/clipboard_spec.rb
  - spec/ollama_chat/follow_chat_spec.rb
  - spec/ollama_chat/information_spec.rb
+ - spec/ollama_chat/input_content_spec.rb
  - spec/ollama_chat/kramdown_ansi_spec.rb
+ - spec/ollama_chat/message_editing_spec.rb
  - spec/ollama_chat/message_list_spec.rb
  - spec/ollama_chat/message_output_spec.rb
  - spec/ollama_chat/model_handling_spec.rb