ollama_chat 0.0.13 → 0.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: a6a03b8af7c470d83520d269829f51fe480f7a5813bc1b4df50c63d339e28953
4
- data.tar.gz: cb86dd9896d6948fb736c889a672fb29e4f21c8174a3ddb62d6fcc93be59f020
3
+ metadata.gz: 9c7760df8454e8d8b522c8d37448ca4b872f27b44ef484e842f8c35a96fec50b
4
+ data.tar.gz: abbc3fe1a7d53ded7ed878dfbbd6a584a99e8b687e16f1386e6e4e058cda1f46
5
5
  SHA512:
6
- metadata.gz: ef19594894b1bb11217e710e1a302a2d30add6140da629c66e3e60d990e30f45a29196d310e06f7c06b36d8ddf33f84e7bd4278f87705b48e501b06d5f53d290
7
- data.tar.gz: 332c66cdca39a187ecb3aff193e5a2522ad11deabaedb584c32a78a620d4c068be3cc062d181a5e6c1d0bf44fed371b3409010f2a4e404614ba142e37d5afbdc
6
+ metadata.gz: 28056039df80caf298aafb8445c0d3b6662e9fee8d28b6756fb13b844c006092754e68e33fc8947f8326ee11e4248206558c522b170463e4f58da88792cadfde
7
+ data.tar.gz: 795e2bf8fefb75b53c1108637affadb796c24b86c1cec853b16f62053d80c0319a239a8053676434a6c22197411557d91ad995b025e00bb735b83dccbb0d6ef2
data/CHANGES.md CHANGED
@@ -1,8 +1,21 @@
1
1
  # Changes
2
2
 
3
+ ## 2025-06-07 v0.0.14
4
+
5
+ * **Message List Improvements**:
6
+ * Added thinking status to messages when chat is in think mode
7
+ * Improved system prompt handling with new method documentation
8
+ * **Improved /system command handling for OllamaChat chat system**:
9
+ * Added support for '/system [show]' command to show or change system prompt.
10
+ * Added conversation length to chat information display
11
+ * **Improvements to OllamaChat::SourceFetching**:
12
+ * Fixed bug where document type concatenation could cause errors when `full?`
13
+ returns `nil`, ensuring proper string formatting and avoiding potential
14
+ crashes.
15
+
3
16
  ## 2025-06-05 v0.0.13
4
17
 
5
- * Improved chat command handling
18
+ * **Improved chat command handling**
6
19
  - Added support for '/clear tags' to clear all tags.
7
20
  - Updated cases for 'history', 'all' and added case for 'tags'.
8
21
  - Added commands to clear documents collection and print a message in `information.rb`.
data/README.md CHANGED
@@ -123,11 +123,11 @@ The following commands can be given inside the chat, if prefixed by a `/`:
123
123
  /location toggle location submission
124
124
  /voice [change] toggle voice output or change the voice
125
125
  /list [n] list the last n / all conversation exchanges
126
- /clear [messages|links|history] clear the all messages, links, or the chat history (defaults to messages)
126
+ /clear [what] clear what=messages|links|history|tags|all
127
127
  /clobber clear the conversation, links, and collection
128
128
  /drop [n] drop the last n exchanges, defaults to 1
129
129
  /model change the model
130
- /system change system prompt (clears conversation)
130
+ /system [show] change/show system prompt
131
131
  /regenerate the last answer message
132
132
  /collection [clear|change] change (default) collection or clear
133
133
  /info show information for current session
data/VERSION CHANGED
@@ -1 +1 @@
1
- 0.0.13
1
+ 0.0.14
@@ -152,9 +152,11 @@ class OllamaChat::Chat
152
152
  when %r(^/model$)
153
153
  @model = choose_model('', @model)
154
154
  :next
155
- when %r(^/system$)
156
- change_system_prompt(@system)
157
- info
155
+ when %r(^/system(?:\s+(show))?$)
156
+ if $1 != 'show'
157
+ change_system_prompt(@system)
158
+ end
159
+ @messages.show_system_prompt
158
160
  :next
159
161
  when %r(^/regenerate$)
160
162
  if content = messages.second_last&.content
@@ -371,6 +373,8 @@ class OllamaChat::Chat
371
373
  end
372
374
  end
373
375
 
376
+ content = content.encode(invalid: :replace)
377
+
374
378
  content, tags = if @parse_content
375
379
  parse_content(content, @images)
376
380
  else
@@ -1,8 +1,12 @@
1
1
  module OllamaChat::History
2
+ # Returns the full path of the chat history filename based on the
3
+ # configuration.
2
4
  def chat_history_filename
3
5
  File.expand_path(config.chat_history_filename)
4
6
  end
5
7
 
8
+ # Initializes the chat history by loading it from a file if it exists, and
9
+ # then loads the history into Readline::HISTORY.
6
10
  def init_chat_history
7
11
  if File.exist?(chat_history_filename)
8
12
  File.open(chat_history_filename, ?r) do |history|
@@ -13,10 +17,12 @@ module OllamaChat::History
13
17
  end
14
18
  end
15
19
 
20
+ # Saves the current chat history to a file in JSON format.
16
21
  def save_history
17
22
  File.secure_write(chat_history_filename, JSON.dump(Readline::HISTORY))
18
23
  end
19
24
 
25
+ # Clears all entries from Readline::HISTORY.
20
26
  def clear_history
21
27
  Readline::HISTORY.clear
22
28
  end
@@ -30,30 +30,31 @@ module OllamaChat::Information
30
30
  def info
31
31
  STDOUT.puts "Running ollama_chat version: #{bold(OllamaChat::VERSION)}"
32
32
  STDOUT.puts "Connected to ollama server version: #{bold(server_version)}"
33
- STDOUT.puts "Current model is #{bold{@model}}."
33
+ STDOUT.puts "Current conversation model is #{bold{@model}}."
34
34
  if @model_options.present?
35
35
  STDOUT.puts " Options: #{JSON.pretty_generate(@model_options).gsub(/(?<!\A)^/, ' ')}"
36
36
  end
37
37
  @embedding.show
38
38
  if @embedding.on?
39
- STDOUT.puts "Embedding model is #{bold{@embedding_model}}"
39
+ STDOUT.puts "Current embedding model is #{bold{@embedding_model}}"
40
40
  if @embedding_model_options.present?
41
41
  STDOUT.puts " Options: #{JSON.pretty_generate(@embedding_model_options).gsub(/(?<!\A)^/, ' ')}"
42
42
  end
43
43
  STDOUT.puts "Text splitter is #{bold{config.embedding.splitter.name}}."
44
44
  collection_stats
45
45
  end
46
- STDOUT.puts "Documents database cache is #{@documents.nil? ? 'n/a' : bold{@documents.cache.class}}"
47
46
  markdown.show
48
47
  stream.show
48
+ think.show
49
49
  location.show
50
- STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
51
- STDOUT.puts "Thinking is #{bold(think.on? ? 'enabled' : 'disabled')}."
52
- STDOUT.puts "Currently selected search engine is #{bold(search_engine)}."
50
+ voice.show
53
51
  if @voice.on?
54
- STDOUT.puts "Using voice #{bold{@current_voice}} to speak."
52
+ STDOUT.puts " Using voice #{bold{@current_voice}} to speak."
55
53
  end
56
- @messages.show_system_prompt
54
+ STDOUT.puts "Documents database cache is #{@documents.nil? ? 'n/a' : bold{@documents.cache.class}}"
55
+ STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
56
+ STDOUT.puts "Currently selected search engine is #{bold(search_engine)}."
57
+ STDOUT.puts "Conversation length: #{bold(@messages.size.to_s)} message(s)."
57
58
  nil
58
59
  end
59
60
 
@@ -70,7 +71,7 @@ module OllamaChat::Information
70
71
  /clobber clear the conversation, links, and collection
71
72
  /drop [n] drop the last n exchanges, defaults to 1
72
73
  /model change the model
73
- /system change system prompt (clears conversation)
74
+ /system [show] change/show system prompt
74
75
  /regenerate the last answer message
75
76
  /collection [clear|change] change (default) collection or clear
76
77
  /info show information for current session
@@ -106,10 +106,20 @@ class OllamaChat::MessageList
106
106
  when 'system' then 213
107
107
  else 210
108
108
  end
109
+ thinking = if @chat.think.on?
110
+ think_annotate do
111
+ m.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
112
+ end
113
+ end
109
114
  content = m.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
110
115
  message_text = message_type(m.images) + " "
111
116
  message_text += bold { color(role_color) { m.role } }
112
- message_text += ":\n#{content}"
117
+ if thinking
118
+ message_text += [ ?:, thinking, talk_annotate { content } ].compact.
119
+ map { _1.chomp } * ?\n
120
+ else
121
+ message_text += ":\n#{content}"
122
+ end
113
123
  m.images.full? { |images|
114
124
  message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
115
125
  }
@@ -136,17 +146,26 @@ class OllamaChat::MessageList
136
146
  end
137
147
  end
138
148
 
139
- # The set_system_prompt method sets the system prompt for the chat session.
140
- # This implies deleting all of the messages in the message list, so it only
141
- # contains the system prompt at the end.
149
+ # Sets the system prompt for the chat session.
142
150
  #
143
- # @param system [ String ] the new system prompt
151
+ # @param system [String, nil] The new system prompt. If `nil` or `false`, clears the system prompt.
144
152
  #
145
- # @return [ OllamaChat::MessageList ] the message list instance itself, allowing for chaining.
153
+ # @return [OllamaChat::MessageList] Returns `self` to allow chaining of method calls.
154
+ #
155
+ # @note This method:
156
+ # - Removes all existing system prompts from the message list
157
+ # - Adds the new system prompt to the beginning of the message list if provided
158
+ # - Handles edge cases such as clearing prompts when `system` is `nil` or `false`
146
159
  def set_system_prompt(system)
147
- @system = system.to_s
148
- @messages.clear
149
- @messages << Ollama::Message.new(role: 'system', content: self.system)
160
+ @messages.reject! { |msg| msg.role == 'system' }
161
+ if new_system_prompt = system.full?(:to_s)
162
+ @system = new_system_prompt
163
+ @messages.unshift(
164
+ Ollama::Message.new(role: 'system', content: self.system)
165
+ )
166
+ else
167
+ @system = nil
168
+ end
150
169
  self
151
170
  end
152
171
 
@@ -50,7 +50,7 @@ module OllamaChat::SourceFetching
50
50
 
51
51
  def import_source(source_io, source)
52
52
  source = source.to_s
53
- document_type = source_io&.content_type.full? { |ct| italic { ct } } + ' '
53
+ document_type = source_io&.content_type.full? { |ct| italic { ct } + ' ' }
54
54
  STDOUT.puts "Importing #{document_type}document #{source.to_s.inspect} now."
55
55
  source_content = parse_source(source_io)
56
56
  "Imported #{source.inspect}:\n\n#{source_content}\n\n"
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.13'
3
+ VERSION = '0.0.14'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.13 ruby lib
2
+ # stub: ollama_chat 0.0.14 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.13".freeze
6
+ s.version = "0.0.14".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
@@ -86,7 +86,7 @@ RSpec.describe OllamaChat::Chat do
86
86
 
87
87
  it 'returns :next when input is "/system"' do
88
88
  expect(chat).to receive(:change_system_prompt).with(nil)
89
- expect(chat).to receive(:info)
89
+ expect(chat.messages).to receive(:show_system_prompt)
90
90
  expect(chat.handle_input("/system")).to eq :next
91
91
  end
92
92
 
@@ -288,7 +288,8 @@ RSpec.describe OllamaChat::Chat do
288
288
  /
289
289
  Running\ ollama_chat\ version|
290
290
  Connected\ to\ ollama\ server|
291
- Current\ model|
291
+ Current\ conversation\ model|
292
+ Current\ embedding\ model|
292
293
  Options|
293
294
  Embedding|
294
295
  Text\ splitter|
@@ -297,8 +298,10 @@ RSpec.describe OllamaChat::Chat do
297
298
  Streaming|
298
299
  Location|
299
300
  Document\ policy|
300
- Thinking\ is|
301
- Currently\ selected\ search\ engine
301
+ Thinking|
302
+ Voice\ output|
303
+ Currently\ selected\ search\ engine|
304
+ Conversation\ length
302
305
  /x
303
306
  ).at_least(1)
304
307
  expect(chat.info).to be_nil
@@ -24,7 +24,7 @@ RSpec.describe OllamaChat::Information do
24
24
 
25
25
  it 'can show info' do
26
26
  expect(STDOUT).to receive(:puts).with(/Connected to ollama server version/)
27
- expect(STDOUT).to receive(:puts).with(/Current model is/)
27
+ expect(STDOUT).to receive(:puts).with(/Current conversation model is/)
28
28
  expect(STDOUT).to receive(:puts).at_least(1)
29
29
  expect(chat.info).to be_nil
30
30
  end
@@ -64,9 +64,11 @@ RSpec.describe OllamaChat::MessageList do
64
64
  FileUtils.rm_f 'tmp/test-conversation.json'
65
65
  end
66
66
 
67
- it 'can list conversations' do
67
+ it 'can list conversations without thinking' do
68
68
  expect(chat).to receive(:markdown).
69
69
  and_return(double(on?: true)).at_least(:once)
70
+ expect(chat).to receive(:think).
71
+ and_return(double(on?: false)).at_least(:once)
70
72
  list << Ollama::Message.new(role: 'user', content: 'world')
71
73
  expect(STDOUT).to receive(:puts).
72
74
  with("📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n")
@@ -75,6 +77,21 @@ RSpec.describe OllamaChat::MessageList do
75
77
  list.list_conversation
76
78
  end
77
79
 
80
+ it 'can list conversations with thinking' do
81
+ expect(chat).to receive(:markdown).
82
+ and_return(double(on?: true)).at_least(:once)
83
+ expect(chat).to receive(:think).
84
+ and_return(double(on?: true)).at_least(:once)
85
+ expect(STDOUT).to receive(:puts).
86
+ with("📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\n💭\nI need to say something nice…\n\n💬\nhello\n")
87
+ expect(STDOUT).to receive(:puts).
88
+ with("📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n")
89
+ list.set_system_prompt nil
90
+ list << Ollama::Message.new(role: 'system', content: 'hello', thinking: 'I need to say something nice…')
91
+ list << Ollama::Message.new(role: 'user', content: 'world')
92
+ list.list_conversation
93
+ end
94
+
78
95
  it 'can show_system_prompt' do
79
96
  expect(list).to receive(:system).and_return 'test **prompt**'
80
97
  expect(Kramdown::ANSI).to receive(:parse).with('test **prompt**').
@@ -82,10 +99,27 @@ RSpec.describe OllamaChat::MessageList do
82
99
  expect(list.show_system_prompt).to eq list
83
100
  end
84
101
 
85
- it 'can set_system_prompt' do
102
+ it 'can set_system_prompt if unset' do
103
+ list.messages.clear
104
+ expect(list.messages.count { _1.role == 'system' }).to eq 0
86
105
  expect {
87
106
  expect(list.set_system_prompt('test prompt')).to eq list
88
107
  }.to change { list.system }.from(nil).to('test prompt')
108
+ expect(list.messages.count { _1.role == 'system' }).to eq 1
109
+ end
110
+
111
+ it 'can set_system_prompt if already set' do
112
+ list.messages.clear
113
+ expect(list.messages.count { _1.role == 'system' }).to eq 0
114
+ list.set_system_prompt('first prompt')
115
+ expect(list.system).to eq('first prompt')
116
+ expect(list.messages.count { _1.role == 'system' }).to eq 1
117
+ #
118
+ list.set_system_prompt('new prompt')
119
+ expect(list.system).to eq('new prompt')
120
+ expect(list.messages.count { _1.role == 'system' }).to eq 1
121
+ expect(list.messages.first.role).to eq('system')
122
+ expect(list.messages.first.content).to eq('new prompt')
89
123
  end
90
124
 
91
125
  it 'can drop n conversation exchanges' do
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ollama_chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.13
4
+ version: 0.0.14
5
5
  platform: ruby
6
6
  authors:
7
7
  - Florian Frank