ollama_chat 0.0.16 → 0.0.18

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 1cddb901277529210fcc39b4c086217515e98aa75c1569044a47e1cf7149acbe
4
- data.tar.gz: f7a40ef2722c750bb71ff2b42d3c2cf0c04816bfd11753e22fa7a09f2d13c9f8
3
+ metadata.gz: 5b9fa311f0d8759e37629d79db604ca81c344699e18203efd60f4b3361eb95f0
4
+ data.tar.gz: '0727224708a9fb941a6572322793f0161fc21cee945ed08642fd41707c26d57f'
5
5
  SHA512:
6
- metadata.gz: 7cd5baab3a073464bbf28d3cd9e6ebbcfdcd938c55b1a17ca5d6f2a6b3949d1a2d0a138d9aacf192f6d2f2de1751b8d3e10368c2c15eba5b955ee5454fbd6ff1
7
- data.tar.gz: 3ed3442aa4962666de9a56ce5a10e816c9b6f21d85e1281c65962d70c41c60e0c9c8f47fc4e380703799d67e006b2aef36fbe4e2208632e7c3d02c4cc3120c1a
6
+ metadata.gz: ba5167d27ca64037115b36fc77c405e85311f1595fb771835b3c55429305b2e6304e7064c30e08f57fb8d97e99ccda818cc45cb43681c25b42e120c30d9feb64
7
+ data.tar.gz: 43b994f34e3e161b49c989dfec1df0d02f6debb710c7a078902d3872b2bebea2fc3606481ef5d36142afba36b9a60a5daa80630663f2ef5614d30674e4329c1e
data/CHANGES.md CHANGED
@@ -1,5 +1,32 @@
1
1
  # Changes
2
2
 
3
+ ## 2025-07-31 v0.0.18
4
+
5
+ * **Added /prompt command**: The `/prompt` command was added to the list of
6
+ supported commands, allowing users to prefill their input with text from
7
+ predefined prompts.
8
+ + Integrated prompt handling in `lib/ollama_chat/chat.rb`, where a new case
9
+ statement for `/prompt` sets up prefill functionality.
10
+ + Implemented prompt selection using the `choose_prompt` method in
11
+ `lib/ollama_chat/dialog.rb`.
12
+ + Set up input hooks using `Reline.pre_input_hook` to insert selected prompts
13
+ before user input.
14
+ * **Improved user interaction**:
15
+ - Added model size display during model selection via the `model_with_size`
16
+ method in `lib/ollama_chat/dialog.rb`.
17
+ - Updated model selection logic to include formatted sizes in the display.
18
+ * **Optimized voice list generation**: In
19
+ `lib/ollama_chat/ollama_chat_config/default_config.yml`, updated the voice
20
+ list generation logic to use a more efficient method of retrieving voice
21
+ names.
22
+
23
+ ## 2025-07-14 v0.0.17
24
+
25
+ * Implement Pager Support for List Command
26
+ * Add simple command completion to chat
27
+ * Improved chat link generation
28
+ Changed the display of `record.tags.first` to be prefixed with `#` (using the `?#` character literal)
29
+
3
30
  ## 2025-07-10 v0.0.16
4
31
 
5
32
  - **New Features**
data/README.md CHANGED
@@ -131,6 +131,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
131
131
  /drop [n] drop the last n exchanges, defaults to 1
132
132
  /model change the model
133
133
  /system [show] change/show system prompt
134
+ /prompt prefill user prompt with preset prompts
134
135
  /regenerate the last answer message
135
136
  /collection [clear|change] change (default) collection or clear
136
137
  /info show information for current session
data/Rakefile CHANGED
@@ -32,13 +32,13 @@ GemHadar do
32
32
  dependency 'excon', '~> 1.0'
33
33
  dependency 'ollama-ruby', '~> 1.2'
34
34
  dependency 'documentrix', '~> 0.0', '>= 0.0.2'
35
- dependency 'unix_socks'
35
+ dependency 'unix_socks', '>= 0.0.1'
36
36
  dependency 'rss', '~> 0.3'
37
37
  dependency 'term-ansicolor', '~> 1.11'
38
38
  dependency 'redis', '~> 5.0'
39
39
  dependency 'mime-types', '~> 3.0'
40
40
  dependency 'reverse_markdown', '~> 3.0'
41
- dependency 'xdg', '~> 7.0'
41
+ dependency 'xdg'
42
42
  dependency 'kramdown-ansi', '~> 0.0', '>= 0.0.1'
43
43
  dependency 'complex_config', '~> 0.22', '>= 0.22.2'
44
44
  dependency 'tins', '~> 1.34'
data/VERSION CHANGED
@@ -1 +1 @@
1
- 0.0.16
1
+ 0.0.18
@@ -159,6 +159,9 @@ class OllamaChat::Chat
159
159
  end
160
160
  @messages.show_system_prompt
161
161
  :next
162
+ when %r(^/prompt)
163
+ @prefill_prompt = choose_prompt
164
+ :next
162
165
  when %r(^/regenerate$)
163
166
  if content = messages.second_last&.content
164
167
  content.gsub!(/\nConsider these chunks for your answer.*\z/, '')
@@ -348,9 +351,18 @@ class OllamaChat::Chat
348
351
  @parse_content = true
349
352
  type = :terminal_input
350
353
  input_prompt = bold { color(172) { message_type(@images) + " user" } } + bold { "> " }
351
-
352
354
  begin
353
- content = Reline.readline(input_prompt, true)&.chomp
355
+ content = enable_command_completion do
356
+ if prefill_prompt = @prefill_prompt.full?
357
+ Reline.pre_input_hook = -> {
358
+ Reline.insert_text prefill_prompt.gsub(/\n*\z/, '')
359
+ @prefill_prompt = nil
360
+ }
361
+ else
362
+ Reline.pre_input_hook = nil
363
+ end
364
+ Reline.readline(input_prompt, true)&.chomp
365
+ end
354
366
  rescue Interrupt
355
367
  if message = server_socket_message
356
368
  type = message.type.full?(:to_sym) || :socket_input
@@ -418,7 +430,7 @@ class OllamaChat::Chat
418
430
  else
419
431
  'file://%s' % File.expand_path(record.source)
420
432
  end
421
- [ link, record.tags.first ]
433
+ [ link, ?# + record.tags.first ]
422
434
  }.uniq.map { |l, t| hyperlink(l, t) }.join(' ')
423
435
  config.debug and jj messages.to_ary
424
436
  end
@@ -526,4 +538,15 @@ class OllamaChat::Chat
526
538
  exit 1
527
539
  end
528
540
  end
541
+
542
+ def enable_command_completion(&block)
543
+ old = Reline.completion_proc
544
+ commands = display_chat_help_message.scan(/^\s*(\S+)/).inject(&:concat)
545
+ Reline.completion_proc = -> input {
546
+ commands.grep Regexp.new('\A' + Regexp.quote(input))
547
+ }
548
+ block.()
549
+ ensure
550
+ Reline.completion_proc = old
551
+ end
529
552
  end
@@ -1,10 +1,21 @@
1
1
  module OllamaChat::Dialog
2
+ private def model_with_size(model)
3
+ result = model.name
4
+ formatted_size = Term::ANSIColor.bold {
5
+ Tins::Unit.format(model.size, unit: ?B, prefix: 1024, format: '%.1f %U')
6
+ }
7
+ result.singleton_class.class_eval do
8
+ define_method(:to_s) { "%s %s" % [ model.name, formatted_size ] }
9
+ end
10
+ result
11
+ end
12
+
2
13
  def choose_model(cli_model, current_model)
3
14
  selector = if cli_model =~ /\A\?+(.*)\z/
4
15
  cli_model = ''
5
16
  Regexp.new($1)
6
17
  end
7
- models = ollama.tags.models.map(&:name).sort
18
+ models = ollama.tags.models.sort_by(&:name).map { |m| model_with_size(m) }
8
19
  selector and models = models.grep(selector)
9
20
  model = if cli_model == ''
10
21
  OllamaChat::Utils::Chooser.choose(models) || current_model
@@ -92,6 +103,18 @@ module OllamaChat::Dialog
92
103
  @messages.set_system_prompt(system)
93
104
  end
94
105
 
106
+ def choose_prompt
107
+ prompts = config.prompts.attribute_names
108
+ prompts.unshift('[EXIT]')
109
+ case chosen = OllamaChat::Utils::Chooser.choose(prompts)
110
+ when '[EXIT]', nil
111
+ STDOUT.puts "Exiting chooser."
112
+ return
113
+ when *prompts
114
+ config.prompts.send(chosen)
115
+ end
116
+ end
117
+
95
118
  def change_voice
96
119
  chosen = OllamaChat::Utils::Chooser.choose(config.voice.list)
97
120
  @current_voice = chosen.full? || config.voice.default
@@ -58,8 +58,8 @@ module OllamaChat::Information
58
58
  nil
59
59
  end
60
60
 
61
- def display_chat_help
62
- STDOUT.puts <<~EOT
61
+ private def display_chat_help_message
62
+ <<~EOT
63
63
  /copy to copy last response to clipboard
64
64
  /paste to paste content
65
65
  /markdown toggle markdown output
@@ -72,6 +72,7 @@ module OllamaChat::Information
72
72
  /drop [n] drop the last n exchanges, defaults to 1
73
73
  /model change the model
74
74
  /system [show] change/show system prompt
75
+ /prompt prefill user prompt with preset prompts
75
76
  /regenerate the last answer message
76
77
  /collection [clear|change] change (default) collection or clear
77
78
  /info show information for current session
@@ -89,6 +90,10 @@ module OllamaChat::Information
89
90
  /quit to quit
90
91
  /help to view this help
91
92
  EOT
93
+ end
94
+
95
+ def display_chat_help
96
+ STDOUT.puts display_chat_help_message
92
97
  nil
93
98
  end
94
99
 
@@ -99,31 +99,33 @@ class OllamaChat::MessageList
99
99
  # @return [ OllamaChat::MessageList ]
100
100
  def list_conversation(last = nil)
101
101
  last = (last || @messages.size).clamp(0, @messages.size)
102
- @messages[-last..-1].to_a.each do |m|
103
- role_color = case m.role
104
- when 'user' then 172
105
- when 'assistant' then 111
106
- when 'system' then 213
107
- else 210
102
+ use_pager do |output|
103
+ @messages[-last..-1].to_a.each do |m|
104
+ role_color = case m.role
105
+ when 'user' then 172
106
+ when 'assistant' then 111
107
+ when 'system' then 213
108
+ else 210
109
+ end
110
+ thinking = if @chat.think.on?
111
+ think_annotate do
112
+ m.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
113
+ end
108
114
  end
109
- thinking = if @chat.think.on?
110
- think_annotate do
111
- m.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
112
- end
113
- end
114
- content = m.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
115
- message_text = message_type(m.images) + " "
116
- message_text += bold { color(role_color) { m.role } }
117
- if thinking
118
- message_text += [ ?:, thinking, talk_annotate { content } ].compact.
119
- map { _1.chomp } * ?\n
120
- else
121
- message_text += ":\n#{content}"
115
+ content = m.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
116
+ message_text = message_type(m.images) + " "
117
+ message_text += bold { color(role_color) { m.role } }
118
+ if thinking
119
+ message_text += [ ?:, thinking, talk_annotate { content } ].compact.
120
+ map { _1.chomp } * ?\n
121
+ else
122
+ message_text += ":\n#{content}"
123
+ end
124
+ m.images.full? { |images|
125
+ message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
126
+ }
127
+ output.puts message_text
122
128
  end
123
- m.images.full? { |images|
124
- message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
125
- }
126
- STDOUT.puts message_text
127
129
  end
128
130
  self
129
131
  end
@@ -261,4 +263,22 @@ class OllamaChat::MessageList
261
263
  def config
262
264
  @chat.config
263
265
  end
266
+
267
+ def determine_pager_command
268
+ default_pager = ENV['PAGER'].full?
269
+ if fallback_pager = `which less`.chomp.full? || `which more`.chomp.full?
270
+ fallback_pager << ' -r'
271
+ end
272
+ default_pager || fallback_pager
273
+ end
274
+
275
+ def use_pager
276
+ command = determine_pager_command
277
+ output_buffer = StringIO.new
278
+ yield output_buffer
279
+ messages = output_buffer.string
280
+ Kramdown::ANSI::Pager.pager(command:, lines: messages.count(?\n)) do |output|
281
+ output.puts messages
282
+ end
283
+ end
264
284
  end
@@ -32,7 +32,7 @@ system_prompts:
32
32
  voice:
33
33
  enabled: false
34
34
  default: Samantha
35
- list: <%= `say -v ? 2>/dev/null`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
35
+ list: <%= `say -v ? 2>/dev/null`.lines.map { |l| l.force_encoding('ASCII-8BIT'); l[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s %>
36
36
  markdown: true
37
37
  stream: true
38
38
  document_policy: importing
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.16'
3
+ VERSION = '0.0.18'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.16 ruby lib
2
+ # stub: ollama_chat 0.0.18 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.16".freeze
6
+ s.version = "0.0.18".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
@@ -24,7 +24,7 @@ Gem::Specification.new do |s|
24
24
 
25
25
  s.specification_version = 4
26
26
 
27
- s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.20".freeze])
27
+ s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.23".freeze])
28
28
  s.add_development_dependency(%q<all_images>.freeze, ["~> 0.6".freeze])
29
29
  s.add_development_dependency(%q<rspec>.freeze, ["~> 3.2".freeze])
30
30
  s.add_development_dependency(%q<kramdown>.freeze, ["~> 2.0".freeze])
@@ -34,13 +34,13 @@ Gem::Specification.new do |s|
34
34
  s.add_runtime_dependency(%q<excon>.freeze, ["~> 1.0".freeze])
35
35
  s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 1.2".freeze])
36
36
  s.add_runtime_dependency(%q<documentrix>.freeze, ["~> 0.0".freeze, ">= 0.0.2".freeze])
37
- s.add_runtime_dependency(%q<unix_socks>.freeze, [">= 0".freeze])
37
+ s.add_runtime_dependency(%q<unix_socks>.freeze, [">= 0.0.1".freeze])
38
38
  s.add_runtime_dependency(%q<rss>.freeze, ["~> 0.3".freeze])
39
39
  s.add_runtime_dependency(%q<term-ansicolor>.freeze, ["~> 1.11".freeze])
40
40
  s.add_runtime_dependency(%q<redis>.freeze, ["~> 5.0".freeze])
41
41
  s.add_runtime_dependency(%q<mime-types>.freeze, ["~> 3.0".freeze])
42
42
  s.add_runtime_dependency(%q<reverse_markdown>.freeze, ["~> 3.0".freeze])
43
- s.add_runtime_dependency(%q<xdg>.freeze, ["~> 7.0".freeze])
43
+ s.add_runtime_dependency(%q<xdg>.freeze, [">= 0".freeze])
44
44
  s.add_runtime_dependency(%q<kramdown-ansi>.freeze, ["~> 0.0".freeze, ">= 0.0.1".freeze])
45
45
  s.add_runtime_dependency(%q<complex_config>.freeze, ["~> 0.22".freeze, ">= 0.22.2".freeze])
46
46
  s.add_runtime_dependency(%q<tins>.freeze, ["~> 1.34".freeze])
@@ -10,7 +10,8 @@ RSpec.describe OllamaChat::MessageList do
10
10
  units: 'SI (International System of Units)'
11
11
  ),
12
12
  prompts: double(
13
- location: 'You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}'
13
+ location: 'You are at %{location_name} (%{location_decimal_degrees}),' \
14
+ ' on %{localtime}, preferring %{units}'
14
15
  ),
15
16
  system_prompts: double(
16
17
  assistant?: 'You are a helpful assistant.'
@@ -64,32 +65,60 @@ RSpec.describe OllamaChat::MessageList do
64
65
  FileUtils.rm_f 'tmp/test-conversation.json'
65
66
  end
66
67
 
67
- it 'can list conversations without thinking' do
68
- expect(chat).to receive(:markdown).
69
- and_return(double(on?: true)).at_least(:once)
70
- expect(chat).to receive(:think).
71
- and_return(double(on?: false)).at_least(:once)
72
- list << Ollama::Message.new(role: 'user', content: 'world')
73
- expect(STDOUT).to receive(:puts).
74
- with("📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n")
75
- expect(STDOUT).to receive(:puts).
76
- with("📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n")
77
- list.list_conversation
78
- end
79
-
80
- it 'can list conversations with thinking' do
81
- expect(chat).to receive(:markdown).
82
- and_return(double(on?: true)).at_least(:once)
83
- expect(chat).to receive(:think).
84
- and_return(double(on?: true)).at_least(:once)
85
- expect(STDOUT).to receive(:puts).
86
- with("📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\n💭\nI need to say something nice…\n\n💬\nhello\n")
87
- expect(STDOUT).to receive(:puts).
88
- with("📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n")
89
- list.set_system_prompt nil
90
- list << Ollama::Message.new(role: 'system', content: 'hello', thinking: 'I need to say something nice…')
91
- list << Ollama::Message.new(role: 'user', content: 'world')
92
- list.list_conversation
68
+ context 'without pager' do
69
+ before do
70
+ expect(list).to receive(:determine_pager_command).and_return nil
71
+ end
72
+
73
+ it 'can list conversations without thinking' do
74
+ expect(chat).to receive(:markdown).
75
+ and_return(double(on?: true)).at_least(:once)
76
+ expect(chat).to receive(:think).
77
+ and_return(double(on?: false)).at_least(:once)
78
+ list << Ollama::Message.new(role: 'user', content: 'world')
79
+ expect(STDOUT).to receive(:puts).
80
+ with(
81
+ "📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n" \
82
+ "📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n"
83
+ )
84
+ list.list_conversation
85
+ end
86
+
87
+ it 'can list conversations with thinking' do
88
+ expect(chat).to receive(:markdown).
89
+ and_return(double(on?: true)).at_least(:once)
90
+ expect(chat).to receive(:think).
91
+ and_return(double(on?: true)).at_least(:once)
92
+ expect(STDOUT).to receive(:puts).
93
+ with(
94
+ "📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\n" \
95
+ "💭\nI need to say something nice…\n\n💬\nhello\n" \
96
+ "📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n"
97
+ )
98
+ list.set_system_prompt nil
99
+ list << Ollama::Message.new(
100
+ role: 'system', content: 'hello',
101
+ thinking: 'I need to say something nice…'
102
+ )
103
+ list << Ollama::Message.new(role: 'user', content: 'world')
104
+ list.list_conversation
105
+ end
106
+ end
107
+
108
+ context 'with pager' do
109
+ before do
110
+ expect(list).to receive(:determine_pager_command).and_return 'true'
111
+ expect(Tins::Terminal).to receive(:lines).and_return 1
112
+ end
113
+
114
+ it 'can list conversations' do
115
+ expect(chat).to receive(:markdown).
116
+ and_return(double(on?: true)).at_least(:once)
117
+ expect(chat).to receive(:think).
118
+ and_return(double(on?: false)).at_least(:once)
119
+ list << Ollama::Message.new(role: 'user', content: 'world')
120
+ list.list_conversation
121
+ end
93
122
  end
94
123
 
95
124
  it 'can show_system_prompt' do
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ollama_chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.16
4
+ version: 0.0.18
5
5
  platform: ruby
6
6
  authors:
7
7
  - Florian Frank
@@ -15,14 +15,14 @@ dependencies:
15
15
  requirements:
16
16
  - - "~>"
17
17
  - !ruby/object:Gem::Version
18
- version: '1.20'
18
+ version: '1.23'
19
19
  type: :development
20
20
  prerelease: false
21
21
  version_requirements: !ruby/object:Gem::Requirement
22
22
  requirements:
23
23
  - - "~>"
24
24
  - !ruby/object:Gem::Version
25
- version: '1.20'
25
+ version: '1.23'
26
26
  - !ruby/object:Gem::Dependency
27
27
  name: all_images
28
28
  requirement: !ruby/object:Gem::Requirement
@@ -161,14 +161,14 @@ dependencies:
161
161
  requirements:
162
162
  - - ">="
163
163
  - !ruby/object:Gem::Version
164
- version: '0'
164
+ version: 0.0.1
165
165
  type: :runtime
166
166
  prerelease: false
167
167
  version_requirements: !ruby/object:Gem::Requirement
168
168
  requirements:
169
169
  - - ">="
170
170
  - !ruby/object:Gem::Version
171
- version: '0'
171
+ version: 0.0.1
172
172
  - !ruby/object:Gem::Dependency
173
173
  name: rss
174
174
  requirement: !ruby/object:Gem::Requirement
@@ -243,16 +243,16 @@ dependencies:
243
243
  name: xdg
244
244
  requirement: !ruby/object:Gem::Requirement
245
245
  requirements:
246
- - - "~>"
246
+ - - ">="
247
247
  - !ruby/object:Gem::Version
248
- version: '7.0'
248
+ version: '0'
249
249
  type: :runtime
250
250
  prerelease: false
251
251
  version_requirements: !ruby/object:Gem::Requirement
252
252
  requirements:
253
- - - "~>"
253
+ - - ">="
254
254
  - !ruby/object:Gem::Version
255
- version: '7.0'
255
+ version: '0'
256
256
  - !ruby/object:Gem::Dependency
257
257
  name: kramdown-ansi
258
258
  requirement: !ruby/object:Gem::Requirement