ollama_chat 0.0.16 → 0.0.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 1cddb901277529210fcc39b4c086217515e98aa75c1569044a47e1cf7149acbe
- data.tar.gz: f7a40ef2722c750bb71ff2b42d3c2cf0c04816bfd11753e22fa7a09f2d13c9f8
+ metadata.gz: 1ca8b4d15da972d1600ccab9349f1852ccb75f2733cd9a85ccfb57f2890cb786
+ data.tar.gz: 3ec2450f156b98ac722b58358de3190e58d30ebf6f8e09dd435804b409c1e1e0
  SHA512:
- metadata.gz: 7cd5baab3a073464bbf28d3cd9e6ebbcfdcd938c55b1a17ca5d6f2a6b3949d1a2d0a138d9aacf192f6d2f2de1751b8d3e10368c2c15eba5b955ee5454fbd6ff1
- data.tar.gz: 3ed3442aa4962666de9a56ce5a10e816c9b6f21d85e1281c65962d70c41c60e0c9c8f47fc4e380703799d67e006b2aef36fbe4e2208632e7c3d02c4cc3120c1a
+ metadata.gz: 98a1335f95549196c6a6a486465c3dad12694b8006c18a7ff2619be8a866b826869ed35dde99d810fea330101e67361142000ac705e5784450bc9ae11952f8d7
+ data.tar.gz: dce73464d416d095eee8d86723ca241cc7affdbc87b4472bf4e0cf0caf084f52d2c47810fd3fe8ec67cb883df85e58098caae1ad45c4088850d98cc37705c2b0
data/CHANGES.md CHANGED
@@ -1,5 +1,12 @@
  # Changes
 
+ ## 2025-07-14 v0.0.17
+
+ * Implement Pager Support for List Command
+ * Add simple command completion to chat
+ * Improved chat link generation
+   + Changed `record.tags.first` to have prefix `?#` before the tag
+
  ## 2025-07-10 v0.0.16
 
  - **New Features**
data/Rakefile CHANGED
@@ -32,13 +32,13 @@ GemHadar do
  dependency 'excon', '~> 1.0'
  dependency 'ollama-ruby', '~> 1.2'
  dependency 'documentrix', '~> 0.0', '>= 0.0.2'
- dependency 'unix_socks'
+ dependency 'unix_socks', '>= 0.0.1'
  dependency 'rss', '~> 0.3'
  dependency 'term-ansicolor', '~> 1.11'
  dependency 'redis', '~> 5.0'
  dependency 'mime-types', '~> 3.0'
  dependency 'reverse_markdown', '~> 3.0'
- dependency 'xdg', '~> 7.0'
+ dependency 'xdg'
  dependency 'kramdown-ansi', '~> 0.0', '>= 0.0.1'
  dependency 'complex_config', '~> 0.22', '>= 0.22.2'
  dependency 'tins', '~> 1.34'
data/VERSION CHANGED
@@ -1 +1 @@
- 0.0.16
+ 0.0.17
@@ -348,9 +348,10 @@ class OllamaChat::Chat
  @parse_content = true
  type = :terminal_input
  input_prompt = bold { color(172) { message_type(@images) + " user" } } + bold { "> " }
-
  begin
- content = Reline.readline(input_prompt, true)&.chomp
+ content = enable_command_completion do
+ Reline.readline(input_prompt, true)&.chomp
+ end
  rescue Interrupt
  if message = server_socket_message
  type = message.type.full?(:to_sym) || :socket_input
@@ -418,7 +419,7 @@ class OllamaChat::Chat
  else
  'file://%s' % File.expand_path(record.source)
  end
- [ link, record.tags.first ]
+ [ link, ?# + record.tags.first ]
  }.uniq.map { |l, t| hyperlink(l, t) }.join(' ')
  config.debug and jj messages.to_ary
  end
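
Editor's note (not part of the diff): `?#` is Ruby's character-literal syntax for the one-character string `"#"`, so the changed line simply prefixes each tag with a hash sign before it becomes the hyperlink label. A minimal sketch of the effect, using a hypothetical `record` stand-in:

```ruby
# Sketch only; `record` here is a hypothetical stand-in for the gem's document record.
record = Struct.new(:source, :tags).new('docs/readme.md', %w[ ruby chat ])

link  = 'file://%s' % File.expand_path(record.source)
label = ?# + record.tags.first   # ?# is the character literal "#", so label == "#ruby"
[ link, label ]                  # => [ "file:///…/docs/readme.md", "#ruby" ]
```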
@@ -526,4 +527,15 @@ class OllamaChat::Chat
  exit 1
  end
  end
+
+ def enable_command_completion(&block)
+ old = Reline.completion_proc
+ commands = display_chat_help_message.scan(/^\s*(\S+)/).inject(&:concat)
+ Reline.completion_proc = -> input {
+ commands.grep Regexp.new('\A' + Regexp.quote(input))
+ }
+ block.()
+ ensure
+ Reline.completion_proc = old
+ end
  end
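
Editor's note (not part of the diff): the new `enable_command_completion` temporarily installs a `Reline.completion_proc` built from the slash commands in the help text, runs the wrapped `Reline.readline` call, and restores the previous proc in an `ensure` block. A minimal, self-contained sketch of the same Reline technique, using a hard-coded command list instead of scanning `display_chat_help_message`:

```ruby
require 'reline'

# Hypothetical command list; ollama_chat derives its list from the chat help text.
COMMANDS = %w[ /copy /paste /markdown /list /quit /help ]

def with_command_completion
  old = Reline.completion_proc
  Reline.completion_proc = ->(input) {
    # Complete any command whose beginning matches what was typed so far.
    COMMANDS.grep(Regexp.new('\A' + Regexp.quote(input)))
  }
  yield
ensure
  Reline.completion_proc = old   # always restore the previous completion behaviour
end

# Typing "/ma" and pressing TAB completes to "/markdown".
line = with_command_completion { Reline.readline('> ', true)&.chomp }
puts line.inspect
```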
@@ -58,8 +58,8 @@ module OllamaChat::Information
  nil
  end
 
- def display_chat_help
- STDOUT.puts <<~EOT
+ private def display_chat_help_message
+ <<~EOT
  /copy to copy last response to clipboard
  /paste to paste content
  /markdown toggle markdown output
@@ -89,6 +89,10 @@ module OllamaChat::Information
  /quit to quit
  /help to view this help
  EOT
+ end
+
+ def display_chat_help
+ STDOUT.puts display_chat_help_message
  nil
  end
 
@@ -99,31 +99,33 @@ class OllamaChat::MessageList
  # @return [ OllamaChat::MessageList ]
  def list_conversation(last = nil)
  last = (last || @messages.size).clamp(0, @messages.size)
- @messages[-last..-1].to_a.each do |m|
- role_color = case m.role
- when 'user' then 172
- when 'assistant' then 111
- when 'system' then 213
- else 210
+ use_pager do |output|
+ @messages[-last..-1].to_a.each do |m|
+ role_color = case m.role
+ when 'user' then 172
+ when 'assistant' then 111
+ when 'system' then 213
+ else 210
+ end
+ thinking = if @chat.think.on?
+ think_annotate do
+ m.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
+ end
  end
- thinking = if @chat.think.on?
- think_annotate do
- m.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
- end
- end
- content = m.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
- message_text = message_type(m.images) + " "
- message_text += bold { color(role_color) { m.role } }
- if thinking
- message_text += [ ?:, thinking, talk_annotate { content } ].compact.
- map { _1.chomp } * ?\n
- else
- message_text += ":\n#{content}"
+ content = m.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
+ message_text = message_type(m.images) + " "
+ message_text += bold { color(role_color) { m.role } }
+ if thinking
+ message_text += [ ?:, thinking, talk_annotate { content } ].compact.
+ map { _1.chomp } * ?\n
+ else
+ message_text += ":\n#{content}"
+ end
+ m.images.full? { |images|
+ message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
+ }
+ output.puts message_text
  end
- m.images.full? { |images|
- message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
- }
- STDOUT.puts message_text
  end
  self
  end
@@ -261,4 +263,22 @@ class OllamaChat::MessageList
  def config
  @chat.config
  end
+
+ def determine_pager_command
+ default_pager = ENV['PAGER'].full?
+ if fallback_pager = `which less`.chomp.full? || `which more`.chomp.full?
+ fallback_pager << ' -r'
+ end
+ default_pager || fallback_pager
+ end
+
+ def use_pager
+ command = determine_pager_command
+ output_buffer = StringIO.new
+ yield output_buffer
+ messages = output_buffer.string
+ Kramdown::ANSI::Pager.pager(command:, lines: messages.count(?\n)) do |output|
+ output.puts messages
+ end
+ end
  end
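
Editor's note (not part of the diff): `use_pager` collects everything `list_conversation` would print into a `StringIO`, then hands the buffered text to `Kramdown::ANSI::Pager.pager` together with the command from `$PAGER` (or a `less -r` / `more -r` fallback), so long conversations scroll in a pager instead of flooding the terminal. A minimal sketch of the same buffer-then-page idea using plain `IO.popen`, independent of the kramdown-ansi helper:

```ruby
require 'stringio'

# Sketch under the assumption that `less -r` is an acceptable fallback pager.
def page_output(pager = ENV['PAGER'] || 'less -r')
  buffer = StringIO.new
  yield buffer                      # collect all output that would have been printed
  text = buffer.string
  if $stdout.tty? && !pager.empty?
    IO.popen(pager, 'w') { |io| io.write(text) }  # stream the buffer through the pager
  else
    print text                      # no TTY or no pager configured: just print it
  end
end

page_output do |out|
  200.times { |i| out.puts "message #{i}" }
end
```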
@@ -1,6 +1,6 @@
  module OllamaChat
  # OllamaChat version
- VERSION = '0.0.16'
+ VERSION = '0.0.17'
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
  # -*- encoding: utf-8 -*-
- # stub: ollama_chat 0.0.16 ruby lib
+ # stub: ollama_chat 0.0.17 ruby lib
 
  Gem::Specification.new do |s|
  s.name = "ollama_chat".freeze
- s.version = "0.0.16".freeze
+ s.version = "0.0.17".freeze
 
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
@@ -34,13 +34,13 @@ Gem::Specification.new do |s|
  s.add_runtime_dependency(%q<excon>.freeze, ["~> 1.0".freeze])
  s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 1.2".freeze])
  s.add_runtime_dependency(%q<documentrix>.freeze, ["~> 0.0".freeze, ">= 0.0.2".freeze])
- s.add_runtime_dependency(%q<unix_socks>.freeze, [">= 0".freeze])
+ s.add_runtime_dependency(%q<unix_socks>.freeze, [">= 0.0.1".freeze])
  s.add_runtime_dependency(%q<rss>.freeze, ["~> 0.3".freeze])
  s.add_runtime_dependency(%q<term-ansicolor>.freeze, ["~> 1.11".freeze])
  s.add_runtime_dependency(%q<redis>.freeze, ["~> 5.0".freeze])
  s.add_runtime_dependency(%q<mime-types>.freeze, ["~> 3.0".freeze])
  s.add_runtime_dependency(%q<reverse_markdown>.freeze, ["~> 3.0".freeze])
- s.add_runtime_dependency(%q<xdg>.freeze, ["~> 7.0".freeze])
+ s.add_runtime_dependency(%q<xdg>.freeze, [">= 0".freeze])
  s.add_runtime_dependency(%q<kramdown-ansi>.freeze, ["~> 0.0".freeze, ">= 0.0.1".freeze])
  s.add_runtime_dependency(%q<complex_config>.freeze, ["~> 0.22".freeze, ">= 0.22.2".freeze])
  s.add_runtime_dependency(%q<tins>.freeze, ["~> 1.34".freeze])
@@ -10,7 +10,8 @@ RSpec.describe OllamaChat::MessageList do
  units: 'SI (International System of Units)'
  ),
  prompts: double(
- location: 'You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}'
+ location: 'You are at %{location_name} (%{location_decimal_degrees}),' \
+ ' on %{localtime}, preferring %{units}'
  ),
  system_prompts: double(
  assistant?: 'You are a helpful assistant.'
@@ -64,32 +65,60 @@ RSpec.describe OllamaChat::MessageList do
  FileUtils.rm_f 'tmp/test-conversation.json'
  end
 
- it 'can list conversations without thinking' do
- expect(chat).to receive(:markdown).
- and_return(double(on?: true)).at_least(:once)
- expect(chat).to receive(:think).
- and_return(double(on?: false)).at_least(:once)
- list << Ollama::Message.new(role: 'user', content: 'world')
- expect(STDOUT).to receive(:puts).
- with("📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n")
- expect(STDOUT).to receive(:puts).
- with("📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n")
- list.list_conversation
- end
-
- it 'can list conversations with thinking' do
- expect(chat).to receive(:markdown).
- and_return(double(on?: true)).at_least(:once)
- expect(chat).to receive(:think).
- and_return(double(on?: true)).at_least(:once)
- expect(STDOUT).to receive(:puts).
- with("📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\n💭\nI need to say something nice…\n\n💬\nhello\n")
- expect(STDOUT).to receive(:puts).
- with("📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n")
- list.set_system_prompt nil
- list << Ollama::Message.new(role: 'system', content: 'hello', thinking: 'I need to say something nice…')
- list << Ollama::Message.new(role: 'user', content: 'world')
- list.list_conversation
+ context 'without pager' do
+ before do
+ expect(list).to receive(:determine_pager_command).and_return nil
+ end
+
+ it 'can list conversations without thinking' do
+ expect(chat).to receive(:markdown).
+ and_return(double(on?: true)).at_least(:once)
+ expect(chat).to receive(:think).
+ and_return(double(on?: false)).at_least(:once)
+ list << Ollama::Message.new(role: 'user', content: 'world')
+ expect(STDOUT).to receive(:puts).
+ with(
+ "📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n" \
+ "📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n"
+ )
+ list.list_conversation
+ end
+
+ it 'can list conversations with thinking' do
+ expect(chat).to receive(:markdown).
+ and_return(double(on?: true)).at_least(:once)
+ expect(chat).to receive(:think).
+ and_return(double(on?: true)).at_least(:once)
+ expect(STDOUT).to receive(:puts).
+ with(
+ "📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\n" \
+ "💭\nI need to say something nice…\n\n💬\nhello\n" \
+ "📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n"
+ )
+ list.set_system_prompt nil
+ list << Ollama::Message.new(
+ role: 'system', content: 'hello',
+ thinking: 'I need to say something nice…'
+ )
+ list << Ollama::Message.new(role: 'user', content: 'world')
+ list.list_conversation
+ end
+ end
+
+ context 'with pager' do
+ before do
+ expect(list).to receive(:determine_pager_command).and_return 'true'
+ expect(Tins::Terminal).to receive(:lines).and_return 1
+ end
+
+ it 'can list conversations' do
+ expect(chat).to receive(:markdown).
+ and_return(double(on?: true)).at_least(:once)
+ expect(chat).to receive(:think).
+ and_return(double(on?: false)).at_least(:once)
+ list << Ollama::Message.new(role: 'user', content: 'world')
+ list.list_conversation
+ end
  end
 
  it 'can show_system_prompt' do
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: ollama_chat
  version: !ruby/object:Gem::Version
- version: 0.0.16
+ version: 0.0.17
  platform: ruby
  authors:
  - Florian Frank
@@ -161,14 +161,14 @@ dependencies:
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '0'
+ version: 0.0.1
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: '0'
+ version: 0.0.1
  - !ruby/object:Gem::Dependency
  name: rss
  requirement: !ruby/object:Gem::Requirement
@@ -243,16 +243,16 @@ dependencies:
  name: xdg
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
- version: '7.0'
+ version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">="
  - !ruby/object:Gem::Version
- version: '7.0'
+ version: '0'
  - !ruby/object:Gem::Dependency
  name: kramdown-ansi
  requirement: !ruby/object:Gem::Requirement