ollama_chat 0.0.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (58) hide show
  1. checksums.yaml +7 -0
  2. data/.all_images.yml +17 -0
  3. data/.gitignore +9 -0
  4. data/Gemfile +5 -0
  5. data/README.md +159 -0
  6. data/Rakefile +58 -0
  7. data/VERSION +1 -0
  8. data/bin/ollama_chat +5 -0
  9. data/lib/ollama_chat/chat.rb +398 -0
  10. data/lib/ollama_chat/clipboard.rb +23 -0
  11. data/lib/ollama_chat/dialog.rb +94 -0
  12. data/lib/ollama_chat/document_cache.rb +16 -0
  13. data/lib/ollama_chat/follow_chat.rb +60 -0
  14. data/lib/ollama_chat/information.rb +113 -0
  15. data/lib/ollama_chat/message_list.rb +216 -0
  16. data/lib/ollama_chat/message_type.rb +5 -0
  17. data/lib/ollama_chat/model_handling.rb +29 -0
  18. data/lib/ollama_chat/ollama_chat_config.rb +103 -0
  19. data/lib/ollama_chat/parsing.rb +159 -0
  20. data/lib/ollama_chat/source_fetching.rb +173 -0
  21. data/lib/ollama_chat/switches.rb +119 -0
  22. data/lib/ollama_chat/utils/cache_fetcher.rb +38 -0
  23. data/lib/ollama_chat/utils/chooser.rb +53 -0
  24. data/lib/ollama_chat/utils/fetcher.rb +175 -0
  25. data/lib/ollama_chat/utils/file_argument.rb +34 -0
  26. data/lib/ollama_chat/utils.rb +7 -0
  27. data/lib/ollama_chat/version.rb +8 -0
  28. data/lib/ollama_chat.rb +20 -0
  29. data/ollama_chat.gemspec +50 -0
  30. data/spec/assets/api_show.json +63 -0
  31. data/spec/assets/api_tags.json +21 -0
  32. data/spec/assets/conversation.json +14 -0
  33. data/spec/assets/duckduckgo.html +757 -0
  34. data/spec/assets/example.atom +26 -0
  35. data/spec/assets/example.csv +5 -0
  36. data/spec/assets/example.html +10 -0
  37. data/spec/assets/example.pdf +139 -0
  38. data/spec/assets/example.ps +4 -0
  39. data/spec/assets/example.rb +1 -0
  40. data/spec/assets/example.rss +25 -0
  41. data/spec/assets/example.xml +7 -0
  42. data/spec/assets/kitten.jpg +0 -0
  43. data/spec/assets/prompt.txt +1 -0
  44. data/spec/ollama_chat/chat_spec.rb +105 -0
  45. data/spec/ollama_chat/clipboard_spec.rb +29 -0
  46. data/spec/ollama_chat/follow_chat_spec.rb +46 -0
  47. data/spec/ollama_chat/information_spec.rb +50 -0
  48. data/spec/ollama_chat/message_list_spec.rb +132 -0
  49. data/spec/ollama_chat/model_handling_spec.rb +35 -0
  50. data/spec/ollama_chat/parsing_spec.rb +240 -0
  51. data/spec/ollama_chat/source_fetching_spec.rb +54 -0
  52. data/spec/ollama_chat/switches_spec.rb +167 -0
  53. data/spec/ollama_chat/utils/cache_fetcher_spec.rb +43 -0
  54. data/spec/ollama_chat/utils/fetcher_spec.rb +137 -0
  55. data/spec/ollama_chat/utils/file_argument_spec.rb +17 -0
  56. data/spec/spec_helper.rb +46 -0
  57. data/tmp/.keep +0 -0
  58. metadata +476 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: 7968cdb60fc0612f59f3c240927a4a11f21ad664009270d614a31a119e818f17
4
+ data.tar.gz: f27240bd0371ecb4c5e2b20a73f2b94e3091ee3c6e230b313c89b39112be6096
5
+ SHA512:
6
+ metadata.gz: d665a8f4e8182b0b6a18f3daf86ce7a464632c2dfe3c608332528d10502ac65f4c3663cd7a6ebc18439f4c1156a183ce0962171d3bfe326b074f539fba51cee4
7
+ data.tar.gz: fa7a6384dabf4204a128ae56ef43c3fe22cbe2842f69ffa5c5d1db16133d64767541be616e6219ac4923cde21d4441b6e8f1a1f24dea726ed27dbbca35043d6a
data/.all_images.yml ADDED
@@ -0,0 +1,17 @@
1
+ dockerfile: |-
2
+ RUN apk add --no-cache build-base git
3
+ RUN gem update --system
4
+ RUN gem install gem_hadar bundler
5
+
6
+ script: &script |-
7
+ echo -e "\e[1m"
8
+ ruby -v
9
+ rm -f Gemfile.lock
10
+ bundle install --jobs=$(getconf _NPROCESSORS_ONLN) --full-index
11
+ echo -e "\e[0m"
12
+ rake test
13
+
14
+ images:
15
+ ruby:3.4-alpine: *script
16
+ ruby:3.3-alpine: *script
17
+ ruby:3.2-alpine: *script
data/.gitignore ADDED
@@ -0,0 +1,9 @@
1
+ .*.sw[pon]
2
+ .AppleDouble
3
+ .bundle
4
+ .yardoc
5
+ Gemfile.lock
6
+ corpus
7
+ coverage
8
+ pkg
9
+ tags
data/Gemfile ADDED
@@ -0,0 +1,5 @@
1
+ # vim: set filetype=ruby et sw=2 ts=2:
2
+
3
+ source 'https://rubygems.org'
4
+
5
+ gemspec
data/README.md ADDED
@@ -0,0 +1,159 @@
1
+ # OllamaChat - Ruby Chat Bot for Ollama
2
+
3
+ ## Description
4
+
5
+ **ollama_chat** is a chat client that can be used to connect to an ollama
6
+ server and enter chat conversations with the LLMs provided by it.
7
+
8
+ ## Installation (gem)
9
+
10
+ To install **ollama_chat**, you can type
11
+
12
+ ```
13
+ gem install ollama_chat
14
+ ```
15
+
16
+ in your terminal.
17
+
18
+ ## Usage
19
+
20
+ It can be started with the following arguments:
21
+
22
+ ```
23
+ Usage: ollama_chat [OPTIONS]
24
+
25
+ -f CONFIG config file to read
26
+ -u URL the ollama base url, OLLAMA_URL
27
+ -m MODEL the ollama model to chat with, OLLAMA_CHAT_MODEL
28
+ -s SYSTEM the system prompt to use as a file, OLLAMA_CHAT_SYSTEM
29
+ -c CHAT a saved chat conversation to load
30
+ -C COLLECTION name of the collection used in this conversation
31
+ -D DOCUMENT load document and add to embeddings collection (multiple)
32
+ -M use (empty) MemoryCache for this chat session
33
+ -E disable embeddings for this chat session
34
+ -V display the current version number and quit
35
+ -h this help
36
+ ```
37
+
38
+ The base URL can be either set by the environment variable `OLLAMA_URL` or it
39
+ is derived from the environment variable `OLLAMA_HOST`. The default model to
40
+ connect can be configured in the environment variable `OLLAMA_CHAT_MODEL`.
41
+
42
+ The YAML config file is stored in `$XDG_CONFIG_HOME/ollama_chat/config.yml` and
43
+ you can use it for more complex settings.
44
+
45
+ ### Example: Setting a system prompt
46
+
47
+ Some settings can be passed as arguments as well, e. g. if you want to choose a
48
+ specific system prompt:
49
+
50
+ ```
51
+ $ ollama_chat -s sherlock.txt
52
+ Model with architecture llama found.
53
+ Connecting to llama3.1@http://ollama.local.net:11434 now…
54
+ Configured system prompt is:
55
+ You are Sherlock Holmes and the user is your new client, Dr. Watson is also in
56
+ the room. You will talk and act in the typical manner of Sherlock Holmes and
57
+ try to solve the user's case using logic and deduction.
58
+
59
+ Type /help to display the chat help.
60
+ 📨 user:
61
+ Good morning.
62
+ 📨 assistant:
63
+ Ah, good morning, my dear fellow! It is a pleasure to make your acquaintance. I
64
+ am Sherlock Holmes, the renowned detective, and this is my trusty sidekick, Dr.
65
+ Watson. Please, have a seat and tell us about the nature of your visit. What
66
+ seems to be the problem that has brought you to our humble abode at 221B Baker
67
+ Street?
68
+
69
+ (Watson nods in encouragement as he takes notes)
70
+
71
+ Now, pray tell, what is it that puzzles you, my dear client? A missing item,
72
+ perhaps? Or a mysterious occurrence that requires clarification? The game, as
73
+ they say, is afoot!
74
+ ```
75
+
76
+ ### Example: Using a multimodal model
77
+
78
+ This example shows how an image like this can be sent to the LLM for multimodal
79
+ analysis:
80
+
81
+ ![cat](spec/assets/kitten.jpg)
82
+
83
+ ```
84
+ $ ollama_chat -m llava-llama3
85
+ Model with architecture llama found.
86
+ Connecting to llava-llama3@http://localhost:11434 now…
87
+ Type /help to display the chat help.
88
+ 📸 user> What's on this image? ./spec/assets/kitten.jpg
89
+ 📨 assistant:
90
+ The image captures a moment of tranquility featuring a young cat. The cat,
91
+ adorned with gray and white fur marked by black stripes on its face and legs,
92
+ is the central figure in this scene. Its eyes, a striking shade of blue, are
93
+ wide open and directed towards the camera, giving an impression of curiosity or
94
+ alertness.
95
+
96
+ The cat is comfortably nestled on a red blanket, which contrasts vividly with
97
+ its fur. The blanket, soft and inviting, provides a sense of warmth to the
98
+ image. In the background, partially obscured by the cat's head, is another
99
+ blanket of similar red hue. The repetition of the color adds a sense of harmony
100
+ to the composition.
101
+
102
+ The cat's position on the right side of the photo creates an interesting
103
+ asymmetry with the camera lens, which occupies the left side of the frame. This
104
+ visual balance enhances the overall composition of the image.
105
+
106
+ There are no discernible texts or other objects in the image. The focus is
107
+ solely on the cat and its immediate surroundings. The image does not provide
108
+ any information about the location or setting beyond what has been described.
109
+ The simplicity of the scene allows the viewer to concentrate on the main
110
+ subject - the young, blue-eyed cat.
111
+ ```
112
+
113
+ ### Chat commands
114
+
115
+ The following commands can be given inside the chat, if prefixed by a `/`:
116
+
117
+ ```
118
+ /copy to copy last response to clipboard
119
+ /paste to paste content
120
+ /markdown toggle markdown output
121
+ /stream toggle stream output
122
+ /location toggle location submission
123
+ /voice( change) toggle voice output or change the voice
124
+ /list [n] list the last n / all conversation exchanges
125
+ /clear clear the whole conversation
126
+ /clobber clear the conversation and collection
127
+ /drop [n] drop the last n exchanges, defaults to 1
128
+ /model change the model
129
+ /system change system prompt (clears conversation)
130
+ /regenerate the last answer message
131
+ /collection( clear|change) change (default) collection or clear
132
+ /info show information for current session
133
+ /config output current configuration ("$XDG_CONFIG_HOME/ollama_chat/config.yml")
134
+ /document_policy pick a scan policy for document references
135
+ /import source import the source's content
136
+ /summarize [n] source summarize the source's content in n words
137
+ /embedding toggle embedding paused or not
138
+ /embed source embed the source's content
139
+ /web [n] query query web search & return n or 1 results
140
+ /links( clear) display (or clear) links used in the chat
141
+ /save filename store conversation messages
142
+ /load filename load conversation messages
143
+ /quit to quit
144
+ /help to view this help
145
+ ```
146
+
147
+ ## Download
148
+
149
+ The homepage of this app is located at
150
+
151
+ * https://github.com/flori/ollama\_chat
152
+
153
+ ## Author
154
+
155
+ <b>OllamaChat</b> was written by [Florian Frank](mailto:flori@ping.de)
156
+
157
+ ## License
158
+
159
+ This software is licensed under the <i>MIT</i> license.
data/Rakefile ADDED
@@ -0,0 +1,58 @@
1
+ # vim: set filetype=ruby et sw=2 ts=2:
2
+
3
+ require 'gem_hadar'
4
+
5
+ GemHadar do
6
+ name 'ollama_chat'
7
+ module_type :module
8
+ author 'Florian Frank'
9
+ email 'flori@ping.de'
10
+ homepage "https://github.com/flori/#{name}"
11
+ summary 'A command-line interface (CLI) for interacting with an Ollama AI model.'
12
+ description <<~EOT
13
+ The app provides a command-line interface (CLI) to an Ollama AI model,
14
+ allowing users to engage in text-based conversations and generate
15
+ human-like responses. Users can import data from local files or web pages,
16
+ which are then processed through three different modes: fully importing the
17
+ content into the conversation context, summarizing the information for
18
+ concise reference, or storing it in an embedding vector database for later
19
+ retrieval based on the conversation.
20
+ EOT
21
+
22
+ test_dir 'spec'
23
+ ignore '.*.sw[pon]', 'pkg', 'Gemfile.lock', '.AppleDouble', '.bundle',
24
+ '.yardoc', 'tags', 'corpus', 'coverage'
25
+
26
+ readme 'README.md'
27
+
28
+ required_ruby_version '~> 3.1'
29
+
30
+ executables << 'ollama_chat'
31
+
32
+ dependency 'excon', '~> 1.0'
33
+ dependency 'ollama-ruby', '~> 0.14'
34
+ dependency 'documentrix', '~> 0.0'
35
+ dependency 'rss', '~> 0.3'
36
+ dependency 'term-ansicolor', '~> 1.11'
37
+ dependency 'redis', '~> 5.0'
38
+ dependency 'mime-types', '~> 3.0'
39
+ dependency 'reverse_markdown', '~> 3.0'
40
+ dependency 'xdg', '~> 7.0'
41
+ dependency 'kramdown-ansi', '~> 0.0', '>= 0.0.1'
42
+ dependency 'complex_config', '~> 0.22', '>= 0.22.2'
43
+ dependency 'tins', '~> 1.34'
44
+ dependency 'search_ui', '~> 0.0'
45
+ dependency 'amatch', '~> 0.4.1'
46
+ dependency 'pdf-reader', '~> 2.0'
47
+ dependency 'csv', '~> 3.0'
48
+ development_dependency 'all_images', '~> 0.6'
49
+ development_dependency 'rspec', '~> 3.2'
50
+ development_dependency 'kramdown', '~> 2.0'
51
+ development_dependency 'webmock'
52
+ development_dependency 'debug'
53
+ development_dependency 'simplecov'
54
+
55
+ licenses << 'MIT'
56
+
57
+ clobber 'coverage'
58
+ end
data/VERSION ADDED
@@ -0,0 +1 @@
1
+ 0.0.0
data/bin/ollama_chat ADDED
@@ -0,0 +1,5 @@
1
+ #!/usr/bin/env ruby
2
+
3
+ require 'ollama_chat'
4
+
5
+ exit OllamaChat::Chat.new.start.to_i
data/lib/ollama_chat/chat.rb ADDED
@@ -0,0 +1,398 @@
1
+ require 'tins'
2
+ require 'term/ansicolor'
3
+ require 'reline'
4
+ require 'reverse_markdown'
5
+ require 'complex_config'
6
+ require 'fileutils'
7
+ require 'uri'
8
+ require 'nokogiri'
9
+ require 'rss'
10
+ require 'pdf/reader'
11
+ require 'csv'
12
+ require 'xdg'
13
+
14
+ class OllamaChat::Chat
15
+ include Tins::GO
16
+ include Term::ANSIColor
17
+ include OllamaChat::DocumentCache
18
+ include OllamaChat::Switches
19
+ include OllamaChat::ModelHandling
20
+ include OllamaChat::Parsing
21
+ include OllamaChat::SourceFetching
22
+ include OllamaChat::Dialog
23
+ include OllamaChat::Information
24
+ include OllamaChat::Clipboard
25
+ include OllamaChat::MessageType
26
+
27
+ def initialize(argv: ARGV.dup)
28
+ @opts = go 'f:u:m:s:c:C:D:MEVh', argv
29
+ @opts[?h] and exit usage
30
+ @opts[?V] and exit version
31
+ @ollama_chat_config = OllamaChat::OllamaChatConfig.new(@opts[?f])
32
+ self.config = @ollama_chat_config.config
33
+ setup_switches(config)
34
+ base_url = @opts[?u] || config.url
35
+ @ollama = Ollama::Client.new(
36
+ base_url: base_url,
37
+ debug: config.debug,
38
+ user_agent:
39
+ )
40
+ @document_policy = config.document_policy
41
+ @model = choose_model(@opts[?m], config.model.name)
42
+ @model_options = Ollama::Options[config.model.options]
43
+ model_system = pull_model_unless_present(@model, @model_options)
44
+ @embedding_enabled.set(config.embedding.enabled && !@opts[?E])
45
+ @messages = OllamaChat::MessageList.new(self)
46
+ if @opts[?c]
47
+ @messages.load_conversation(@opts[?c])
48
+ else
49
+ default = config.system_prompts.default? || model_system
50
+ if @opts[?s] =~ /\A\?/
51
+ change_system_prompt(default, system: @opts[?s])
52
+ else
53
+ system = OllamaChat::Utils::FileArgument.get_file_argument(@opts[?s], default:)
54
+ system.present? and @messages.set_system_prompt(system)
55
+ end
56
+ end
57
+ @documents = setup_documents
58
+ @cache = setup_cache
59
+ @current_voice = config.voice.default
60
+ @images = []
61
+ end
62
+
63
+ attr_reader :ollama
64
+
65
+ attr_reader :documents
66
+
67
+ def links
68
+ @links ||= Set.new
69
+ end
70
+
71
+ class << self
72
+ attr_accessor :config
73
+ end
74
+
75
+ def config=(config)
76
+ self.class.config = config
77
+ end
78
+
79
+ def config
80
+ self.class.config
81
+ end
82
+
83
+ def start
84
+ info
85
+ STDOUT.puts "\nType /help to display the chat help."
86
+
87
+ loop do
88
+ parse_content = true
89
+ input_prompt = bold { color(172) { message_type(@images) + " user" } } + bold { "> " }
90
+ content = Reline.readline(input_prompt, true)&.chomp
91
+
92
+ case content
93
+ when %r(^/copy$)
94
+ copy_to_clipboard
95
+ next
96
+ when %r(^/paste$)
97
+ content = paste_from_input
98
+ when %r(^/markdown$)
99
+ @markdown.toggle
100
+ next
101
+ when %r(^/stream$)
102
+ @stream.toggle
103
+ next
104
+ when %r(^/location$)
105
+ @location.toggle
106
+ next
107
+ when %r(^/voice(?:\s+(change))?$)
108
+ if $1 == 'change'
109
+ change_voice
110
+ else
111
+ @voice.toggle
112
+ end
113
+ next
114
+ when %r(^/list(?:\s+(\d*))?$)
115
+ last = 2 * $1.to_i if $1
116
+ @messages.list_conversation(last)
117
+ next
118
+ when %r(^/clear$)
119
+ @messages.clear
120
+ STDOUT.puts "Cleared messages."
121
+ next
122
+ when %r(^/clobber$)
123
+ if ask?(prompt: 'Are you sure to clear messages and collection? (y/n) ') =~ /\Ay/i
124
+ @messages.clear
125
+ @documents.clear
126
+ links.clear
127
+ STDOUT.puts "Cleared messages and collection #{bold{@documents.collection}}."
128
+ else
129
+ STDOUT.puts 'Cancelled.'
130
+ end
131
+ next
132
+ when %r(^/drop(?:\s+(\d*))?$)
133
+ @messages.drop($1)
134
+ @messages.list_conversation(2)
135
+ next
136
+ when %r(^/model$)
137
+ @model = choose_model('', @model)
138
+ next
139
+ when %r(^/system$)
140
+ change_system_prompt(@system)
141
+ info
142
+ next
143
+ when %r(^/regenerate$)
144
+ if content = @messages.second_last&.content
145
+ content.gsub!(/\nConsider these chunks for your answer.*\z/, '')
146
+ @messages.drop(2)
147
+ else
148
+ STDOUT.puts "Not enough messages in this conversation."
149
+ redo
150
+ end
151
+ parse_content = false
152
+ content
153
+ when %r(^/collection(?:\s+(clear|change))?$)
154
+ case $1 || 'change'
155
+ when 'clear'
156
+ loop do
157
+ tags = @documents.tags.add('[EXIT]').add('[ALL]')
158
+ tag = OllamaChat::Utils::Chooser.choose(tags, prompt: 'Clear? %s')
159
+ case tag
160
+ when nil, '[EXIT]'
161
+ STDOUT.puts "Exiting chooser."
162
+ break
163
+ when '[ALL]'
164
+ if ask?(prompt: 'Are you sure? (y/n) ') =~ /\Ay/i
165
+ @documents.clear
166
+ STDOUT.puts "Cleared collection #{bold{@documents.collection}}."
167
+ break
168
+ else
169
+ STDOUT.puts 'Cancelled.'
170
+ sleep 3
171
+ end
172
+ when /./
173
+ @documents.clear(tags: [ tag ])
174
+ STDOUT.puts "Cleared tag #{tag} from collection #{bold{@documents.collection}}."
175
+ sleep 3
176
+ end
177
+ end
178
+ when 'change'
179
+ choose_collection(@documents.collection)
180
+ end
181
+ next
182
+ when %r(^/info$)
183
+ info
184
+ next
185
+ when %r(^/document_policy$)
186
+ choose_document_policy
187
+ next
188
+ when %r(^/import\s+(.+))
189
+ parse_content = false
190
+ content = import($1) or next
191
+ when %r(^/summarize\s+(?:(\d+)\s+)?(.+))
192
+ parse_content = false
193
+ content = summarize($2, words: $1) or next
194
+ when %r(^/embedding$)
195
+ @embedding_paused.toggle(show: false)
196
+ @embedding.show
197
+ next
198
+ when %r(^/embed\s+(.+))
199
+ parse_content = false
200
+ content = embed($1) or next
201
+ when %r(^/web\s+(?:(\d+)\s+)?(.+))
202
+ parse_content = false
203
+ urls = search_web($2, $1.to_i)
204
+ urls.each do |url|
205
+ fetch_source(url) { |url_io| embed_source(url_io, url) }
206
+ end
207
+ urls_summarized = urls.map { summarize(_1) }
208
+ query = $2.inspect
209
+ results = urls.zip(urls_summarized).
210
+ map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
211
+ content = config.prompts.web % { query:, results: }
212
+ when %r(^/save\s+(.+)$)
213
+ @messages.save_conversation($1)
214
+ STDOUT.puts "Saved conversation to #$1."
215
+ next
216
+ when %r(^/links(?:\s+(clear))?$)
217
+ case $1
218
+ when 'clear'
219
+ loop do
220
+ links_options = links.dup.add('[EXIT]').add('[ALL]')
221
+ link = OllamaChat::Utils::Chooser.choose(links_options, prompt: 'Clear? %s')
222
+ case link
223
+ when nil, '[EXIT]'
224
+ STDOUT.puts "Exiting chooser."
225
+ break
226
+ when '[ALL]'
227
+ if ask?(prompt: 'Are you sure? (y/n) ') =~ /\Ay/i
228
+ links.clear
229
+ STDOUT.puts "Cleared all links in list."
230
+ break
231
+ else
232
+ STDOUT.puts 'Cancelled.'
233
+ sleep 3
234
+ end
235
+ when /./
236
+ links.delete(link)
237
+ STDOUT.puts "Cleared link from links in list."
238
+ sleep 3
239
+ end
240
+ end
241
+ when nil
242
+ if links.empty?
243
+ STDOUT.puts "List is empty."
244
+ else
245
+ width = Math.log10(links.size).ceil
246
+ format = "% #{width}s. %s"
247
+ connect = -> link { hyperlink(link) { link } }
248
+ STDOUT.puts links.each_with_index.map { |x, i| format % [ i + 1, connect.(x) ] }
249
+ end
250
+ end
251
+ next
252
+ when %r(^/load\s+(.+)$)
253
+ @messages.load_conversation($1)
254
+ STDOUT.puts "Loaded conversation from #$1."
255
+ next
256
+ when %r(^/config$)
257
+ default_pager = ENV['PAGER'].full?
258
+ if fallback_pager = `which less`.chomp.full? || `which more`.chomp.full?
259
+ fallback_pager << ' -r'
260
+ end
261
+ my_pager = default_pager || fallback_pager
262
+ rendered = config.to_s
263
+ Kramdown::ANSI::Pager.pager(
264
+ lines: rendered.count(?\n),
265
+ command: my_pager
266
+ ) do |output|
267
+ output.puts rendered
268
+ end
269
+ next
270
+ when %r(^/quit$)
271
+ STDOUT.puts "Goodbye."
272
+ return
273
+ when %r(^/)
274
+ display_chat_help
275
+ next
276
+ when ''
277
+ STDOUT.puts "Type /quit to quit."
278
+ next
279
+ when nil
280
+ STDOUT.puts "Goodbye."
281
+ return
282
+ end
283
+
284
+ content, tags = if parse_content
285
+ parse_content(content, @images)
286
+ else
287
+ [ content, Documentrix::Utils::Tags.new ]
288
+ end
289
+
290
+ if @embedding.on? && content
291
+ records = @documents.find_where(
292
+ content.downcase,
293
+ tags:,
294
+ prompt: config.embedding.model.prompt?,
295
+ text_size: config.embedding.found_texts_size?,
296
+ text_count: config.embedding.found_texts_count?,
297
+ )
298
+ unless records.empty?
299
+ content += "\nConsider these chunks for your answer:\n\n"\
300
+ "#{records.map { [ _1.text, _1.tags_set ] * ?\n }.join("\n\n---\n\n")}"
301
+ end
302
+ end
303
+
304
+ @messages << Ollama::Message.new(role: 'user', content:, images: @images.dup)
305
+ @images.clear
306
+ handler = OllamaChat::FollowChat.new(
307
+ messages: @messages,
308
+ markdown: @markdown.on?,
309
+ voice: (@current_voice if @voice.on?)
310
+ )
311
+ ollama.chat(
312
+ model: @model,
313
+ messages: @messages,
314
+ options: @model_options,
315
+ stream: @stream.on?,
316
+ &handler
317
+ )
318
+ if @embedding.on? && !records.empty?
319
+ STDOUT.puts "", records.map { |record|
320
+ link = if record.source =~ %r(\Ahttps?://)
321
+ record.source
322
+ else
323
+ 'file://%s' % File.expand_path(record.source)
324
+ end
325
+ [ link, record.tags.first ]
326
+ }.uniq.map { |l, t| hyperlink(l, t) }.join(' ')
327
+ config.debug and jj @messages.to_ary
328
+ end
329
+ rescue Interrupt
330
+ STDOUT.puts "Type /quit to quit."
331
+ end
332
+ 0
333
+ end
334
+
335
+ private
336
+
337
+ def setup_documents
338
+ if @embedding.on?
339
+ @embedding_model = config.embedding.model.name
340
+ @embedding_model_options = Ollama::Options[config.embedding.model.options]
341
+ pull_model_unless_present(@embedding_model, @embedding_model_options)
342
+ collection = @opts[?C] || config.embedding.collection
343
+ documents = Documentrix::Documents.new(
344
+ ollama:,
345
+ model: @embedding_model,
346
+ model_options: config.embedding.model.options,
347
+ database_filename: config.embedding.database_filename || @ollama_chat_config.database_path,
348
+ collection: ,
349
+ cache: configure_cache,
350
+ redis_url: config.redis.documents.url?,
351
+ debug: config.debug
352
+ )
353
+
354
+ document_list = @opts[?D].to_a
355
+ add_documents_from_argv(documents, document_list)
356
+ documents
357
+ else
358
+ Tins::NULL
359
+ end
360
+ end
361
+
362
+ def add_documents_from_argv(documents, document_list)
363
+ if document_list.any?(&:empty?)
364
+ STDOUT.puts "Clearing collection #{bold{documents.collection}}."
365
+ documents.clear
366
+ document_list.reject!(&:empty?)
367
+ end
368
+ unless document_list.empty?
369
+ document_list.map! do |doc|
370
+ if doc =~ %r(\Ahttps?://)
371
+ doc
372
+ else
373
+ File.expand_path(doc)
374
+ end
375
+ end
376
+ STDOUT.puts "Collection #{bold{documents.collection}}: Adding #{document_list.size} documents…"
377
+ count = 1
378
+ document_list.each_slice(25) do |docs|
379
+ docs.each do |doc|
380
+ fetch_source(doc) do |doc_io|
381
+ embed_source(doc_io, doc, count:)
382
+ end
383
+ count += 1
384
+ end
385
+ end
386
+ end
387
+ end
388
+
389
+ def setup_cache
390
+ if url = config.redis.expiring.url?
391
+ Documentrix::Documents::RedisCache.new(
392
+ prefix: 'Expiring-',
393
+ url:,
394
+ ex: config.redis.expiring.ex,
395
+ )
396
+ end
397
+ end
398
+ end
data/lib/ollama_chat/clipboard.rb ADDED
@@ -0,0 +1,23 @@
1
+ module OllamaChat::Clipboard
2
+ def copy_to_clipboard
3
+ if message = @messages.last and message.role == 'assistant'
4
+ copy = `which #{config.copy}`.chomp
5
+ if copy.present?
6
+ IO.popen(copy, 'w') do |clipboard|
7
+ clipboard.write(message.content)
8
+ end
9
+ STDOUT.puts "The last response has been copied to the system clipboard."
10
+ else
11
+ STDERR.puts "#{config.copy.inspect} command not found in system's path!"
12
+ end
13
+ else
14
+ STDERR.puts "No response available to copy to the system clipboard."
15
+ end
16
+ nil
17
+ end
18
+
19
+ def paste_from_input
20
+ STDOUT.puts bold { "Paste your content and then press C-d!" }
21
+ STDIN.read
22
+ end
23
+ end