ollama_chat 0.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. checksums.yaml +7 -0
  2. data/.all_images.yml +17 -0
  3. data/.gitignore +9 -0
  4. data/Gemfile +5 -0
  5. data/README.md +159 -0
  6. data/Rakefile +58 -0
  7. data/VERSION +1 -0
  8. data/bin/ollama_chat +5 -0
  9. data/lib/ollama_chat/chat.rb +398 -0
  10. data/lib/ollama_chat/clipboard.rb +23 -0
  11. data/lib/ollama_chat/dialog.rb +94 -0
  12. data/lib/ollama_chat/document_cache.rb +16 -0
  13. data/lib/ollama_chat/follow_chat.rb +60 -0
  14. data/lib/ollama_chat/information.rb +113 -0
  15. data/lib/ollama_chat/message_list.rb +216 -0
  16. data/lib/ollama_chat/message_type.rb +5 -0
  17. data/lib/ollama_chat/model_handling.rb +29 -0
  18. data/lib/ollama_chat/ollama_chat_config.rb +103 -0
  19. data/lib/ollama_chat/parsing.rb +159 -0
  20. data/lib/ollama_chat/source_fetching.rb +173 -0
  21. data/lib/ollama_chat/switches.rb +119 -0
  22. data/lib/ollama_chat/utils/cache_fetcher.rb +38 -0
  23. data/lib/ollama_chat/utils/chooser.rb +53 -0
  24. data/lib/ollama_chat/utils/fetcher.rb +175 -0
  25. data/lib/ollama_chat/utils/file_argument.rb +34 -0
  26. data/lib/ollama_chat/utils.rb +7 -0
  27. data/lib/ollama_chat/version.rb +8 -0
  28. data/lib/ollama_chat.rb +20 -0
  29. data/ollama_chat.gemspec +50 -0
  30. data/spec/assets/api_show.json +63 -0
  31. data/spec/assets/api_tags.json +21 -0
  32. data/spec/assets/conversation.json +14 -0
  33. data/spec/assets/duckduckgo.html +757 -0
  34. data/spec/assets/example.atom +26 -0
  35. data/spec/assets/example.csv +5 -0
  36. data/spec/assets/example.html +10 -0
  37. data/spec/assets/example.pdf +139 -0
  38. data/spec/assets/example.ps +4 -0
  39. data/spec/assets/example.rb +1 -0
  40. data/spec/assets/example.rss +25 -0
  41. data/spec/assets/example.xml +7 -0
  42. data/spec/assets/kitten.jpg +0 -0
  43. data/spec/assets/prompt.txt +1 -0
  44. data/spec/ollama_chat/chat_spec.rb +105 -0
  45. data/spec/ollama_chat/clipboard_spec.rb +29 -0
  46. data/spec/ollama_chat/follow_chat_spec.rb +46 -0
  47. data/spec/ollama_chat/information_spec.rb +50 -0
  48. data/spec/ollama_chat/message_list_spec.rb +132 -0
  49. data/spec/ollama_chat/model_handling_spec.rb +35 -0
  50. data/spec/ollama_chat/parsing_spec.rb +240 -0
  51. data/spec/ollama_chat/source_fetching_spec.rb +54 -0
  52. data/spec/ollama_chat/switches_spec.rb +167 -0
  53. data/spec/ollama_chat/utils/cache_fetcher_spec.rb +43 -0
  54. data/spec/ollama_chat/utils/fetcher_spec.rb +137 -0
  55. data/spec/ollama_chat/utils/file_argument_spec.rb +17 -0
  56. data/spec/spec_helper.rb +46 -0
  57. data/tmp/.keep +0 -0
  58. metadata +476 -0
@@ -0,0 +1,94 @@
1
# Interactive chooser helpers for picking the model, document collection,
# document policy, system prompt, and voice of a chat session.
module OllamaChat::Dialog
  # Determine the model to chat with: when +cli_model+ is the empty string,
  # present an interactive chooser over the models the Ollama server reports;
  # otherwise use +cli_model+ (falling back to +current_model+).
  #
  # Always announces the connection target, even when the lookup fails.
  def choose_model(cli_model, current_model)
    available = ollama.tags.models.map(&:name).sort
    model =
      if cli_model == ''
        OllamaChat::Utils::Chooser.choose(available) || current_model
      else
        cli_model || current_model
      end
  ensure
    STDOUT.puts green { "Connecting to #{model}@#{ollama.base_url} now…" }
  end

  # Print +prompt+ and return the user's reply without its trailing newline.
  def ask?(prompt:)
    print prompt
    reply = STDIN.gets
    reply.chomp
  end

  # Let the user switch the documents collection, offering the existing
  # collections plus a [NEW] entry (asks for a name) and [EXIT].
  # Always reports the collection in effect afterwards.
  def choose_collection(current_collection)
    candidates = ([ current_collection ] + @documents.collections)
    candidates = candidates.compact.map(&:to_s).uniq.sort
    candidates.unshift('[EXIT]').unshift('[NEW]')
    picked = OllamaChat::Utils::Chooser.choose(candidates) || current_collection
    if picked == '[NEW]'
      @documents.collection = ask?(prompt: "Enter name of the new collection: ")
    elsif picked.nil? || picked == '[EXIT]'
      STDOUT.puts "Exiting chooser."
    elsif picked =~ /./
      @documents.collection = picked
    end
  ensure
    STDOUT.puts "Using collection #{bold{@documents.collection}}."
    info
  end

  attr_writer :document_policy

  # Let the user pick how document references in user text are handled.
  # On [EXIT] (or an aborted chooser) the previously effective policy is
  # kept: the instance's, else the configured one, else the first known.
  def choose_document_policy
    known = %w[ importing embedding summarizing ignoring ].sort
    fallback =
      if known.index(@document_policy)
        @document_policy
      elsif known.index(config.document_policy)
        config.document_policy
      else
        known.first
      end
    known.unshift('[EXIT]')
    choice = OllamaChat::Utils::Chooser.choose(known)
    if choice.nil? || choice == '[EXIT]'
      STDOUT.puts "Exiting chooser."
      choice = fallback
    end
    self.document_policy = choice
  ensure
    STDOUT.puts "Using document policy #{bold{@document_policy}}."
    info
  end

  # Change the system prompt for the session. +system+ may start with a
  # selector character followed by a regexp that is matched against the
  # configured system prompt names; a unique match is used directly,
  # otherwise a chooser is shown (with [NEW] to enter a prompt and [EXIT]
  # to abort). Setting the prompt clears the conversation.
  def change_system_prompt(default, system: nil)
    pattern = Regexp.new(system.to_s[1..-1].to_s)
    names = config.system_prompts.attribute_names.compact.grep(pattern)
    if names.size == 1
      chosen_prompt = config.system_prompts.send(names.first)
    else
      names.unshift('[EXIT]').unshift('[NEW]')
      selection = OllamaChat::Utils::Chooser.choose(names)
      chosen_prompt =
        case selection
        when '[NEW]'
          ask?(prompt: "Enter new system prompt to use: ")
        when '[EXIT]'
          STDOUT.puts "Exiting chooser."
          return
        when nil
          default
        when *names
          config.system_prompts.send(selection)
        else
          default
        end
    end
    @messages.set_system_prompt(chosen_prompt)
  end

  # Let the user pick the voice used for speech output, falling back to the
  # configured default when nothing was chosen.
  def change_voice
    picked = OllamaChat::Utils::Chooser.choose(config.voice.list)
    @current_voice = picked.full? || config.voice.default
  end

  # Build a fresh MessageList bound to this chat.
  def message_list
    MessageList.new(self)
  end
end
@@ -0,0 +1,16 @@
1
# Resolves which Documentrix document-cache implementation a chat
# session should use.
module OllamaChat::DocumentCache
  # The cache class named (as a string) by the configuration.
  def document_cache_class
    Object.const_get(config.cache)
  end

  # Pick the cache class for this session: the -M switch forces the
  # in-memory cache, otherwise the configured class is used. Any failure
  # to resolve it (e.g. an unknown constant) falls back to MemoryCache
  # with a diagnostic on STDERR instead of aborting.
  def configure_cache
    @opts[?M] ? Documentrix::Documents::MemoryCache : document_cache_class
  rescue => error
    STDERR.puts "Caught #{error.class}: #{error} => Falling back to MemoryCache."
    Documentrix::Documents::MemoryCache
  end
end
@@ -0,0 +1,60 @@
1
# Streaming response handler: appends assistant output to the message list,
# echoes it to the output stream (optionally re-rendered as ANSI Markdown),
# optionally speaks it, and prints evaluation statistics once the response
# reports itself done.
class OllamaChat::FollowChat
  include Ollama
  include Ollama::Handlers::Concern
  include Term::ANSIColor
  include OllamaChat::MessageType

  # @param messages [ OllamaChat::MessageList ] list the assistant reply is appended to
  # @param markdown [ true, false ] when true, re-render accumulated content as Markdown
  # @param voice [ String, nil ] voice name; enables a Say handler when given
  # @param output [ IO ] stream to write to (defaults to STDOUT)
  def initialize(messages:, markdown: false, voice: nil, output: STDOUT)
    super(output:)
    @output.sync = true # emit streamed chunks immediately, unbuffered
    @markdown = markdown
    @say = voice ? Handlers::Say.new(voice:) : NOP
    @messages = messages
    @user = nil
  end

  # Handle one streamed +response+ chunk. Assistant chunks are accumulated
  # into the last message (a fresh assistant message is started when the
  # previous message has another role). In Markdown mode the terminal is
  # cleared and the whole message re-rendered on every chunk so markup that
  # spans chunks displays correctly; otherwise the chunk is printed as-is.
  #
  # @return [ OllamaChat::FollowChat ] self
  def call(response)
    OllamaChat::Chat.config.debug and jj response
    if response&.message&.role == 'assistant'
      if @messages&.last&.role != 'assistant'
        @messages << Message.new(role: 'assistant', content: '')
        # Header line shown once per assistant message (color 111 = light blue).
        @user = message_type(@messages.last.images) + " " +
          bold { color(111) { 'assistant:' } }
        @output.puts @user unless @markdown
      end
      if content = response.message&.content
        # Replace "thinking" tags with emoji markers in the displayed text.
        content = content.gsub(%r(<think>), "💭\n").gsub(%r(</think>), "\n💬")
      end
      # NOTE(review): if a chunk arrives with nil content, `content` is nil
      # here and `<<` would raise — presumably assistant chunks always carry
      # content; confirm against the Ollama streaming API.
      @messages.last.content << content
      if @markdown and content = @messages.last.content.full?
        markdown_content = Kramdown::ANSI.parse(content)
        @output.print clear_screen, move_home, @user, ?\n, markdown_content
      else
        @output.print content
      end
      @say.call(response)
    end
    if response.done
      @output.puts "", eval_stats(response)
    end
    self
  end

  # Format token counts, durations, and rates of the finished +response+
  # as a wrapped, colored one-line summary. Durations arrive in nanoseconds
  # (hence the 1e9 divisions).
  #
  # @return [ String ] the formatted statistics line
  def eval_stats(response)
    eval_duration = response.eval_duration / 1e9
    prompt_eval_duration = response.prompt_eval_duration / 1e9
    stats_text = {
      eval_duration: Tins::Duration.new(eval_duration),
      eval_count: response.eval_count.to_i,
      eval_rate: bold { "%.2f c/s" % (response.eval_count.to_i / eval_duration) } + color(111),
      prompt_eval_duration: Tins::Duration.new(prompt_eval_duration),
      prompt_eval_count: response.prompt_eval_count.to_i,
      prompt_eval_rate: bold { "%.2f c/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
      total_duration: Tins::Duration.new(response.total_duration / 1e9),
      load_duration: Tins::Duration.new(response.load_duration / 1e9),
    }.map { _1 * '=' } * ' ' # join each pair with '=', pairs with spaces
    '📊 ' + color(111) {
      Kramdown::ANSI::Width.wrap(stats_text, percentage: 90).gsub(/(?<!\A)^/, '  ')
    }
  end
end
@@ -0,0 +1,113 @@
1
# Informational output for a chat session: user-agent helpers, session
# status, the /help text, CLI usage, and version display.
module OllamaChat::Information
  extend Tins::Concern

  included do
    # Make UserAgent methods available both on instances and on the class.
    include UserAgent
    extend UserAgent
  end

  # Program name and HTTP User-Agent string.
  module UserAgent
    # Returns the program name.
    def progname
      'ollama_chat'
    end

    # Returns "ollama_chat/<version>".
    def user_agent
      [ progname, OllamaChat::VERSION ] * ?/
    end
  end

  # Print name, embedding count, and tags of the current document collection.
  #
  # @return [ nil ]
  def collection_stats
    STDOUT.puts <<~EOT
      Current Collection
      Name: #{bold{@documents.collection}}
      #Embeddings: #{@documents.size}
      #Tags: #{@documents.tags.size}
      Tags: #{@documents.tags}
    EOT
    nil
  end

  # Print an overview of the current session: model (and options),
  # embedding state, cache, toggle switches, document policy, voice,
  # and the configured system prompt.
  #
  # @return [ nil ]
  def info
    STDOUT.puts "Current model is #{bold{@model}}."
    if @model_options.present?
      STDOUT.puts "  Options: #{JSON.pretty_generate(@model_options).gsub(/(?<!\A)^/, '  ')}"
    end
    @embedding.show
    if @embedding.on?
      STDOUT.puts "Embedding model is #{bold{@embedding_model}}"
      if @embedding_model_options.present?
        STDOUT.puts "  Options: #{JSON.pretty_generate(@embedding_model_options).gsub(/(?<!\A)^/, '  ')}"
      end
      STDOUT.puts "Text splitter is #{bold{config.embedding.splitter.name}}."
      collection_stats
    end
    STDOUT.puts "Documents database cache is #{@documents.nil? ? 'n/a' : bold{@documents.cache.class}}"
    @markdown.show
    @stream.show
    @location.show
    STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
    if @voice.on?
      STDOUT.puts "Using voice #{bold{@current_voice}} to speak."
    end
    @messages.show_system_prompt
    nil
  end

  # Print the list of slash commands available inside the chat.
  #
  # @return [ nil ]
  def display_chat_help
    STDOUT.puts <<~EOT
      /copy to copy last response to clipboard
      /paste to paste content
      /markdown toggle markdown output
      /stream toggle stream output
      /location toggle location submission
      /voice( change) toggle voice output or change the voice
      /list [n] list the last n / all conversation exchanges
      /clear clear the whole conversation
      /clobber clear the conversation, links, and collection
      /drop [n] drop the last n exchanges, defaults to 1
      /model change the model
      /system change system prompt (clears conversation)
      /regenerate the last answer message
      /collection( clear|change) change (default) collection or clear
      /info show information for current session
      /config output current configuration (#{@ollama_chat_config.filename.to_s.inspect})
      /document_policy pick a scan policy for document references
      /import source import the source's content
      /summarize [n] source summarize the source's content in n words
      /embedding toggle embedding paused or not
      /embed source embed the source's content
      /web [n] query query web search & return n or 1 results
      /links( clear) display (or clear) links used in the chat
      /save filename store conversation messages
      /load filename load conversation messages
      /quit to quit
      /help to view this help
    EOT
    nil
  end

  # Print command-line usage and return the exit status 0.
  #
  # @return [ Integer ] 0
  def usage
    STDOUT.puts <<~EOT
      Usage: #{progname} [OPTIONS]

      -f CONFIG config file to read
      -u URL the ollama base url, OLLAMA_URL
      -m MODEL the ollama model to chat with, OLLAMA_CHAT_MODEL
      -s SYSTEM the system prompt to use as a file, OLLAMA_CHAT_SYSTEM
      -c CHAT a saved chat conversation to load
      -C COLLECTION name of the collection used in this conversation
      -D DOCUMENT load document and add to embeddings collection (multiple)
      -M use (empty) MemoryCache for this chat session
      -E disable embeddings for this chat session
      -V display the current version number and quit
      -h this help

    EOT
    0
  end

  # Print program name and version and return the exit status 0.
  #
  # @return [ Integer ] 0
  def version
    STDOUT.puts "%s %s" % [ progname, OllamaChat::VERSION ]
    0
  end
end
@@ -0,0 +1,216 @@
1
class OllamaChat::MessageList
  include Term::ANSIColor
  include OllamaChat::MessageType

  # The initialize method sets up the message list for an OllamaChat session.
  #
  # @param chat [ OllamaChat::Chat ] the chat object that this message list
  # belongs to
  def initialize(chat)
    @chat = chat
    @messages = []
  end

  # The system prompt last set via #set_system_prompt (or nil).
  attr_reader :system

  # The underlying array of message objects.
  attr_reader :messages

  # Returns the number of messages stored in the message list.
  #
  # @return [ Integer ] The size of the message list.
  def size
    @messages.size
  end

  # The clear method removes all non-system messages from the message list.
  #
  # @return [ OllamaChat::MessageList ] self
  def clear
    @messages.delete_if { _1.role != 'system' }
    self
  end

  # The << operator appends a message to the list of messages and returns self.
  #
  # @param message [ Ollama::Message ] the message to append
  #
  # @return [ OllamaChat::MessageList ] self
  def <<(message)
    @messages << message
    self
  end

  # Returns the last message from the conversation.
  #
  # @return [ Ollama::Message ] The last message in the conversation, or nil if
  # there are no messages.
  def last
    @messages.last
  end

  # The second_last method returns the second-to-last message from the
  # conversation if there are more than one non-system messages.
  #
  # @return [ Ollama::Message ] the second-to-last message
  def second_last
    if @messages.reject { _1.role == 'system' }.size > 1
      @messages[-2]
    end
  end

  # The load_conversation method loads a conversation from a file and populates
  # the message list.
  #
  # @param filename [ String ] the path to the file containing the conversation
  #
  # @return [ OllamaChat::MessageList ] self, or nil if the file doesn't exist
  def load_conversation(filename)
    unless File.exist?(filename)
      # Fixed corrupted interpolation: report the missing filename.
      STDOUT.puts "File #{filename} doesn't exist. Choose another filename."
      return
    end
    @messages =
      File.open(filename, 'r') do |input|
        JSON(input.read).map { Ollama::Message.from_hash(_1) }
      end
    self
  end

  # The save_conversation method saves the current conversation to a file.
  # Refuses to overwrite an existing file.
  #
  # @param filename [ String ] the path where the conversation will be saved
  #
  # @return [ OllamaChat::MessageList ] self, or nil if the file already exists
  def save_conversation(filename)
    if File.exist?(filename)
      # Fixed corrupted interpolation: report the conflicting filename.
      STDOUT.puts "File #{filename} already exists. Choose another filename."
      return
    end
    File.open(filename, 'w') do |output|
      output.puts JSON(@messages)
    end
    self
  end

  # The list_conversation method displays the last n messages from the conversation.
  #
  # @param last [ Integer ] the number of messages to display (default: all)
  #
  # @return [ OllamaChat::MessageList ]
  def list_conversation(last = nil)
    last = (last || @messages.size).clamp(0, @messages.size)
    @messages[-last..-1].to_a.each do |m|
      # Role-specific ANSI colors: user orange, assistant blue, system pink.
      role_color = case m.role
                   when 'user' then 172
                   when 'assistant' then 111
                   when 'system' then 213
                   else 210
                   end
      content = m.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
      message_text = message_type(m.images) + " "
      message_text += bold { color(role_color) { m.role } }
      message_text += ":\n#{content}"
      m.images.full? { |images|
        message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
      }
      STDOUT.puts message_text
    end
    self
  end

  # The drop method removes the last n exchanges from the message list and returns the number of removed exchanges.
  #
  # @param n [ Integer ] the number of exchanges to remove
  #
  # @return [ Integer ] the number of removed exchanges, or 0 if there are no more exchanges to pop
  def drop(n)
    if @messages.reject { _1.role == 'system' }.size > 1
      n = n.to_i.clamp(1, Float::INFINITY)
      r = @messages.pop(2 * n) # an exchange is a user/assistant message pair
      m = r.size / 2
      STDOUT.puts "Popped the last #{m} exchanges."
      m
    else
      STDOUT.puts "No more exchanges you can pop."
      0
    end
  end

  # The set_system_prompt method sets the system prompt for the chat session.
  # This implies deleting all of the messages in the message list, so it only
  # contains the system prompt at the end.
  #
  # @param system [ String ] the new system prompt
  #
  # @return [ OllamaChat::MessageList ] the message list instance itself, allowing for chaining.
  def set_system_prompt(system)
    @system = system.to_s
    @messages.clear
    @messages << Ollama::Message.new(role: 'system', content: self.system)
    self
  end

  # The show_system_prompt method displays the system prompt configured for the
  # chat session.
  #
  # It retrieves the system prompt from the @system instance variable, parses
  # it using Kramdown::ANSI, and removes any trailing newlines. If the
  # resulting string is empty, the method returns immediately.
  #
  # Otherwise, it prints a formatted message to the console, including the
  # configured system prompt and its length in characters.
  #
  # @return [self, NilClass] nil if the system prompt is empty, otherwise self.
  def show_system_prompt
    system_prompt = Kramdown::ANSI.parse(system.to_s).gsub(/\n+\z/, '').full?
    system_prompt or return
    STDOUT.puts <<~EOT
      Configured system prompt is:
      #{system_prompt}

      System prompt length: #{bold{system_prompt.size}} characters.
    EOT
    self
  end

  # The to_ary method converts the message list into an array of
  # Ollama::Message objects. If location support was enabled and the message
  # list contains a system message, the system message is decorated with the
  # current location, time, and unit preferences.
  #
  # @return [Array] An array of Ollama::Message objects representing the
  # messages in the list.
  def to_ary
    location = at_location.full?
    @messages.map do |message|
      if message.role == 'system' && location
        content = message.content + "\n\n#{location}"
        Ollama::Message.new(role: message.role, content:)
      else
        message
      end
    end
  end

  # The at_location method returns the location/time/units information as a
  # string if location is enabled.
  #
  # @return [ String ] the location information, or "" when disabled
  def at_location
    if @chat.location.on?
      location_name = config.location.name
      location_decimal_degrees = config.location.decimal_degrees * ', '
      localtime = Time.now.iso8601
      units = config.location.units
      config.prompts.location % {
        location_name:, location_decimal_degrees:, localtime:, units:,
      }
    end.to_s
  end

  private

  # Delegate to the owning chat's configuration.
  def config
    @chat.config
  end
end
@@ -0,0 +1,5 @@
1
# Provides an emoji marker classifying a message by its attachments.
module OllamaChat::MessageType
  # Returns 📸 when +images+ is present, 📨 otherwise.
  def message_type(images)
    if images.present?
      '📸'
    else
      '📨'
    end
  end
end
@@ -0,0 +1,29 @@
1
# Checking for and pulling Ollama models before a chat starts.
module OllamaChat::ModelHandling
  # Query the server for +model+. Returns the model's system prompt string
  # (possibly empty) when the model exists, false when it is not found.
  def model_present?(model)
    ollama.show(name: model) { |info| return info.system.to_s }
  rescue Ollama::Errors::NotFoundError
    false
  end

  # Announce and start pulling +model+ from the remote registry.
  def pull_model_from_remote(model)
    STDOUT.puts "Model #{bold{model}} not found locally, attempting to pull it from remote now…"
    ollama.pull(name: model)
  end

  # Ensure +model+ is available locally, pulling it when necessary.
  # Returns the model's non-empty system prompt (or nil for an empty one);
  # exits the process when the model cannot be obtained or pulling fails.
  def pull_model_unless_present(model, options)
    found = model_present?(model)
    return found.full? if found
    pull_model_from_remote(model)
    found = model_present?(model)
    return found.full? if found
    STDOUT.puts "Model #{bold{model}} not found remotely. => Exiting."
    exit 1
  rescue Ollama::Errors::Error => error
    warn "Caught #{error.class} while pulling model: #{error} => Exiting."
    exit 1
  end
end
@@ -0,0 +1,103 @@
1
# Loads (and on first run creates) the ollama_chat configuration file in
# the user's XDG config directory.
class OllamaChat::OllamaChatConfig
  include ComplexConfig
  include FileUtils

  # Default YAML configuration (ERB-expanded by ComplexConfig) written to
  # the config directory when no config file exists yet.
  DEFAULT_CONFIG = <<~EOT
    ---
    url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
    proxy: null # http://localhost:8080
    model:
      name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
      options:
        num_ctx: 8192
    location:
      enabled: false
      name: Berlin
      decimal_degrees: [ 52.514127, 13.475211 ]
      units: SI (International System of Units) # or USCS (United States Customary System)
    prompts:
      embed: "This source was now embedded: %{source}"
      summarize: |
        Generate an abstract summary of the content in this document using
        %{words} words:

        %{source_content}
      web: |
        Answer the query %{query} using these sources and summaries:

        %{results}
    system_prompts:
      default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
    voice:
      enabled: false
      default: Samantha
      list: <%= `say -v ? 2>/dev/null`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
    markdown: true
    stream: true
    document_policy: importing
    embedding:
      enabled: true
      model:
        name: mxbai-embed-large
        embedding_length: 1024
        options: {}
        # Retrieval prompt template:
        prompt: 'Represent this sentence for searching relevant passages: %s'
      batch_size: 10
      database_filename: null # ':memory:'
      collection: <%= ENV['OLLAMA_CHAT_COLLECTION'] %>
      found_texts_size: 4096
      found_texts_count: 10
      splitter:
        name: RecursiveCharacter
        chunk_size: 1024
    cache: Documentrix::Documents::SQLiteCache
    redis:
      documents:
        url: <%= ENV.fetch('REDIS_URL', 'null') %>
      expiring:
        url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
        ex: 86400
    debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
    ssl_no_verify: []
    copy: pbcopy
  EOT

  # Read the configuration from +filename+ (or the default path). When the
  # default config file is missing, write DEFAULT_CONFIG there once and
  # retry; a second failure (or a missing non-default file) is re-raised.
  #
  # @param filename [ String, nil ] path to the config file, nil for default
  def initialize(filename = nil)
    @filename = filename || default_path
    unless File.directory?(cache_dir_path)
      mkdir_p cache_dir_path.to_s
    end
    @config = Provider.config(@filename, '⚙️')
    # NOTE: assigned only after a successful load on purpose — `retry`
    # restarts the method body, so setting it earlier would reset the flag
    # and could loop forever. Before first assignment `retried` is nil.
    retried = false
  rescue ConfigurationFileMissing
    if @filename == default_path && !retried
      retried = true
      mkdir_p config_dir_path.to_s
      File.secure_write(default_path, DEFAULT_CONFIG)
      retry
    else
      raise
    end
  end

  # Path of the configuration file in effect.
  attr_reader :filename

  # The loaded ComplexConfig settings object.
  attr_reader :config

  # Default config file location under the XDG config home.
  def default_path
    config_dir_path + 'config.yml'
  end

  # XDG config directory for ollama_chat.
  def config_dir_path
    XDG.new.config_home + 'ollama_chat'
  end

  # XDG cache directory for ollama_chat.
  def cache_dir_path
    XDG.new.cache_home + 'ollama_chat'
  end

  # Location of the documents database inside the cache directory.
  def database_path
    cache_dir_path + 'documents.db'
  end
end