ollama_chat 0.0.4 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGES.md +15 -0
- data/VERSION +1 -1
- data/lib/ollama_chat/chat.rb +32 -17
- data/lib/ollama_chat/follow_chat.rb +1 -1
- data/lib/ollama_chat/ollama_chat_config/default_config.yml +2 -2
- data/lib/ollama_chat/version.rb +1 -1
- data/ollama_chat.gemspec +4 -4
- data/spec/ollama_chat/chat_spec.rb +13 -0
- data/spec/ollama_chat/web_searching_spec.rb +7 -3
- data/spec/spec_helper.rb +2 -0
- metadata +4 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fc27a8640616134647d04ee2d9b806e41806a55dc8a492e24bcf90509a84f8c8
+  data.tar.gz: b560ce8230eb463f38881605c29f73cab6a601dc9194b79cacec2ce443b1a4bc
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 92e51935010fcace611baa132f5619b2894da2509854eb20d428e2a5d74b6bc658fa90edd577633a887b381f24861c7d3c580a79891e7cfc348a11b6b9191a08
+  data.tar.gz: d74878f18ff236f99c75fee50b40a91c69a3aaa245c2c681d33f68d8869d5a745afb7afcc8219e8cb4a1f443fe3c055661a3a25b5f03f6007fa8f9042ffaa0d6
data/CHANGES.md
CHANGED
@@ -1,5 +1,20 @@
 # Changes
 
+## 2025-03-22 v0.0.5
+
+* Updated default config to use environment variable for Searxng URL:
+  * Changed `url` field in `searxng` section of `default_config.yml`.
+  * Replaced hardcoded URL with expression that fetches value from `OLLAMA_SEARXNG_URL` environment variable.
+* Handle Ollama server disconnection:
+  * Added error handling for `Ollama::Errors::TimeoutError`.
+  * Print error message when connection is lost.
+* Output last exchange of a loaded conversation:
+  * Add attribute reader to `messages` in `lib/ollama_chat/chat.rb`.
+  * Replace `@messages` with `messages` in method calls throughout the class.
+  * Update conversation listing, clearing, dropping, saving, loading methods.
+  * Refactor interaction with user logic.
+  * Update tests in `spec/ollama_chat/chat_spec.rb`.
+
 ## 2025-02-21 v0.0.4
 
 * Added support for web searching with SearXNG:
data/VERSION
CHANGED
@@ -1 +1 @@
-0.0.4
+0.0.5
data/lib/ollama_chat/chat.rb
CHANGED
@@ -46,14 +46,14 @@ class OllamaChat::Chat
     embedding_enabled.set(config.embedding.enabled && !@opts[?E])
     @messages = OllamaChat::MessageList.new(self)
     if @opts[?c]
-      @messages.load_conversation(@opts[?c])
+      messages.load_conversation(@opts[?c])
     else
       default = config.system_prompts.default? || model_system
       if @opts[?s] =~ /\A\?/
         change_system_prompt(default, system: @opts[?s])
       else
         system = OllamaChat::Utils::FileArgument.get_file_argument(@opts[?s], default:)
-        system.present? and @messages.set_system_prompt(system)
+        system.present? and messages.set_system_prompt(system)
       end
     end
     @documents = setup_documents
@@ -66,6 +66,8 @@ class OllamaChat::Chat
 
   attr_reader :documents
 
+  attr_reader :messages
+
   def links
     @links ||= Set.new
   end
@@ -84,8 +86,17 @@ class OllamaChat::Chat
 
   def start
     info
+    if messages.size > 1
+      messages.list_conversation(2)
+    end
     STDOUT.puts "\nType /help to display the chat help."
 
+    interact_with_user
+  end
+
+  private
+
+  def interact_with_user
     loop do
       parse_content = true
       input_prompt = bold { color(172) { message_type(@images) + " user" } } + bold { "> " }
@@ -115,15 +126,15 @@ class OllamaChat::Chat
         next
       when %r(^/list(?:\s+(\d*))?$)
        last = 2 * $1.to_i if $1
-        @messages.list_conversation(last)
+        messages.list_conversation(last)
        next
      when %r(^/clear$)
-        @messages.clear
+        messages.clear
        STDOUT.puts "Cleared messages."
        next
      when %r(^/clobber$)
        if ask?(prompt: 'Are you sure to clear messages and collection? (y/n) ') =~ /\Ay/i
-          @messages.clear
+          messages.clear
          @documents.clear
          links.clear
          STDOUT.puts "Cleared messages and collection #{bold{@documents.collection}}."
@@ -132,8 +143,8 @@ class OllamaChat::Chat
        end
        next
      when %r(^/drop(?:\s+(\d*))?$)
-        @messages.drop($1)
-        @messages.list_conversation(2)
+        messages.drop($1)
+        messages.list_conversation(2)
        next
      when %r(^/model$)
        @model = choose_model('', @model)
@@ -143,9 +154,9 @@ class OllamaChat::Chat
        info
        next
      when %r(^/regenerate$)
-        if content = @messages.second_last&.content
+        if content = messages.second_last&.content
          content.gsub!(/\nConsider these chunks for your answer.*\z/, '')
-          @messages.drop(2)
+          messages.drop(2)
        else
          STDOUT.puts "Not enough messages in this conversation."
          redo
@@ -212,7 +223,7 @@ class OllamaChat::Chat
          map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
        content = config.prompts.web % { query:, results: }
      when %r(^/save\s+(.+)$)
-        @messages.save_conversation($1)
+        messages.save_conversation($1)
        STDOUT.puts "Saved conversation to #$1."
        next
      when %r(^/links(?:\s+(clear))?$)
@@ -252,7 +263,10 @@ class OllamaChat::Chat
        end
        next
      when %r(^/load\s+(.+)$)
-        @messages.load_conversation($1)
+        messages.load_conversation($1)
+        if messages.size > 1
+          messages.list_conversation(2)
+        end
        STDOUT.puts "Loaded conversation from #$1."
        next
      when %r(^/config$)
@@ -303,16 +317,16 @@ class OllamaChat::Chat
        end
      end
 
-      @messages << Ollama::Message.new(role: 'user', content:, images: @images.dup)
+      messages << Ollama::Message.new(role: 'user', content:, images: @images.dup)
      @images.clear
      handler = OllamaChat::FollowChat.new(
        chat: self,
-        messages: @messages,
+        messages:,
        voice: (@current_voice if voice.on?)
      )
      ollama.chat(
        model: @model,
-        messages: @messages,
+        messages:,
        options: @model_options,
        stream: stream.on?,
        &handler
@@ -326,16 +340,16 @@ class OllamaChat::Chat
        end
        [ link, record.tags.first ]
      }.uniq.map { |l, t| hyperlink(l, t) }.join(' ')
-      config.debug and jj @messages.to_ary
+      config.debug and jj messages.to_ary
      end
+    rescue Ollama::Errors::TimeoutError
+      STDOUT.puts "#{bold('Error')}: Currently lost connection to ollama server and cannot send command."
    rescue Interrupt
      STDOUT.puts "Type /quit to quit."
    end
    0
  end
 
-  private
-
  def setup_documents
    if embedding.on?
      @embedding_model = config.embedding.model.name
@@ -346,6 +360,7 @@ class OllamaChat::Chat
      ollama:,
      model: @embedding_model,
      model_options: config.embedding.model.options,
+      embedding_length: config.embedding.model.embedding_length,
      database_filename: config.embedding.database_filename || @ollama_chat_config.database_path,
      collection:,
      cache: configure_cache,
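The most visible behavioural change above is the new `rescue Ollama::Errors::TimeoutError` clause on the chat loop. Below is a minimal sketch of that pattern in isolation, assuming the ollama gem is loaded via `require 'ollama'` and simplifying the output (no `bold` colouring); only the rescued exception class, the `chat` keyword arguments, and the error text come from the diff itself.

```ruby
require 'ollama'

# Sketch only: `ollama`, `model`, and `messages` stand in for the chat's own state.
def send_chat(ollama, model, messages)
  ollama.chat(model:, messages:, stream: false)
rescue Ollama::Errors::TimeoutError
  # Printed instead of crashing when the Ollama server stops responding.
  STDOUT.puts "Error: Currently lost connection to ollama server and cannot send command."
end
```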
data/lib/ollama_chat/follow_chat.rb
CHANGED
@@ -52,7 +52,7 @@ class OllamaChat::FollowChat
       prompt_eval_rate: bold { "%.2f c/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
       total_duration: Tins::Duration.new(response.total_duration / 1e9),
       load_duration: Tins::Duration.new(response.load_duration / 1e9),
-    }.map { _1 *
+    }.map { _1 * ?= } * ' '
     '📊 ' + color(111) {
       Kramdown::ANSI::Width.wrap(stats_text, percentage: 90).gsub(/(?<!\A)^/, ' ')
     }
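The changed line is the stats formatter: mapping over a hash yields `[key, value]` pairs, `pair * ?=` joins each pair with an equals sign, and the trailing `* ' '` joins the resulting strings with spaces. A toy illustration with made-up values:

```ruby
# Made-up stats; only the formatting idiom matches the changed line above.
stats = { eval_rate: '42.00 t/s', total_duration: '3.14s' }
puts stats.map { _1 * ?= } * ' '
# => eval_rate=42.00 t/s total_duration=3.14s
```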
data/lib/ollama_chat/ollama_chat_config/default_config.yml
CHANGED
@@ -21,7 +21,7 @@ prompts:
     Answer the the query %{query} using these sources and summaries:
 
     %{results}
-  location: You are at %{location_name}
+  location: You are at %{location_name}, %{location_decimal_degrees}, on %{localtime}, preferring %{units}
 system_prompts:
   default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
   assistant: You are a helpful assistant.
@@ -66,4 +66,4 @@ web_search:
   duckduckgo:
     url: 'https://www.duckduckgo.com/html/?q=%{query}'
   searxng:
-    url: 'http://localhost:8088/search?q=%{query}&format=json'
+    url: <%= ENV.fetch('OLLAMA_SEARXNG_URL', 'http://localhost:8088/search?q=%{query}&language=en&format=json') %>
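The `searxng` `url` is now an ERB expression, so it can be overridden with the `OLLAMA_SEARXNG_URL` environment variable while the old localhost URL remains the fallback. A rough sketch of how such a value resolves, assuming the config file is rendered with ERB before YAML parsing (which the `<%= %>` syntax implies) and that the `%{query}` placeholder is filled in later via `String#%`, as in the prompts section; the override URL is made up:

```ruby
require 'erb'
require 'yaml'

yaml = <<~CONFIG
  searxng:
    url: <%= ENV.fetch('OLLAMA_SEARXNG_URL', 'http://localhost:8088/search?q=%{query}&language=en&format=json') %>
CONFIG

# Hypothetical override, as a user might export it in their shell.
ENV['OLLAMA_SEARXNG_URL'] = 'https://searx.example.com/search?q=%{query}&format=json'

config = YAML.safe_load(ERB.new(yaml).result)
puts config['searxng']['url'] % { query: 'foo' }
# => https://searx.example.com/search?q=foo&format=json
```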
data/lib/ollama_chat/version.rb
CHANGED
data/ollama_chat.gemspec
CHANGED
@@ -1,14 +1,14 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama_chat 0.0.4 ruby lib
+# stub: ollama_chat 0.0.5 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "ollama_chat".freeze
-  s.version = "0.0.4".freeze
+  s.version = "0.0.5".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
   s.authors = ["Florian Frank".freeze]
-  s.date = "2025-02-21"
+  s.date = "2025-03-22"
   s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
   s.email = "flori@ping.de".freeze
   s.executables = ["ollama_chat".freeze]
@@ -24,7 +24,7 @@ Gem::Specification.new do |s|
 
   s.specification_version = 4
 
-  s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.
+  s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.20".freeze])
   s.add_development_dependency(%q<all_images>.freeze, ["~> 0.6".freeze])
   s.add_development_dependency(%q<rspec>.freeze, ["~> 3.2".freeze])
   s.add_development_dependency(%q<kramdown>.freeze, ["~> 2.0".freeze])
data/spec/ollama_chat/chat_spec.rb
CHANGED
@@ -15,6 +15,19 @@ RSpec.describe OllamaChat::Chat do
     expect(chat).to be_a described_class
   end
 
+  context 'loading conversations' do
+    let :argv do
+      %w[ -C test -c ] << asset('conversation.json')
+    end
+
+    it 'dispays the last exchange of the converstation' do
+      expect(chat).to receive(:interact_with_user).and_return 0
+      expect(STDOUT).to receive(:puts).at_least(1)
+      expect(chat.messages).to receive(:list_conversation)
+      chat.start
+    end
+  end
+
   describe OllamaChat::DocumentCache do
     context 'with MemoryCache' do
       let :argv do
data/spec/ollama_chat/web_searching_spec.rb
CHANGED
@@ -8,9 +8,11 @@ RSpec.describe OllamaChat::WebSearching do
   connect_to_ollama_server
 
   it 'can search web with duckduckgo' do
+    url = 'https://www.duckduckgo.com/html/?q=foo'
+    chat.config.web_search.engines.duckduckgo.attributes_update(url:)
     expect(chat).to receive(:search_engine).and_return 'duckduckgo'
-    stub_request(:get, 'https://www.duckduckgo.com/html/?q=foo').
-      with(headers: { 'Host'=>'www.duckduckgo.com' }).
+    stub_request(:get, url).
+      with(headers: { 'Host'=> 'www.duckduckgo.com' }).
       to_return(status: 200, body: asset_content('duckduckgo.html'), headers: {})
     expect(chat.search_web('foo').first.to_s).to eq(
       'https://en.wikipedia.org/wiki/Foo_Fighters'
@@ -18,8 +20,10 @@ RSpec.describe OllamaChat::WebSearching do
   end
 
   it 'can search web with searxng' do
+    url = 'http://localhost:8088/search?format=json&language=en&q=foo'
+    chat.config.web_search.engines.searxng.attributes_update(url:)
     expect(chat).to receive(:search_engine).and_return 'searxng'
-    stub_request(:get, 'http://localhost:8088/search?q=foo&format=json').
+    stub_request(:get, url).
       with(headers: { 'Host'=>'localhost:8088' }).
       to_return(status: 200, body: asset_content('searxng.json'), headers: {})
     expect(chat.search_web('foo').first.to_s).to eq(
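A note on the stubs above: the specs now pin the engine URL explicitly via `attributes_update(url:)` because the SearXNG default has become ENV-dependent, and WebMock matches query parameters regardless of order, which is why the stubbed `?format=json&language=en&q=foo` matches a request built from the config's `?q=%{query}&language=en&format=json` template. A standalone illustration (not part of the gem's specs):

```ruby
require 'webmock'
require 'net/http'
require 'uri'
include WebMock::API
WebMock.enable!

# Parameter order in the stub differs from the request below; WebMock still matches.
stub_request(:get, 'http://localhost:8088/search?format=json&language=en&q=foo')
  .to_return(status: 200, body: '{}')

puts Net::HTTP.get(URI('http://localhost:8088/search?q=foo&language=en&format=json'))
# => {}
```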
data/spec/spec_helper.rb
CHANGED
metadata
CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: ollama_chat
 version: !ruby/object:Gem::Version
-  version: 0.0.4
+  version: 0.0.5
 platform: ruby
 authors:
 - Florian Frank
 bindir: bin
 cert_chain: []
-date: 2025-02-21 00:00:00.000000000 Z
+date: 2025-03-22 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: gem_hadar
@@ -15,14 +15,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.20'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.20'
 - !ruby/object:Gem::Dependency
   name: all_images
   requirement: !ruby/object:Gem::Requirement