ollama_chat 0.0.4 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: a3f84805f1f6ef58ce6fd25f0f4a477b919c7afa72d909bf3c48ca8fa643d1dd
- data.tar.gz: 61ce65c1f30464f4b30c0a4228a88aa9f4796a15407820d84db5c6240527d014
+ metadata.gz: fc27a8640616134647d04ee2d9b806e41806a55dc8a492e24bcf90509a84f8c8
+ data.tar.gz: b560ce8230eb463f38881605c29f73cab6a601dc9194b79cacec2ce443b1a4bc
  SHA512:
- metadata.gz: 68bff70a1b439a0a01ec0daf7c8a643c6814450f8dbdd8368d92a43941d774034fe34a616f237ecec66c59ab1ed97585eda5e39d857625fd8f5a9a61d8d3f7b0
- data.tar.gz: 20ec1a209f3e23374f0b3ee8b0bb1b1757fbb412500c823f1b8117f19ddc323dec2b9d1715fb9e3726549885dee08d4ccf00e25f1036a5af57699cb3f42b02c6
+ metadata.gz: 92e51935010fcace611baa132f5619b2894da2509854eb20d428e2a5d74b6bc658fa90edd577633a887b381f24861c7d3c580a79891e7cfc348a11b6b9191a08
+ data.tar.gz: d74878f18ff236f99c75fee50b40a91c69a3aaa245c2c681d33f68d8869d5a745afb7afcc8219e8cb4a1f443fe3c055661a3a25b5f03f6007fa8f9042ffaa0d6
data/CHANGES.md CHANGED
@@ -1,5 +1,20 @@
  # Changes

+ ## 2025-03-22 v0.0.5
+
+ * Updated default config to use environment variable for Searxng URL:
+   * Changed `url` field in `searxng` section of `default_config.yml`.
+   * Replaced hardcoded URL with expression that fetches value from `OLLAMA_SEARXNG_URL` environment variable.
+ * Handle Ollama server disconnection:
+   * Added error handling for `Ollama::Errors::TimeoutError`.
+   * Print error message when connection is lost.
+ * Output last exchange of a loaded conversation:
+   * Add attribute reader to `messages` in `lib/ollama_chat/chat.rb`.
+   * Replace `@messages` with `messages` in method calls throughout the class.
+   * Update conversation listing, clearing, dropping, saving, loading methods.
+   * Refactor interaction with user logic.
+   * Update tests in `spec/ollama_chat/chat_spec.rb`.
+
  ## 2025-02-21 v0.0.4

  * Added support for web searching with SearXNG:
data/VERSION CHANGED
@@ -1 +1 @@
- 0.0.4
+ 0.0.5
data/lib/ollama_chat/chat.rb CHANGED
@@ -46,14 +46,14 @@ class OllamaChat::Chat
  embedding_enabled.set(config.embedding.enabled && !@opts[?E])
  @messages = OllamaChat::MessageList.new(self)
  if @opts[?c]
- @messages.load_conversation(@opts[?c])
+ messages.load_conversation(@opts[?c])
  else
  default = config.system_prompts.default? || model_system
  if @opts[?s] =~ /\A\?/
  change_system_prompt(default, system: @opts[?s])
  else
  system = OllamaChat::Utils::FileArgument.get_file_argument(@opts[?s], default:)
- system.present? and @messages.set_system_prompt(system)
+ system.present? and messages.set_system_prompt(system)
  end
  end
  @documents = setup_documents
@@ -66,6 +66,8 @@ class OllamaChat::Chat

  attr_reader :documents

+ attr_reader :messages
+
  def links
  @links ||= Set.new
  end
@@ -84,8 +86,17 @@ class OllamaChat::Chat

  def start
  info
+ if messages.size > 1
+ messages.list_conversation(2)
+ end
  STDOUT.puts "\nType /help to display the chat help."

+ interact_with_user
+ end
+
+ private
+
+ def interact_with_user
  loop do
  parse_content = true
  input_prompt = bold { color(172) { message_type(@images) + " user" } } + bold { "> " }
@@ -115,15 +126,15 @@ class OllamaChat::Chat
  next
  when %r(^/list(?:\s+(\d*))?$)
  last = 2 * $1.to_i if $1
- @messages.list_conversation(last)
+ messages.list_conversation(last)
  next
  when %r(^/clear$)
- @messages.clear
+ messages.clear
  STDOUT.puts "Cleared messages."
  next
  when %r(^/clobber$)
  if ask?(prompt: 'Are you sure to clear messages and collection? (y/n) ') =~ /\Ay/i
- @messages.clear
+ messages.clear
  @documents.clear
  links.clear
  STDOUT.puts "Cleared messages and collection #{bold{@documents.collection}}."
@@ -132,8 +143,8 @@ class OllamaChat::Chat
  end
  next
  when %r(^/drop(?:\s+(\d*))?$)
- @messages.drop($1)
- @messages.list_conversation(2)
+ messages.drop($1)
+ messages.list_conversation(2)
  next
  when %r(^/model$)
  @model = choose_model('', @model)
@@ -143,9 +154,9 @@ class OllamaChat::Chat
  info
  next
  when %r(^/regenerate$)
- if content = @messages.second_last&.content
+ if content = messages.second_last&.content
  content.gsub!(/\nConsider these chunks for your answer.*\z/, '')
- @messages.drop(2)
+ messages.drop(2)
  else
  STDOUT.puts "Not enough messages in this conversation."
  redo
@@ -212,7 +223,7 @@ class OllamaChat::Chat
  map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
  content = config.prompts.web % { query:, results: }
  when %r(^/save\s+(.+)$)
- @messages.save_conversation($1)
+ messages.save_conversation($1)
  STDOUT.puts "Saved conversation to #$1."
  next
  when %r(^/links(?:\s+(clear))?$)
@@ -252,7 +263,10 @@ class OllamaChat::Chat
  end
  next
  when %r(^/load\s+(.+)$)
- @messages.load_conversation($1)
+ messages.load_conversation($1)
+ if messages.size > 1
+ messages.list_conversation(2)
+ end
  STDOUT.puts "Loaded conversation from #$1."
  next
  when %r(^/config$)
@@ -303,16 +317,16 @@ class OllamaChat::Chat
  end
  end

- @messages << Ollama::Message.new(role: 'user', content:, images: @images.dup)
+ messages << Ollama::Message.new(role: 'user', content:, images: @images.dup)
  @images.clear
  handler = OllamaChat::FollowChat.new(
  chat: self,
- messages: @messages,
+ messages:,
  voice: (@current_voice if voice.on?)
  )
  ollama.chat(
  model: @model,
- messages: @messages,
+ messages:,
  options: @model_options,
  stream: stream.on?,
  &handler
@@ -326,16 +340,16 @@ class OllamaChat::Chat
  end
  [ link, record.tags.first ]
  }.uniq.map { |l, t| hyperlink(l, t) }.join(' ')
- config.debug and jj @messages.to_ary
+ config.debug and jj messages.to_ary
  end
+ rescue Ollama::Errors::TimeoutError
+ STDOUT.puts "#{bold('Error')}: Currently lost connection to ollama server and cannot send command."
  rescue Interrupt
  STDOUT.puts "Type /quit to quit."
  end
  0
  end

- private
-
  def setup_documents
  if embedding.on?
  @embedding_model = config.embedding.model.name
@@ -346,6 +360,7 @@ class OllamaChat::Chat
  ollama:,
  model: @embedding_model,
  model_options: config.embedding.model.options,
+ embedding_length: config.embedding.model.embedding_length,
  database_filename: config.embedding.database_filename || @ollama_chat_config.database_path,
  collection: ,
  cache: configure_cache,
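
The chat.rb changes above amount to rescuing the ollama gem's timeout error around the conversation loop and printing a notice instead of crashing. Below is a minimal standalone sketch of that pattern, assuming a local Ollama server; the base URL, model name, and prompt are illustrative, and the real class rescues inside its input loop rather than around a single call.

```ruby
require 'ollama'

# Sketch of the 0.0.5 disconnect handling: rescue Ollama::Errors::TimeoutError
# around a chat call and report the lost connection. Base URL, model, and
# message content below are assumptions, not taken from the diff.
ollama   = Ollama::Client.new(base_url: 'http://localhost:11434')
messages = [ Ollama::Message.new(role: 'user', content: 'Hello') ]

begin
  ollama.chat(model: 'llama3.1', messages:, stream: false) do |response|
    # process the response (ollama_chat delegates this to OllamaChat::FollowChat)
  end
rescue Ollama::Errors::TimeoutError
  STDOUT.puts "Error: Currently lost connection to ollama server and cannot send command."
end
```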
data/lib/ollama_chat/follow_chat.rb CHANGED
@@ -52,7 +52,7 @@ class OllamaChat::FollowChat
  prompt_eval_rate: bold { "%.2f c/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
  total_duration: Tins::Duration.new(response.total_duration / 1e9),
  load_duration: Tins::Duration.new(response.load_duration / 1e9),
- }.map { _1 * '=' } * ' '
+ }.map { _1 * ?= } * ' '
  '📊 ' + color(111) {
  Kramdown::ANSI::Width.wrap(stats_text, percentage: 90).gsub(/(?<!\A)^/, ' ')
  }
data/lib/ollama_chat/ollama_chat_config/default_config.yml CHANGED
@@ -21,7 +21,7 @@ prompts:
  Answer the the query %{query} using these sources and summaries:

  %{results}
- location: You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}
+ location: You are at %{location_name}, %{location_decimal_degrees}, on %{localtime}, preferring %{units}
  system_prompts:
  default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
  assistant: You are a helpful assistant.
@@ -66,4 +66,4 @@ web_search:
  duckduckgo:
  url: 'https://www.duckduckgo.com/html/?q=%{query}'
  searxng:
- url: 'http://localhost:8088/search?q=%{query}&format=json'
+ url: <%= ENV.fetch('OLLAMA_SEARXNG_URL', 'http://localhost:8088/search?q=%{query}&language=en&format=json') %>
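
The searxng entry above replaces a hardcoded URL with an ERB expression that is evaluated when the config is loaded. A minimal sketch of the resulting lookup, assuming the `%{query}` placeholder is later filled with Ruby's `String#%` as elsewhere in the config:

```ruby
# ENV.fetch falls back to its second argument when OLLAMA_SEARXNG_URL is
# unset, so the previous localhost endpoint remains the effective default.
url = ENV.fetch(
  'OLLAMA_SEARXNG_URL',
  'http://localhost:8088/search?q=%{query}&language=en&format=json'
)
url % { query: 'foo' }
# => "http://localhost:8088/search?q=foo&language=en&format=json" (variable unset)
```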
data/lib/ollama_chat/version.rb CHANGED
@@ -1,6 +1,6 @@
  module OllamaChat
  # OllamaChat version
- VERSION = '0.0.4'
+ VERSION = '0.0.5'
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama_chat.gemspec CHANGED
@@ -1,14 +1,14 @@
  # -*- encoding: utf-8 -*-
- # stub: ollama_chat 0.0.4 ruby lib
+ # stub: ollama_chat 0.0.5 ruby lib

  Gem::Specification.new do |s|
  s.name = "ollama_chat".freeze
- s.version = "0.0.4".freeze
+ s.version = "0.0.5".freeze

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Florian Frank".freeze]
- s.date = "2025-02-21"
+ s.date = "2025-03-22"
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
  s.email = "flori@ping.de".freeze
  s.executables = ["ollama_chat".freeze]
@@ -24,7 +24,7 @@ Gem::Specification.new do |s|

  s.specification_version = 4

- s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.19".freeze])
+ s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.20".freeze])
  s.add_development_dependency(%q<all_images>.freeze, ["~> 0.6".freeze])
  s.add_development_dependency(%q<rspec>.freeze, ["~> 3.2".freeze])
  s.add_development_dependency(%q<kramdown>.freeze, ["~> 2.0".freeze])
data/spec/ollama_chat/chat_spec.rb CHANGED
@@ -15,6 +15,19 @@ RSpec.describe OllamaChat::Chat do
  expect(chat).to be_a described_class
  end

+ context 'loading conversations' do
+ let :argv do
+ %w[ -C test -c ] << asset('conversation.json')
+ end
+
+ it 'dispays the last exchange of the converstation' do
+ expect(chat).to receive(:interact_with_user).and_return 0
+ expect(STDOUT).to receive(:puts).at_least(1)
+ expect(chat.messages).to receive(:list_conversation)
+ chat.start
+ end
+ end
+
  describe OllamaChat::DocumentCache do
  context 'with MemoryCache' do
  let :argv do
data/spec/ollama_chat/web_searching_spec.rb CHANGED
@@ -8,9 +8,11 @@ RSpec.describe OllamaChat::WebSearching do
  connect_to_ollama_server

  it 'can search web with duckduckgo' do
+ url = 'https://www.duckduckgo.com/html/?q=foo'
+ chat.config.web_search.engines.duckduckgo.attributes_update(url:)
  expect(chat).to receive(:search_engine).and_return 'duckduckgo'
- stub_request(:get, 'https://www.duckduckgo.com/html/?q=foo').
- with(headers: { 'Host'=>'www.duckduckgo.com' }).
+ stub_request(:get, url).
+ with(headers: { 'Host'=> 'www.duckduckgo.com' }).
  to_return(status: 200, body: asset_content('duckduckgo.html'), headers: {})
  expect(chat.search_web('foo').first.to_s).to eq(
  'https://en.wikipedia.org/wiki/Foo_Fighters'
@@ -18,8 +20,10 @@
  end

  it 'can search web with searxng' do
+ url = 'http://localhost:8088/search?format=json&language=en&q=foo'
+ chat.config.web_search.engines.searxng.attributes_update(url:)
  expect(chat).to receive(:search_engine).and_return 'searxng'
- stub_request(:get, 'http://localhost:8088/search?format=json&language=en&q=foo').
+ stub_request(:get, url).
  with(headers: { 'Host'=>'localhost:8088' }).
  to_return(status: 200, body: asset_content('searxng.json'), headers: {})
  expect(chat.search_web('foo').first.to_s).to eq(
data/spec/spec_helper.rb CHANGED
@@ -14,6 +14,8 @@ require 'webmock/rspec'
  WebMock.disable_net_connect!
  require 'ollama_chat'

+ ComplexConfig::Provider.deep_freeze = false
+
  def asset(name)
  File.join(__dir__, 'assets', name)
  end
metadata CHANGED
@@ -1,13 +1,13 @@
  --- !ruby/object:Gem::Specification
  name: ollama_chat
  version: !ruby/object:Gem::Version
- version: 0.0.4
+ version: 0.0.5
  platform: ruby
  authors:
  - Florian Frank
  bindir: bin
  cert_chain: []
- date: 2025-02-21 00:00:00.000000000 Z
+ date: 2025-03-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: gem_hadar
@@ -15,14 +15,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.19'
+ version: '1.20'
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.19'
+ version: '1.20'
  - !ruby/object:Gem::Dependency
  name: all_images
  requirement: !ruby/object:Gem::Requirement