ollama_chat 0.0.3 → 0.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/docker-compose.yml CHANGED
@@ -1,10 +1,31 @@
1
1
  services:
2
2
  redis:
3
+ container_name: redis
3
4
  image: valkey/valkey:7.2.8-alpine
4
5
  restart: unless-stopped
5
6
  ports: [ "127.0.0.1:9736:6379" ]
6
7
  volumes:
7
8
  - "redis-data:/data:delegated"
8
9
  - "./redis/redis.conf:/etc/redis.conf"
10
+ searxng:
11
+ container_name: searxng
12
+ image: searxng/searxng:latest
13
+ ports:
14
+ - "127.0.0.1:8088:8080"
15
+ restart: unless-stopped
16
+ cap_drop:
17
+ - ALL
18
+ cap_add:
19
+ - CHOWN
20
+ - SETGID
21
+ - SETUID
22
+ - DAC_OVERRIDE
23
+ logging:
24
+ driver: "json-file"
25
+ options:
26
+ max-size: "1m"
27
+ max-file: "1"
28
+ volumes:
29
+ - "./config/searxng:/etc/searxng"
9
30
  volumes:
10
31
  redis-data:
@@ -19,6 +19,7 @@ class OllamaChat::Chat
19
19
  include OllamaChat::ModelHandling
20
20
  include OllamaChat::Parsing
21
21
  include OllamaChat::SourceFetching
22
+ include OllamaChat::WebSearching
22
23
  include OllamaChat::Dialog
23
24
  include OllamaChat::Information
24
25
  include OllamaChat::Clipboard
@@ -45,14 +46,14 @@ class OllamaChat::Chat
45
46
  embedding_enabled.set(config.embedding.enabled && !@opts[?E])
46
47
  @messages = OllamaChat::MessageList.new(self)
47
48
  if @opts[?c]
48
- @messages.load_conversation(@opts[?c])
49
+ messages.load_conversation(@opts[?c])
49
50
  else
50
51
  default = config.system_prompts.default? || model_system
51
52
  if @opts[?s] =~ /\A\?/
52
53
  change_system_prompt(default, system: @opts[?s])
53
54
  else
54
55
  system = OllamaChat::Utils::FileArgument.get_file_argument(@opts[?s], default:)
55
- system.present? and @messages.set_system_prompt(system)
56
+ system.present? and messages.set_system_prompt(system)
56
57
  end
57
58
  end
58
59
  @documents = setup_documents
@@ -65,6 +66,8 @@ class OllamaChat::Chat
65
66
 
66
67
  attr_reader :documents
67
68
 
69
+ attr_reader :messages
70
+
68
71
  def links
69
72
  @links ||= Set.new
70
73
  end
@@ -83,8 +86,17 @@ class OllamaChat::Chat
83
86
 
84
87
  def start
85
88
  info
89
+ if messages.size > 1
90
+ messages.list_conversation(2)
91
+ end
86
92
  STDOUT.puts "\nType /help to display the chat help."
87
93
 
94
+ interact_with_user
95
+ end
96
+
97
+ private
98
+
99
+ def interact_with_user
88
100
  loop do
89
101
  parse_content = true
90
102
  input_prompt = bold { color(172) { message_type(@images) + " user" } } + bold { "> " }
@@ -114,15 +126,15 @@ class OllamaChat::Chat
114
126
  next
115
127
  when %r(^/list(?:\s+(\d*))?$)
116
128
  last = 2 * $1.to_i if $1
117
- @messages.list_conversation(last)
129
+ messages.list_conversation(last)
118
130
  next
119
131
  when %r(^/clear$)
120
- @messages.clear
132
+ messages.clear
121
133
  STDOUT.puts "Cleared messages."
122
134
  next
123
135
  when %r(^/clobber$)
124
136
  if ask?(prompt: 'Are you sure to clear messages and collection? (y/n) ') =~ /\Ay/i
125
- @messages.clear
137
+ messages.clear
126
138
  @documents.clear
127
139
  links.clear
128
140
  STDOUT.puts "Cleared messages and collection #{bold{@documents.collection}}."
@@ -131,8 +143,8 @@ class OllamaChat::Chat
131
143
  end
132
144
  next
133
145
  when %r(^/drop(?:\s+(\d*))?$)
134
- @messages.drop($1)
135
- @messages.list_conversation(2)
146
+ messages.drop($1)
147
+ messages.list_conversation(2)
136
148
  next
137
149
  when %r(^/model$)
138
150
  @model = choose_model('', @model)
@@ -142,9 +154,9 @@ class OllamaChat::Chat
142
154
  info
143
155
  next
144
156
  when %r(^/regenerate$)
145
- if content = @messages.second_last&.content
157
+ if content = messages.second_last&.content
146
158
  content.gsub!(/\nConsider these chunks for your answer.*\z/, '')
147
- @messages.drop(2)
159
+ messages.drop(2)
148
160
  else
149
161
  STDOUT.puts "Not enough messages in this conversation."
150
162
  redo
@@ -201,7 +213,7 @@ class OllamaChat::Chat
201
213
  content = embed($1) or next
202
214
  when %r(^/web\s+(?:(\d+)\s+)?(.+))
203
215
  parse_content = false
204
- urls = search_web($2, $1.to_i)
216
+ urls = search_web($2, $1.to_i) or next
205
217
  urls.each do |url|
206
218
  fetch_source(url) { |url_io| embed_source(url_io, url) }
207
219
  end
@@ -211,7 +223,7 @@ class OllamaChat::Chat
211
223
  map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
212
224
  content = config.prompts.web % { query:, results: }
213
225
  when %r(^/save\s+(.+)$)
214
- @messages.save_conversation($1)
226
+ messages.save_conversation($1)
215
227
  STDOUT.puts "Saved conversation to #$1."
216
228
  next
217
229
  when %r(^/links(?:\s+(clear))?$)
@@ -251,7 +263,10 @@ class OllamaChat::Chat
251
263
  end
252
264
  next
253
265
  when %r(^/load\s+(.+)$)
254
- @messages.load_conversation($1)
266
+ messages.load_conversation($1)
267
+ if messages.size > 1
268
+ messages.list_conversation(2)
269
+ end
255
270
  STDOUT.puts "Loaded conversation from #$1."
256
271
  next
257
272
  when %r(^/config$)
@@ -302,16 +317,16 @@ class OllamaChat::Chat
302
317
  end
303
318
  end
304
319
 
305
- @messages << Ollama::Message.new(role: 'user', content:, images: @images.dup)
320
+ messages << Ollama::Message.new(role: 'user', content:, images: @images.dup)
306
321
  @images.clear
307
322
  handler = OllamaChat::FollowChat.new(
308
323
  chat: self,
309
- messages: @messages,
324
+ messages:,
310
325
  voice: (@current_voice if voice.on?)
311
326
  )
312
327
  ollama.chat(
313
328
  model: @model,
314
- messages: @messages,
329
+ messages:,
315
330
  options: @model_options,
316
331
  stream: stream.on?,
317
332
  &handler
@@ -325,16 +340,16 @@ class OllamaChat::Chat
325
340
  end
326
341
  [ link, record.tags.first ]
327
342
  }.uniq.map { |l, t| hyperlink(l, t) }.join(' ')
328
- config.debug and jj @messages.to_ary
343
+ config.debug and jj messages.to_ary
329
344
  end
345
+ rescue Ollama::Errors::TimeoutError
346
+ STDOUT.puts "#{bold('Error')}: Currently lost connection to ollama server and cannot send command."
330
347
  rescue Interrupt
331
348
  STDOUT.puts "Type /quit to quit."
332
349
  end
333
350
  0
334
351
  end
335
352
 
336
- private
337
-
338
353
  def setup_documents
339
354
  if embedding.on?
340
355
  @embedding_model = config.embedding.model.name
@@ -345,6 +360,7 @@ class OllamaChat::Chat
345
360
  ollama:,
346
361
  model: @embedding_model,
347
362
  model_options: config.embedding.model.options,
363
+ embedding_length: config.embedding.model.embedding_length,
348
364
  database_filename: config.embedding.database_filename || @ollama_chat_config.database_path,
349
365
  collection: ,
350
366
  cache: configure_cache,
@@ -392,7 +408,7 @@ class OllamaChat::Chat
392
408
  Documentrix::Documents::RedisCache.new(
393
409
  prefix: 'Expiring-',
394
410
  url:,
395
- ex: config.redis.expiring.ex,
411
+ ex: config.redis.expiring.ex?.to_i,
396
412
  )
397
413
  end
398
414
  end
@@ -52,7 +52,7 @@ class OllamaChat::FollowChat
52
52
  prompt_eval_rate: bold { "%.2f c/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
53
53
  total_duration: Tins::Duration.new(response.total_duration / 1e9),
54
54
  load_duration: Tins::Duration.new(response.load_duration / 1e9),
55
- }.map { _1 * '=' } * ' '
55
+ }.map { _1 * ?= } * ' '
56
56
  '📊 ' + color(111) {
57
57
  Kramdown::ANSI::Width.wrap(stats_text, percentage: 90).gsub(/(?<!\A)^/, ' ')
58
58
  }
@@ -28,6 +28,7 @@ module OllamaChat::Information
28
28
  end
29
29
 
30
30
  def info
31
+ STDOUT.puts "Running ollama_chat version: #{bold(OllamaChat::VERSION)}"
31
32
  STDOUT.puts "Connected to ollama server version: #{bold(server_version)}"
32
33
  STDOUT.puts "Current model is #{bold{@model}}."
33
34
  if @model_options.present?
@@ -47,6 +48,7 @@ module OllamaChat::Information
47
48
  stream.show
48
49
  location.show
49
50
  STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
51
+ STDOUT.puts "Currently selected search engine is #{bold(search_engine)}."
50
52
  if @voice.on?
51
53
  STDOUT.puts "Using voice #{bold{@current_voice}} to speak."
52
54
  end
@@ -21,7 +21,7 @@ prompts:
21
21
  Answer the query %{query} using these sources and summaries:
22
22
 
23
23
  %{results}
24
- location: You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}
24
+ location: You are at %{location_name}, %{location_decimal_degrees}, on %{localtime}, preferring %{units}
25
25
  system_prompts:
26
26
  default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
27
27
  assistant: You are a helpful assistant.
@@ -60,3 +60,10 @@ request_headers:
60
60
  Accept: 'text/*,application/*,image/*'
61
61
  ssl_no_verify: []
62
62
  copy: pbcopy
63
+ web_search:
64
+ use: duckduckgo
65
+ engines:
66
+ duckduckgo:
67
+ url: 'https://www.duckduckgo.com/html/?q=%{query}'
68
+ searxng:
69
+ url: <%= ENV.fetch('OLLAMA_SEARXNG_URL', 'http://localhost:8088/search?q=%{query}&language=en&format=json') %>
@@ -141,37 +141,4 @@ module OllamaChat::SourceFetching
141
141
  summarize(source)
142
142
  end
143
143
  end
144
-
145
- def search_web(query, n = nil)
146
- if l = @messages.at_location.full?
147
- query += " #{l}"
148
- end
149
- n = n.to_i.clamp(1..)
150
- query = URI.encode_uri_component(query)
151
- url = "https://www.duckduckgo.com/html/?q=#{query}"
152
- OllamaChat::Utils::Fetcher.get(
153
- url,
154
- headers: config.request_headers?.to_h,
155
- debug: config.debug
156
- ) do |tmp|
157
- result = []
158
- doc = Nokogiri::HTML(tmp)
159
- doc.css('.results_links').each do |link|
160
- if n > 0
161
- url = link.css('.result__a').first&.[]('href')
162
- url.sub!(%r(\A(//duckduckgo\.com)?/l/\?uddg=), '')
163
- url.sub!(%r(&rut=.*), '')
164
- url = URI.decode_uri_component(url)
165
- url = URI.parse(url)
166
- url.host =~ /duckduckgo\.com/ and next
167
- links.add(url.to_s)
168
- result << url
169
- n -= 1
170
- else
171
- break
172
- end
173
- end
174
- result
175
- end
176
- end
177
144
  end
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.3'
3
+ VERSION = '0.0.5'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
@@ -0,0 +1,60 @@
1
+ module OllamaChat::WebSearching
2
+ def search_web(query, n = nil)
3
+ l = @messages.at_location.full? and query += " #{l}"
4
+ n = n.to_i.clamp(1..)
5
+ query = URI.encode_uri_component(query)
6
+ search_command = :"search_web_with_#{search_engine}"
7
+ if respond_to?(search_command, true)
8
+ send(search_command, query, n)
9
+ else
10
+ STDOUT.puts "Search engine #{bold{search_engine}} not implemented!"
11
+ nil
12
+ end
13
+ end
14
+
15
+ private
16
+
17
+ def search_engine
18
+ config.web_search.use
19
+ end
20
+
21
+ def search_web_with_searxng(query, n)
22
+ url = config.web_search.engines.searxng.url % { query: }
23
+ OllamaChat::Utils::Fetcher.get(
24
+ url,
25
+ headers: config.request_headers?.to_h,
26
+ debug: config.debug
27
+ ) do |tmp|
28
+ data = JSON.parse(tmp.read, object_class: JSON::GenericObject)
29
+ data.results.first(n).map(&:url)
30
+ end
31
+ end
32
+
33
+ def search_web_with_duckduckgo(query, n)
34
+ url = config.web_search.engines.duckduckgo.url % { query: }
35
+ OllamaChat::Utils::Fetcher.get(
36
+ url,
37
+ headers: config.request_headers?.to_h,
38
+ debug: config.debug
39
+ ) do |tmp|
40
+ result = []
41
+ doc = Nokogiri::HTML(tmp)
42
+ doc.css('.results_links').each do |link|
43
+ if n > 0
44
+ url = link.css('.result__a').first&.[]('href')
45
+ url.sub!(%r(\A(//duckduckgo\.com)?/l/\?uddg=), '')
46
+ url.sub!(%r(&rut=.*), '')
47
+ url = URI.decode_uri_component(url)
48
+ url = URI.parse(url)
49
+ url.host =~ /duckduckgo\.com/ and next
50
+ links.add(url.to_s)
51
+ result << url
52
+ n -= 1
53
+ else
54
+ break
55
+ end
56
+ end
57
+ result
58
+ end
59
+ end
60
+ end
data/lib/ollama_chat.rb CHANGED
@@ -13,6 +13,7 @@ require 'ollama_chat/message_list'
13
13
  require 'ollama_chat/model_handling'
14
14
  require 'ollama_chat/parsing'
15
15
  require 'ollama_chat/source_fetching'
16
+ require 'ollama_chat/web_searching'
16
17
  require 'ollama_chat/dialog'
17
18
  require 'ollama_chat/information'
18
19
  require 'ollama_chat/clipboard'
data/ollama_chat.gemspec CHANGED
@@ -1,30 +1,30 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.3 ruby lib
2
+ # stub: ollama_chat 0.0.5 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.3".freeze
6
+ s.version = "0.0.5".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
10
10
  s.authors = ["Florian Frank".freeze]
11
- s.date = "2025-02-17"
11
+ s.date = "2025-03-22"
12
12
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
13
13
  s.email = "flori@ping.de".freeze
14
14
  s.executables = ["ollama_chat".freeze]
15
- s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze]
16
- s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, 
"spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
15
+ s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
+ s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, 
"spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
17
17
  s.homepage = "https://github.com/flori/ollama_chat".freeze
18
18
  s.licenses = ["MIT".freeze]
19
19
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
20
20
  s.required_ruby_version = Gem::Requirement.new("~> 3.1".freeze)
21
21
  s.rubygems_version = "3.6.2".freeze
22
22
  s.summary = "A command-line interface (CLI) for interacting with an Ollama AI model.".freeze
23
- s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze]
23
+ s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
24
24
 
25
25
  s.specification_version = 4
26
26
 
27
- s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.19".freeze])
27
+ s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.20".freeze])
28
28
  s.add_development_dependency(%q<all_images>.freeze, ["~> 0.6".freeze])
29
29
  s.add_development_dependency(%q<rspec>.freeze, ["~> 3.2".freeze])
30
30
  s.add_development_dependency(%q<kramdown>.freeze, ["~> 2.0".freeze])
@@ -0,0 +1,111 @@
1
+ {
2
+ "query": "foo",
3
+ "number_of_results": 0,
4
+ "results": [
5
+ {
6
+ "url": "https://en.wikipedia.org/wiki/Foo_Fighters",
7
+ "title": "Foo Fighters - Wikipedia",
8
+ "content": "The Foo Fighters are an American rock band formed in Seattle in 1994. Initially founded as a one-man project by former Nirvana drummer Dave Grohl, the band comprises vocalist/guitarist Grohl, bassist Nate Mendel, guitarist Pat Smear, guitarist Chris Shiflett, keyboardist Rami Jaffee and drummer Josh Freese.Guitarist Franz Stahl and drummers William Goldsmith and Taylor Hawkins are former ...",
9
+ "publishedDate": null,
10
+ "thumbnail": null,
11
+ "engine": "brave",
12
+ "template": "default.html",
13
+ "parsed_url": [
14
+ "https",
15
+ "en.wikipedia.org",
16
+ "/wiki/Foo_Fighters",
17
+ "",
18
+ "",
19
+ ""
20
+ ],
21
+ "engines": [
22
+ "startpage",
23
+ "brave",
24
+ "google",
25
+ "duckduckgo"
26
+ ],
27
+ "positions": [
28
+ 2,
29
+ 1,
30
+ 2,
31
+ 1
32
+ ],
33
+ "score": 12.0,
34
+ "category": "general"
35
+ },
36
+ {
37
+ "url": "https://www.foofighters.com",
38
+ "title": "Home - Foo Fighters",
39
+ "content": "Official website of Foo Fighters",
40
+ "publishedDate": "2023-04-19T00:00:00",
41
+ "thumbnail": null,
42
+ "engine": "brave",
43
+ "template": "default.html",
44
+ "parsed_url": [
45
+ "https",
46
+ "www.foofighters.com",
47
+ "",
48
+ "",
49
+ "",
50
+ ""
51
+ ],
52
+ "engines": [
53
+ "startpage",
54
+ "brave",
55
+ "google",
56
+ "duckduckgo"
57
+ ],
58
+ "positions": [
59
+ 1,
60
+ 2,
61
+ 7,
62
+ 2
63
+ ],
64
+ "score": 8.571428571428571,
65
+ "category": "general"
66
+ },
67
+ {
68
+ "url": "https://www.dictionary.com/e/tech-science/foo/",
69
+ "title": "foo | Meaning & Origin | Dictionary.com",
70
+ "content": "\"Foo was here\" was a popular piece of graffiti drawn by Australian soldiers in WWII, and possibly even WWI, that depicts a little man poking his head and large nose over the wall à la Kilroy. While the origins of this foo are unclear, it appears to be unrelated to Holman's foo.",
71
+ "publishedDate": "2022-07-04T00:00:00",
72
+ "thumbnail": "",
73
+ "engine": "brave",
74
+ "template": "default.html",
75
+ "parsed_url": [
76
+ "https",
77
+ "www.dictionary.com",
78
+ "/e/tech-science/foo/",
79
+ "",
80
+ "",
81
+ ""
82
+ ],
83
+ "engines": [
84
+ "brave",
85
+ "duckduckgo"
86
+ ],
87
+ "positions": [
88
+ 13,
89
+ 1
90
+ ],
91
+ "score": 2.1538461538461537,
92
+ "category": "general"
93
+ }
94
+ ],
95
+ "suggestions": [
96
+ "foo fighters - everlong",
97
+ "Foo meaning",
98
+ "Foo Fighters WW2",
99
+ "Foo Fighters - Learn to Fly",
100
+ "foo fighters - the colour and the shape",
101
+ "Foo Fighters tour 2024",
102
+ "Foo programming",
103
+ "Foo word"
104
+ ],
105
+ "unresponsive_engines": [
106
+ [
107
+ "qwant",
108
+ "Suspended: CAPTCHA"
109
+ ]
110
+ ]
111
+ }
@@ -15,6 +15,19 @@ RSpec.describe OllamaChat::Chat do
15
15
  expect(chat).to be_a described_class
16
16
  end
17
17
 
18
+ context 'loading conversations' do
19
+ let :argv do
20
+ %w[ -C test -c ] << asset('conversation.json')
21
+ end
22
+
23
+ it 'displays the last exchange of the conversation' do
24
+ expect(chat).to receive(:interact_with_user).and_return 0
25
+ expect(STDOUT).to receive(:puts).at_least(1)
26
+ expect(chat.messages).to receive(:list_conversation)
27
+ chat.start
28
+ end
29
+ end
30
+
18
31
  describe OllamaChat::DocumentCache do
19
32
  context 'with MemoryCache' do
20
33
  let :argv do
@@ -71,6 +84,7 @@ RSpec.describe OllamaChat::Chat do
71
84
  expect(STDOUT).to receive(:puts).
72
85
  with(
73
86
  /
87
+ Running\ ollama_chat\ version|
74
88
  Connected\ to\ ollama\ server|
75
89
  Current\ model|
76
90
  Options|
@@ -80,7 +94,8 @@ RSpec.describe OllamaChat::Chat do
80
94
  output\ content|
81
95
  Streaming|
82
96
  Location|
83
- Document\ policy
97
+ Document\ policy|
98
+ Currently\ selected\ search\ engine
84
99
  /x
85
100
  ).at_least(1)
86
101
  expect(chat.info).to be_nil
@@ -36,13 +36,4 @@ RSpec.describe OllamaChat::SourceFetching do
36
36
  'This source was now embedded: ./spec/assets/example.html'
37
37
  )
38
38
  end
39
-
40
- it 'can search web' do
41
- stub_request(:get, "https://www.duckduckgo.com/html/?q=foo").
42
- with(headers: { 'Host'=>'www.duckduckgo.com' }).
43
- to_return(status: 200, body: asset_content('duckduckgo.html'), headers: {})
44
- expect(chat.search_web('foo').first.to_s).to eq(
45
- 'https://en.wikipedia.org/wiki/Foo_Fighters'
46
- )
47
- end
48
39
  end
@@ -0,0 +1,33 @@
1
+ require 'spec_helper'
2
+
3
+ RSpec.describe OllamaChat::WebSearching do
4
+ let :chat do
5
+ OllamaChat::Chat.new
6
+ end
7
+
8
+ connect_to_ollama_server
9
+
10
+ it 'can search web with duckduckgo' do
11
+ url = 'https://www.duckduckgo.com/html/?q=foo'
12
+ chat.config.web_search.engines.duckduckgo.attributes_update(url:)
13
+ expect(chat).to receive(:search_engine).and_return 'duckduckgo'
14
+ stub_request(:get, url).
15
+ with(headers: { 'Host'=> 'www.duckduckgo.com' }).
16
+ to_return(status: 200, body: asset_content('duckduckgo.html'), headers: {})
17
+ expect(chat.search_web('foo').first.to_s).to eq(
18
+ 'https://en.wikipedia.org/wiki/Foo_Fighters'
19
+ )
20
+ end
21
+
22
+ it 'can search web with searxng' do
23
+ url = 'http://localhost:8088/search?format=json&language=en&q=foo'
24
+ chat.config.web_search.engines.searxng.attributes_update(url:)
25
+ expect(chat).to receive(:search_engine).and_return 'searxng'
26
+ stub_request(:get, url).
27
+ with(headers: { 'Host'=>'localhost:8088' }).
28
+ to_return(status: 200, body: asset_content('searxng.json'), headers: {})
29
+ expect(chat.search_web('foo').first.to_s).to eq(
30
+ 'https://en.wikipedia.org/wiki/Foo_Fighters'
31
+ )
32
+ end
33
+ end
data/spec/spec_helper.rb CHANGED
@@ -14,6 +14,8 @@ require 'webmock/rspec'
14
14
  WebMock.disable_net_connect!
15
15
  require 'ollama_chat'
16
16
 
17
+ ComplexConfig::Provider.deep_freeze = false
18
+
17
19
  def asset(name)
18
20
  File.join(__dir__, 'assets', name)
19
21
  end