ollama_chat 0.0.0 → 0.0.1

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 7968cdb60fc0612f59f3c240927a4a11f21ad664009270d614a31a119e818f17
4
- data.tar.gz: f27240bd0371ecb4c5e2b20a73f2b94e3091ee3c6e230b313c89b39112be6096
3
+ metadata.gz: e5c75809ec83f12f9b70645f426774dec435a2510db811fa861e9c862a1e5c71
4
+ data.tar.gz: bca4f2fcf8e5e77ccc922858e4fe25db8933e5b09a9b264b94648a39feabc0c2
5
5
  SHA512:
6
- metadata.gz: d665a8f4e8182b0b6a18f3daf86ce7a464632c2dfe3c608332528d10502ac65f4c3663cd7a6ebc18439f4c1156a183ce0962171d3bfe326b074f539fba51cee4
7
- data.tar.gz: fa7a6384dabf4204a128ae56ef43c3fe22cbe2842f69ffa5c5d1db16133d64767541be616e6219ac4923cde21d4441b6e8f1a1f24dea726ed27dbbca35043d6a
6
+ metadata.gz: a8e9c93775bfd1629a743436a7fd85b7b7801d7a69221a423b5abbe0a44d98afccc7f9c8eb5182fdfacd848b0d1b27140fb599c3794f77bd4fa2cec7a012a9fb
7
+ data.tar.gz: 4f0460bb249fcd988a3425bdedc377ceaf5052f90c12563ef5fa01b72100857e30484622ed840b11147aed9cdc9e8a9c5cf4f9979c96fa317c8c27780d9fa990
data/.envrc ADDED
@@ -0,0 +1,2 @@
1
+ export REDIS_URL=redis://localhost:9736
2
+ export REDIS_EXPIRING_URL=redis://localhost:9736
data/CHANGES.md ADDED
@@ -0,0 +1,18 @@
1
+ # Changes
2
+
3
+ ## 2025-02-02 v0.0.1
4
+
5
+ * Renamed `documents` variable to `@documents` in `OllamaChat::Chat`
6
+ * Modified `add_documents_from_argv` method to accept only `document_list` as argument
7
+ * Updated spec for `OllamaChat::Chat` to reflect changes in `add_documents_from_argv` method
8
+ * Use `clamp(1..)` instead of manual checks for `n.to_i` in source fetching
9
+ * Use the term "dropped" consistently in the code for message popping
10
+ * Set up Redis environment and service for development:
11
+ * Added `.envrc` file with Redis URL exports.
12
+ * Added `docker-compose.yml` file to define a Redis service.
13
+ * Added `redis.conf` file with basic settings.
14
+ * Use method names rather than instance variables for switch access.
15
+
16
+ ## 2025-01-29 v0.0.0
17
+
18
+ * Start
data/VERSION CHANGED
@@ -1 +1 @@
1
- 0.0.0
1
+ 0.0.1
@@ -0,0 +1,10 @@
1
+ services:
2
+ redis:
3
+ image: valkey/valkey:7.2.8-alpine
4
+ restart: unless-stopped
5
+ ports: [ "127.0.0.1:9736:6379" ]
6
+ volumes:
7
+ - "redis-data:/data:delegated"
8
+ - "./redis/redis.conf:/etc/redis.conf"
9
+ volumes:
10
+ redis-data:
@@ -41,7 +41,7 @@ class OllamaChat::Chat
41
41
  @model = choose_model(@opts[?m], config.model.name)
42
42
  @model_options = Ollama::Options[config.model.options]
43
43
  model_system = pull_model_unless_present(@model, @model_options)
44
- @embedding_enabled.set(config.embedding.enabled && !@opts[?E])
44
+ embedding_enabled.set(config.embedding.enabled && !@opts[?E])
45
45
  @messages = OllamaChat::MessageList.new(self)
46
46
  if @opts[?c]
47
47
  @messages.load_conversation(@opts[?c])
@@ -96,19 +96,19 @@ class OllamaChat::Chat
96
96
  when %r(^/paste$)
97
97
  content = paste_from_input
98
98
  when %r(^/markdown$)
99
- @markdown.toggle
99
+ markdown.toggle
100
100
  next
101
101
  when %r(^/stream$)
102
- @stream.toggle
102
+ stream.toggle
103
103
  next
104
104
  when %r(^/location$)
105
- @location.toggle
105
+ location.toggle
106
106
  next
107
107
  when %r(^/voice(?:\s+(change))?$)
108
108
  if $1 == 'change'
109
109
  change_voice
110
110
  else
111
- @voice.toggle
111
+ voice.toggle
112
112
  end
113
113
  next
114
114
  when %r(^/list(?:\s+(\d*))?$)
@@ -192,8 +192,8 @@ class OllamaChat::Chat
192
192
  parse_content = false
193
193
  content = summarize($2, words: $1) or next
194
194
  when %r(^/embedding$)
195
- @embedding_paused.toggle(show: false)
196
- @embedding.show
195
+ embedding_paused.toggle(show: false)
196
+ embedding.show
197
197
  next
198
198
  when %r(^/embed\s+(.+))
199
199
  parse_content = false
@@ -287,7 +287,7 @@ class OllamaChat::Chat
287
287
  [ content, Documentrix::Utils::Tags.new ]
288
288
  end
289
289
 
290
- if @embedding.on? && content
290
+ if embedding.on? && content
291
291
  records = @documents.find_where(
292
292
  content.downcase,
293
293
  tags:,
@@ -304,18 +304,18 @@ class OllamaChat::Chat
304
304
  @messages << Ollama::Message.new(role: 'user', content:, images: @images.dup)
305
305
  @images.clear
306
306
  handler = OllamaChat::FollowChat.new(
307
+ chat: self,
307
308
  messages: @messages,
308
- markdown: @markdown.on?,
309
- voice: (@current_voice if @voice.on?)
309
+ voice: (@current_voice if voice.on?)
310
310
  )
311
311
  ollama.chat(
312
312
  model: @model,
313
313
  messages: @messages,
314
314
  options: @model_options,
315
- stream: @stream.on?,
315
+ stream: stream.on?,
316
316
  &handler
317
317
  )
318
- if @embedding.on? && !records.empty?
318
+ if embedding.on? && !records.empty?
319
319
  STDOUT.puts "", records.map { |record|
320
320
  link = if record.source =~ %r(\Ahttps?://)
321
321
  record.source
@@ -335,12 +335,12 @@ class OllamaChat::Chat
335
335
  private
336
336
 
337
337
  def setup_documents
338
- if @embedding.on?
338
+ if embedding.on?
339
339
  @embedding_model = config.embedding.model.name
340
340
  @embedding_model_options = Ollama::Options[config.embedding.model.options]
341
341
  pull_model_unless_present(@embedding_model, @embedding_model_options)
342
342
  collection = @opts[?C] || config.embedding.collection
343
- documents = Documentrix::Documents.new(
343
+ @documents = Documentrix::Documents.new(
344
344
  ollama:,
345
345
  model: @embedding_model,
346
346
  model_options: config.embedding.model.options,
@@ -352,14 +352,14 @@ class OllamaChat::Chat
352
352
  )
353
353
 
354
354
  document_list = @opts[?D].to_a
355
- add_documents_from_argv(documents, document_list)
356
- documents
355
+ add_documents_from_argv(document_list)
356
+ @documents
357
357
  else
358
358
  Tins::NULL
359
359
  end
360
360
  end
361
361
 
362
- def add_documents_from_argv(documents, document_list)
362
+ def add_documents_from_argv(document_list)
363
363
  if document_list.any?(&:empty?)
364
364
  STDOUT.puts "Clearing collection #{bold{documents.collection}}."
365
365
  documents.clear
@@ -4,10 +4,10 @@ class OllamaChat::FollowChat
4
4
  include Term::ANSIColor
5
5
  include OllamaChat::MessageType
6
6
 
7
- def initialize(messages:, markdown: false, voice: nil, output: STDOUT)
7
+ def initialize(chat:, messages:, voice: nil, output: STDOUT)
8
8
  super(output:)
9
+ @chat = chat
9
10
  @output.sync = true
10
- @markdown = markdown
11
11
  @say = voice ? Handlers::Say.new(voice:) : NOP
12
12
  @messages = messages
13
13
  @user = nil
@@ -20,13 +20,13 @@ class OllamaChat::FollowChat
20
20
  @messages << Message.new(role: 'assistant', content: '')
21
21
  @user = message_type(@messages.last.images) + " " +
22
22
  bold { color(111) { 'assistant:' } }
23
- @output.puts @user unless @markdown
23
+ @output.puts @user unless @chat.markdown.on?
24
24
  end
25
25
  if content = response.message&.content
26
26
  content = content.gsub(%r(<think>), "💭\n").gsub(%r(</think>), "\n💬")
27
27
  end
28
28
  @messages.last.content << content
29
- if @markdown and content = @messages.last.content.full?
29
+ if @chat.markdown.on? and content = @messages.last.content.full?
30
30
  markdown_content = Kramdown::ANSI.parse(content)
31
31
  @output.print clear_screen, move_home, @user, ?\n, markdown_content
32
32
  else
@@ -42,9 +42,9 @@ module OllamaChat::Information
42
42
  collection_stats
43
43
  end
44
44
  STDOUT.puts "Documents database cache is #{@documents.nil? ? 'n/a' : bold{@documents.cache.class}}"
45
- @markdown.show
46
- @stream.show
47
- @location.show
45
+ markdown.show
46
+ stream.show
47
+ location.show
48
48
  STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
49
49
  if @voice.on?
50
50
  STDOUT.puts "Using voice #{bold{@current_voice}} to speak."
@@ -128,10 +128,10 @@ class OllamaChat::MessageList
128
128
  n = n.to_i.clamp(1, Float::INFINITY)
129
129
  r = @messages.pop(2 * n)
130
130
  m = r.size / 2
131
- STDOUT.puts "Popped the last #{m} exchanges."
131
+ STDOUT.puts "Dropped the last #{m} exchanges."
132
132
  m
133
133
  else
134
- STDOUT.puts "No more exchanges you can pop."
134
+ STDOUT.puts "No more exchanges you can drop."
135
135
  0
136
136
  end
137
137
  end
@@ -145,8 +145,7 @@ module OllamaChat::SourceFetching
145
145
  if l = @messages.at_location.full?
146
146
  query += " #{l}"
147
147
  end
148
- n = n.to_i
149
- n < 1 and n = 1
148
+ n = n.to_i.clamp(1..)
150
149
  query = URI.encode_uri_component(query)
151
150
  url = "https://www.duckduckgo.com/html/?q=#{query}"
152
151
  OllamaChat::Utils::Fetcher.get(url, debug: config.debug) do |tmp|
@@ -51,6 +51,16 @@ module OllamaChat::Switches
51
51
 
52
52
  attr_reader :markdown
53
53
 
54
+ attr_reader :stream
55
+
56
+ attr_reader :voice
57
+
58
+ attr_reader :embedding
59
+
60
+ attr_reader :embedding_enabled
61
+
62
+ attr_reader :embedding_paused
63
+
54
64
  attr_reader :location
55
65
 
56
66
  def setup_switches(config)
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.0'
3
+ VERSION = '0.0.1'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama_chat.gemspec CHANGED
@@ -1,19 +1,19 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.0 ruby lib
2
+ # stub: ollama_chat 0.0.1 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.0".freeze
6
+ s.version = "0.0.1".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
10
10
  s.authors = ["Florian Frank".freeze]
11
- s.date = "2025-01-29"
11
+ s.date = "2025-02-02"
12
12
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
13
13
  s.email = "flori@ping.de".freeze
14
14
  s.executables = ["ollama_chat".freeze]
15
15
  s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze]
16
- s.files = [".all_images.yml".freeze, ".gitignore".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "ollama_chat.gemspec".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, 
"spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
16
+ s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, 
"spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
17
17
  s.homepage = "https://github.com/flori/ollama_chat".freeze
18
18
  s.licenses = ["MIT".freeze]
19
19
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
data/redis/redis.conf ADDED
@@ -0,0 +1,5 @@
1
+ save 60 1000
2
+ dbfilename dump.rdb
3
+ appendonly yes
4
+ appendfilename "appendonly.aof"
5
+ appendfsync always
@@ -48,7 +48,7 @@ RSpec.describe OllamaChat::Chat do
48
48
 
49
49
  it 'Adds documents passed to app via -D option' do
50
50
  expect_any_instance_of(OllamaChat::Chat).to receive(:add_documents_from_argv).
51
- with(kind_of(Documentrix::Documents), [ asset('example.html') ])
51
+ with([ asset('example.html') ])
52
52
  chat
53
53
  end
54
54
  end
@@ -7,8 +7,12 @@ RSpec.describe OllamaChat::FollowChat do
7
7
  ]
8
8
  end
9
9
 
10
+ let :chat do
11
+ double('Chat', markdown: double(on?: false))
12
+ end
13
+
10
14
  let :follow_chat do
11
- described_class.new(messages:, output:)
15
+ described_class.new(chat:, messages:, output:)
12
16
  end
13
17
 
14
18
  let :output do
@@ -18,7 +18,6 @@ RSpec.describe OllamaChat::Switches do
18
18
  double(test?: false)
19
19
  end
20
20
 
21
-
22
21
  it 'can be switched on' do
23
22
  expect {
24
23
  switch.set(true)
metadata CHANGED
@@ -1,13 +1,13 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ollama_chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.0
4
+ version: 0.0.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Florian Frank
8
8
  bindir: bin
9
9
  cert_chain: []
10
- date: 2025-01-29 00:00:00.000000000 Z
10
+ date: 2025-02-02 00:00:00.000000000 Z
11
11
  dependencies:
12
12
  - !ruby/object:Gem::Dependency
13
13
  name: gem_hadar
@@ -379,12 +379,15 @@ extra_rdoc_files:
379
379
  - lib/ollama_chat/version.rb
380
380
  files:
381
381
  - ".all_images.yml"
382
+ - ".envrc"
382
383
  - ".gitignore"
384
+ - CHANGES.md
383
385
  - Gemfile
384
386
  - README.md
385
387
  - Rakefile
386
388
  - VERSION
387
389
  - bin/ollama_chat
390
+ - docker-compose.yml
388
391
  - lib/ollama_chat.rb
389
392
  - lib/ollama_chat/chat.rb
390
393
  - lib/ollama_chat/clipboard.rb
@@ -406,6 +409,7 @@ files:
406
409
  - lib/ollama_chat/utils/file_argument.rb
407
410
  - lib/ollama_chat/version.rb
408
411
  - ollama_chat.gemspec
412
+ - redis/redis.conf
409
413
  - spec/assets/api_show.json
410
414
  - spec/assets/api_tags.json
411
415
  - spec/assets/conversation.json