ollama_chat 0.0.0 → 0.0.2

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 7968cdb60fc0612f59f3c240927a4a11f21ad664009270d614a31a119e818f17
4
- data.tar.gz: f27240bd0371ecb4c5e2b20a73f2b94e3091ee3c6e230b313c89b39112be6096
3
+ metadata.gz: 5e53e7c97f1d4cb21ae23b70b3dc14f3dfcb183978f3e3f5ebf44fa392d60c7d
4
+ data.tar.gz: 5b249fdd1c0a3acfc132fd4388608f12a64f4528ca280cb7feb427bc3ebd7c2e
5
5
  SHA512:
6
- metadata.gz: d665a8f4e8182b0b6a18f3daf86ce7a464632c2dfe3c608332528d10502ac65f4c3663cd7a6ebc18439f4c1156a183ce0962171d3bfe326b074f539fba51cee4
7
- data.tar.gz: fa7a6384dabf4204a128ae56ef43c3fe22cbe2842f69ffa5c5d1db16133d64767541be616e6219ac4923cde21d4441b6e8f1a1f24dea726ed27dbbca35043d6a
6
+ metadata.gz: aaaf8a6011430a7bba371ef08059d3abd6493e7c43e766068942aefed893ed2378b9e5193a9a43233579c49e35db88520f62083c936ca4bd8528d0cb84d81b84
7
+ data.tar.gz: 6bb078ad3be012d0936a72303df5a44186fc0c7a95fc335124a6b37eb5affbb4e64bb26ccf8127268c6410ef4eff15e53405652934f388a581d0bfea82c0c1d3
data/.envrc ADDED
@@ -0,0 +1,2 @@
1
+ export REDIS_URL=redis://localhost:9736
2
+ export REDIS_EXPIRING_URL=redis://localhost:9736
data/CHANGES.md ADDED
@@ -0,0 +1,32 @@
1
+ # Changes
2
+
3
+ ## 2025-02-11 v0.0.2
4
+
5
+ * Improved handling of location in MessageList class:
6
+ * Use assistant system prompt (`assistant_system_prompt`) for adding location
7
+ to message list, if no system prompt was defined.
8
+ * Updated spec to cover new behavior.
9
+ * Simplified configuration defaults to be stored in `default_config.yml`:
10
+ - Replaced `DEFAULT_CONFIG` hash with a single line of code that reads from
11
+ `default_config.yml`
12
+ - Created new file `default_config.yml` in the same directory, containing the
13
+ old `DEFAULT_CONFIG` hash values
14
+ - Updated `initialize` method to use the new `default_config.yml` file if no
15
+ filename is provided
16
+
17
+ ## 2025-02-02 v0.0.1
18
+
19
+ * Renamed `documents` variable to `@documents` in `OllamaChat::Chat`
20
+ * Modified `add_documents_from_argv` method to accept only `document_list` as argument
21
+ * Updated spec for `OllamaChat::Chat` to reflect changes in `add_documents_from_argv` method
22
+ * Use `clamp(1..)` instead of manual checks for `n.to_i` in source fetching
23
+ * Dropped is now used consistently in the code for message popping
24
+ * Set up Redis environment and service for development:
25
+ * Added `.envrc` file with Redis URL exports.
26
+ * Added `docker-compose.yml` file to define a Redis service:
27
+ * Added `redis.conf` file with basic settings:
28
+ * Use method names rather than instance variables for switch access.
29
+
30
+ ## 2025-01-29 v0.0.0
31
+
32
+ * Start
data/VERSION CHANGED
@@ -1 +1 @@
1
- 0.0.0
1
+ 0.0.2
@@ -0,0 +1,10 @@
1
+ services:
2
+ redis:
3
+ image: valkey/valkey:7.2.8-alpine
4
+ restart: unless-stopped
5
+ ports: [ "127.0.0.1:9736:6379" ]
6
+ volumes:
7
+ - "redis-data:/data:delegated"
8
+ - "./redis/redis.conf:/etc/redis.conf"
9
+ volumes:
10
+ redis-data:
@@ -41,7 +41,7 @@ class OllamaChat::Chat
41
41
  @model = choose_model(@opts[?m], config.model.name)
42
42
  @model_options = Ollama::Options[config.model.options]
43
43
  model_system = pull_model_unless_present(@model, @model_options)
44
- @embedding_enabled.set(config.embedding.enabled && !@opts[?E])
44
+ embedding_enabled.set(config.embedding.enabled && !@opts[?E])
45
45
  @messages = OllamaChat::MessageList.new(self)
46
46
  if @opts[?c]
47
47
  @messages.load_conversation(@opts[?c])
@@ -96,19 +96,19 @@ class OllamaChat::Chat
96
96
  when %r(^/paste$)
97
97
  content = paste_from_input
98
98
  when %r(^/markdown$)
99
- @markdown.toggle
99
+ markdown.toggle
100
100
  next
101
101
  when %r(^/stream$)
102
- @stream.toggle
102
+ stream.toggle
103
103
  next
104
104
  when %r(^/location$)
105
- @location.toggle
105
+ location.toggle
106
106
  next
107
107
  when %r(^/voice(?:\s+(change))?$)
108
108
  if $1 == 'change'
109
109
  change_voice
110
110
  else
111
- @voice.toggle
111
+ voice.toggle
112
112
  end
113
113
  next
114
114
  when %r(^/list(?:\s+(\d*))?$)
@@ -192,8 +192,8 @@ class OllamaChat::Chat
192
192
  parse_content = false
193
193
  content = summarize($2, words: $1) or next
194
194
  when %r(^/embedding$)
195
- @embedding_paused.toggle(show: false)
196
- @embedding.show
195
+ embedding_paused.toggle(show: false)
196
+ embedding.show
197
197
  next
198
198
  when %r(^/embed\s+(.+))
199
199
  parse_content = false
@@ -287,7 +287,7 @@ class OllamaChat::Chat
287
287
  [ content, Documentrix::Utils::Tags.new ]
288
288
  end
289
289
 
290
- if @embedding.on? && content
290
+ if embedding.on? && content
291
291
  records = @documents.find_where(
292
292
  content.downcase,
293
293
  tags:,
@@ -304,18 +304,18 @@ class OllamaChat::Chat
304
304
  @messages << Ollama::Message.new(role: 'user', content:, images: @images.dup)
305
305
  @images.clear
306
306
  handler = OllamaChat::FollowChat.new(
307
+ chat: self,
307
308
  messages: @messages,
308
- markdown: @markdown.on?,
309
- voice: (@current_voice if @voice.on?)
309
+ voice: (@current_voice if voice.on?)
310
310
  )
311
311
  ollama.chat(
312
312
  model: @model,
313
313
  messages: @messages,
314
314
  options: @model_options,
315
- stream: @stream.on?,
315
+ stream: stream.on?,
316
316
  &handler
317
317
  )
318
- if @embedding.on? && !records.empty?
318
+ if embedding.on? && !records.empty?
319
319
  STDOUT.puts "", records.map { |record|
320
320
  link = if record.source =~ %r(\Ahttps?://)
321
321
  record.source
@@ -335,12 +335,12 @@ class OllamaChat::Chat
335
335
  private
336
336
 
337
337
  def setup_documents
338
- if @embedding.on?
338
+ if embedding.on?
339
339
  @embedding_model = config.embedding.model.name
340
340
  @embedding_model_options = Ollama::Options[config.embedding.model.options]
341
341
  pull_model_unless_present(@embedding_model, @embedding_model_options)
342
342
  collection = @opts[?C] || config.embedding.collection
343
- documents = Documentrix::Documents.new(
343
+ @documents = Documentrix::Documents.new(
344
344
  ollama:,
345
345
  model: @embedding_model,
346
346
  model_options: config.embedding.model.options,
@@ -352,14 +352,14 @@ class OllamaChat::Chat
352
352
  )
353
353
 
354
354
  document_list = @opts[?D].to_a
355
- add_documents_from_argv(documents, document_list)
356
- documents
355
+ add_documents_from_argv(document_list)
356
+ @documents
357
357
  else
358
358
  Tins::NULL
359
359
  end
360
360
  end
361
361
 
362
- def add_documents_from_argv(documents, document_list)
362
+ def add_documents_from_argv(document_list)
363
363
  if document_list.any?(&:empty?)
364
364
  STDOUT.puts "Clearing collection #{bold{documents.collection}}."
365
365
  documents.clear
@@ -4,10 +4,10 @@ class OllamaChat::FollowChat
4
4
  include Term::ANSIColor
5
5
  include OllamaChat::MessageType
6
6
 
7
- def initialize(messages:, markdown: false, voice: nil, output: STDOUT)
7
+ def initialize(chat:, messages:, voice: nil, output: STDOUT)
8
8
  super(output:)
9
+ @chat = chat
9
10
  @output.sync = true
10
- @markdown = markdown
11
11
  @say = voice ? Handlers::Say.new(voice:) : NOP
12
12
  @messages = messages
13
13
  @user = nil
@@ -20,13 +20,13 @@ class OllamaChat::FollowChat
20
20
  @messages << Message.new(role: 'assistant', content: '')
21
21
  @user = message_type(@messages.last.images) + " " +
22
22
  bold { color(111) { 'assistant:' } }
23
- @output.puts @user unless @markdown
23
+ @output.puts @user unless @chat.markdown.on?
24
24
  end
25
25
  if content = response.message&.content
26
26
  content = content.gsub(%r(<think>), "💭\n").gsub(%r(</think>), "\n💬")
27
27
  end
28
28
  @messages.last.content << content
29
- if @markdown and content = @messages.last.content.full?
29
+ if @chat.markdown.on? and content = @messages.last.content.full?
30
30
  markdown_content = Kramdown::ANSI.parse(content)
31
31
  @output.print clear_screen, move_home, @user, ?\n, markdown_content
32
32
  else
@@ -42,9 +42,9 @@ module OllamaChat::Information
42
42
  collection_stats
43
43
  end
44
44
  STDOUT.puts "Documents database cache is #{@documents.nil? ? 'n/a' : bold{@documents.cache.class}}"
45
- @markdown.show
46
- @stream.show
47
- @location.show
45
+ markdown.show
46
+ stream.show
47
+ location.show
48
48
  STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
49
49
  if @voice.on?
50
50
  STDOUT.puts "Using voice #{bold{@current_voice}} to speak."
@@ -128,10 +128,10 @@ class OllamaChat::MessageList
128
128
  n = n.to_i.clamp(1, Float::INFINITY)
129
129
  r = @messages.pop(2 * n)
130
130
  m = r.size / 2
131
- STDOUT.puts "Popped the last #{m} exchanges."
131
+ STDOUT.puts "Dropped the last #{m} exchanges."
132
132
  m
133
133
  else
134
- STDOUT.puts "No more exchanges you can pop."
134
+ STDOUT.puts "No more exchanges you can drop."
135
135
  0
136
136
  end
137
137
  end
@@ -182,14 +182,23 @@ class OllamaChat::MessageList
182
182
  # messages in the list.
183
183
  def to_ary
184
184
  location = at_location.full?
185
- @messages.map do |message|
185
+ add_system = !!location
186
+ result = @messages.map do |message|
186
187
  if message.role == 'system' && location
188
+ add_system = false
187
189
  content = message.content + "\n\n#{location}"
188
190
  Ollama::Message.new(role: message.role, content:)
189
191
  else
190
192
  message
191
193
  end
192
194
  end
195
+ if add_system
196
+ prompt = @chat.config.system_prompts.assistant?
197
+ content = [ prompt, location ].compact * "\n\n"
198
+ message = Ollama::Message.new(role: 'system', content:)
199
+ result.unshift message
200
+ end
201
+ result
193
202
  end
194
203
 
195
204
  # The at_location method returns the location/time/units information as a
@@ -0,0 +1,60 @@
1
+ ---
2
+ url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
3
+ proxy: null # http://localhost:8080
4
+ model:
5
+ name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
6
+ options:
7
+ num_ctx: 8192
8
+ location:
9
+ enabled: false
10
+ name: Berlin
11
+ decimal_degrees: [ 52.514127, 13.475211 ]
12
+ units: SI (International System of Units) # or USCS (United States Customary System)
13
+ prompts:
14
+ embed: "This source was now embedded: %{source}"
15
+ summarize: |
16
+ Generate an abstract summary of the content in this document using
17
+ %{words} words:
18
+
19
+ %{source_content}
20
+ web: |
21
+ Answer the query %{query} using these sources and summaries:
22
+
23
+ %{results}
24
+ location: You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}
25
+ system_prompts:
26
+ default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
27
+ assistant: You are a helpful assistant.
28
+ voice:
29
+ enabled: false
30
+ default: Samantha
31
+ list: <%= `say -v ? 2>/dev/null`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
32
+ markdown: true
33
+ stream: true
34
+ document_policy: importing
35
+ embedding:
36
+ enabled: true
37
+ model:
38
+ name: mxbai-embed-large
39
+ embedding_length: 1024
40
+ options: {}
41
+ # Retrieval prompt template:
42
+ prompt: 'Represent this sentence for searching relevant passages: %s'
43
+ batch_size: 10
44
+ database_filename: null # ':memory:'
45
+ collection: <%= ENV['OLLAMA_CHAT_COLLECTION'] %>
46
+ found_texts_size: 4096
47
+ found_texts_count: 10
48
+ splitter:
49
+ name: RecursiveCharacter
50
+ chunk_size: 1024
51
+ cache: Documentrix::Documents::SQLiteCache
52
+ redis:
53
+ documents:
54
+ url: <%= ENV.fetch('REDIS_URL', 'null') %>
55
+ expiring:
56
+ url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
57
+ ex: 86400
58
+ debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
59
+ ssl_no_verify: []
60
+ copy: pbcopy
@@ -1,67 +1,12 @@
1
+ require 'pathname'
2
+
1
3
  class OllamaChat::OllamaChatConfig
2
4
  include ComplexConfig
3
5
  include FileUtils
4
6
 
5
- DEFAULT_CONFIG = <<~EOT
6
- ---
7
- url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
8
- proxy: null # http://localhost:8080
9
- model:
10
- name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
11
- options:
12
- num_ctx: 8192
13
- location:
14
- enabled: false
15
- name: Berlin
16
- decimal_degrees: [ 52.514127, 13.475211 ]
17
- units: SI (International System of Units) # or USCS (United States Customary System)
18
- prompts:
19
- embed: "This source was now embedded: %{source}"
20
- summarize: |
21
- Generate an abstract summary of the content in this document using
22
- %{words} words:
23
-
24
- %{source_content}
25
- web: |
26
- Answer the the query %{query} using these sources and summaries:
27
-
28
- %{results}
29
- system_prompts:
30
- default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
31
- voice:
32
- enabled: false
33
- default: Samantha
34
- list: <%= `say -v ? 2>/dev/null`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
35
- markdown: true
36
- stream: true
37
- document_policy: importing
38
- embedding:
39
- enabled: true
40
- model:
41
- name: mxbai-embed-large
42
- embedding_length: 1024
43
- options: {}
44
- # Retrieval prompt template:
45
- prompt: 'Represent this sentence for searching relevant passages: %s'
46
- batch_size: 10
47
- database_filename: null # ':memory:'
48
- collection: <%= ENV['OLLAMA_CHAT_COLLECTION'] %>
49
- found_texts_size: 4096
50
- found_texts_count: 10
51
- splitter:
52
- name: RecursiveCharacter
53
- chunk_size: 1024
54
- cache: Documentrix::Documents::SQLiteCache
55
- redis:
56
- documents:
57
- url: <%= ENV.fetch('REDIS_URL', 'null') %>
58
- expiring:
59
- url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
60
- ex: 86400
61
- debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
62
- ssl_no_verify: []
63
- copy: pbcopy
64
- EOT
7
+ DEFAULT_CONFIG = File.read(
8
+ Pathname.new(__FILE__).dirname.join('ollama_chat_config/default_config.yml')
9
+ )
65
10
 
66
11
  def initialize(filename = nil)
67
12
  @filename = filename || default_path
@@ -145,8 +145,7 @@ module OllamaChat::SourceFetching
145
145
  if l = @messages.at_location.full?
146
146
  query += " #{l}"
147
147
  end
148
- n = n.to_i
149
- n < 1 and n = 1
148
+ n = n.to_i.clamp(1..)
150
149
  query = URI.encode_uri_component(query)
151
150
  url = "https://www.duckduckgo.com/html/?q=#{query}"
152
151
  OllamaChat::Utils::Fetcher.get(url, debug: config.debug) do |tmp|
@@ -51,6 +51,16 @@ module OllamaChat::Switches
51
51
 
52
52
  attr_reader :markdown
53
53
 
54
+ attr_reader :stream
55
+
56
+ attr_reader :voice
57
+
58
+ attr_reader :embedding
59
+
60
+ attr_reader :embedding_enabled
61
+
62
+ attr_reader :embedding_paused
63
+
54
64
  attr_reader :location
55
65
 
56
66
  def setup_switches(config)
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.0'
3
+ VERSION = '0.0.2'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama_chat.gemspec CHANGED
@@ -1,19 +1,19 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.0 ruby lib
2
+ # stub: ollama_chat 0.0.2 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.0".freeze
6
+ s.version = "0.0.2".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
10
10
  s.authors = ["Florian Frank".freeze]
11
- s.date = "2025-01-29"
11
+ s.date = "2025-02-12"
12
12
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
13
13
  s.email = "flori@ping.de".freeze
14
14
  s.executables = ["ollama_chat".freeze]
15
15
  s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze]
16
- s.files = [".all_images.yml".freeze, ".gitignore".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "ollama_chat.gemspec".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, 
"spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
16
+ s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, 
"spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
17
17
  s.homepage = "https://github.com/flori/ollama_chat".freeze
18
18
  s.licenses = ["MIT".freeze]
19
19
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
data/redis/redis.conf ADDED
@@ -0,0 +1,5 @@
1
+ save 60 1000
2
+ dbfilename dump.rdb
3
+ appendonly yes
4
+ appendfilename "appendonly.aof"
5
+ appendfsync always
@@ -48,7 +48,7 @@ RSpec.describe OllamaChat::Chat do
48
48
 
49
49
  it 'Adds documents passed to app via -D option' do
50
50
  expect_any_instance_of(OllamaChat::Chat).to receive(:add_documents_from_argv).
51
- with(kind_of(Documentrix::Documents), [ asset('example.html') ])
51
+ with([ asset('example.html') ])
52
52
  chat
53
53
  end
54
54
  end
@@ -7,8 +7,12 @@ RSpec.describe OllamaChat::FollowChat do
7
7
  ]
8
8
  end
9
9
 
10
+ let :chat do
11
+ double('Chat', markdown: double(on?: false))
12
+ end
13
+
10
14
  let :follow_chat do
11
- described_class.new(messages:, output:)
15
+ described_class.new(chat:, messages:, output:)
12
16
  end
13
17
 
14
18
  let :output do
@@ -11,6 +11,9 @@ RSpec.describe OllamaChat::MessageList do
11
11
  ),
12
12
  prompts: double(
13
13
  location: 'You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}'
14
+ ),
15
+ system_prompts: double(
16
+ assistant?: 'You are a helpful assistant.'
14
17
  )
15
18
  )
16
19
  end
@@ -122,6 +125,19 @@ RSpec.describe OllamaChat::MessageList do
122
125
  %r(You are at Berlin \(52.514127, 13.475211\), on))
123
126
  end
124
127
 
128
+ it 'can be converted into an Ollama::Message array with location without a system prompt' do
129
+ expect(chat).to receive(:location).and_return(double(on?: true))
130
+ list = described_class.new(chat).tap do |list|
131
+ list << Ollama::Message.new(role: 'user', content: 'hello')
132
+ list << Ollama::Message.new(role: 'assistant', content: 'world')
133
+ end
134
+ first = list.to_ary.first
135
+ expect(first.role).to eq 'system'
136
+ expect(first.content).to match(
137
+ %r(You are a helpful assistant.\n\nYou are at Berlin \(52.514127, 13.475211\), on))
138
+ end
139
+
140
+
125
141
  it 'can display messages with images' do
126
142
  expect(list.message_type([])).to eq ?📨
127
143
  end
@@ -18,7 +18,6 @@ RSpec.describe OllamaChat::Switches do
18
18
  double(test?: false)
19
19
  end
20
20
 
21
-
22
21
  it 'can be switched on' do
23
22
  expect {
24
23
  switch.set(true)
metadata CHANGED
@@ -1,13 +1,13 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ollama_chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.0
4
+ version: 0.0.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Florian Frank
8
8
  bindir: bin
9
9
  cert_chain: []
10
- date: 2025-01-29 00:00:00.000000000 Z
10
+ date: 2025-02-12 00:00:00.000000000 Z
11
11
  dependencies:
12
12
  - !ruby/object:Gem::Dependency
13
13
  name: gem_hadar
@@ -379,12 +379,15 @@ extra_rdoc_files:
379
379
  - lib/ollama_chat/version.rb
380
380
  files:
381
381
  - ".all_images.yml"
382
+ - ".envrc"
382
383
  - ".gitignore"
384
+ - CHANGES.md
383
385
  - Gemfile
384
386
  - README.md
385
387
  - Rakefile
386
388
  - VERSION
387
389
  - bin/ollama_chat
390
+ - docker-compose.yml
388
391
  - lib/ollama_chat.rb
389
392
  - lib/ollama_chat/chat.rb
390
393
  - lib/ollama_chat/clipboard.rb
@@ -396,6 +399,7 @@ files:
396
399
  - lib/ollama_chat/message_type.rb
397
400
  - lib/ollama_chat/model_handling.rb
398
401
  - lib/ollama_chat/ollama_chat_config.rb
402
+ - lib/ollama_chat/ollama_chat_config/default_config.yml
399
403
  - lib/ollama_chat/parsing.rb
400
404
  - lib/ollama_chat/source_fetching.rb
401
405
  - lib/ollama_chat/switches.rb
@@ -406,6 +410,7 @@ files:
406
410
  - lib/ollama_chat/utils/file_argument.rb
407
411
  - lib/ollama_chat/version.rb
408
412
  - ollama_chat.gemspec
413
+ - redis/redis.conf
409
414
  - spec/assets/api_show.json
410
415
  - spec/assets/api_tags.json
411
416
  - spec/assets/conversation.json