ollama_chat 0.0.1 → 0.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: e5c75809ec83f12f9b70645f426774dec435a2510db811fa861e9c862a1e5c71
4
- data.tar.gz: bca4f2fcf8e5e77ccc922858e4fe25db8933e5b09a9b264b94648a39feabc0c2
3
+ metadata.gz: 2d43fc32c5ffc8c706161380789195d95410721f3f03f7154218c1733036dd47
4
+ data.tar.gz: 1238495af8d19da4f6dc1538a8f452fc659192904b5b57dc32b99633199c1952
5
5
  SHA512:
6
- metadata.gz: a8e9c93775bfd1629a743436a7fd85b7b7801d7a69221a423b5abbe0a44d98afccc7f9c8eb5182fdfacd848b0d1b27140fb599c3794f77bd4fa2cec7a012a9fb
7
- data.tar.gz: 4f0460bb249fcd988a3425bdedc377ceaf5052f90c12563ef5fa01b72100857e30484622ed840b11147aed9cdc9e8a9c5cf4f9979c96fa317c8c27780d9fa990
6
+ metadata.gz: 8370cf8beedee8da4c6f9d985966037e9f16ef2bcfe772f65c6bb0c3e61c835386f1f58d02639adeab61b9dd67df2b59fcd6a816fc1ab1b837d53599a085e0c6
7
+ data.tar.gz: aa6471bdce5fd7398c2886a0df8b86023c41dc9c7e3ad9e216503c5e37b80437f7d3278e6fb7a43e638f04551e0fc521bdbbe2731528de1223886abb105a6a62
data/.all_images.yml CHANGED
@@ -1,13 +1,12 @@
1
1
  dockerfile: |-
2
- RUN apk add --no-cache build-base git
3
- RUN gem update --system
4
- RUN gem install gem_hadar bundler
2
+ RUN apk add --no-cache build-base git yaml-dev
3
+ RUN gem install gem_hadar
5
4
 
6
5
  script: &script |-
7
6
  echo -e "\e[1m"
8
7
  ruby -v
9
8
  rm -f Gemfile.lock
10
- bundle install --jobs=$(getconf _NPROCESSORS_ONLN) --full-index
9
+ bundle install --jobs=$(getconf _NPROCESSORS_ONLN)
11
10
  echo -e "\e[0m"
12
11
  rake test
13
12
 
data/CHANGES.md CHANGED
@@ -1,5 +1,39 @@
1
1
  # Changes
2
2
 
3
+ ## 2025-02-17 v0.0.3
4
+
5
+ * Support setting of request headers:
6
+ Added `request_headers` option to `default_config.yml`
7
+ * Updated `OllamaChat::SourceFetching` module to pass `config.request_headers?.to_h` to `Fetcher.get`
8
+ * Updated `OllamaChat::Utils::Fetcher.get` method to take an optional `headers:` parameter
9
+ * Updated tests for Fetcher utility to include new headers option
10
+ * Refactoring
11
+ * Added `connect_to_ollama_server` method to `spec_helper.rb`
12
+ * Stubbed API requests for tags, show, and version in this method
13
+ * Removed stubbing of API requests from individual specs
14
+ * Add support for ollama server version display:
15
+ * Add `server_version` method to display connected ollama server version
16
+ * Update `info` method to use new `server_version` method
17
+ * Add **6.6.6** as reported API version in `spec/assets/api_version.json`
18
+ * Updated chat spec to use 'test' collection:
19
+ * Updated `argv` let in OllamaChat::Chat describe block to pass '-C test'
20
+ option in order to be isolated from the 'default' collection
21
+ * Updated output of collection stats display to reflect 'test' collection
22
+
23
+ ## 2025-02-11 v0.0.2
24
+
25
+ * Improved handling of location in MessageList class:
26
+ * Use assistant system prompt (`assistant_system_prompt`) for adding location
27
+ to message list, if no system prompt was defined.
28
+ * Updated spec to cover new behavior.
29
+ * Simplified configuration defaults to be stored in `default_config.yml`:
30
+ - Replaced `DEFAULT_CONFIG` hash with a single line of code that reads from
31
+ `default_config.yml`
32
+ - Created new file `default_config.yml` in the same directory, containing the
33
+ old `DEFAULT_CONFIG` hash values
34
+ - Updated `initialize` method to use the new `default_config.yml` file if no
35
+ filename is provided
36
+
3
37
  ## 2025-02-02 v0.0.1
4
38
 
5
39
  * Renamed `documents` variable to `@documents` in `OllamaChat::Chat`
data/Rakefile CHANGED
@@ -30,7 +30,7 @@ GemHadar do
30
30
  executables << 'ollama_chat'
31
31
 
32
32
  dependency 'excon', '~> 1.0'
33
- dependency 'ollama-ruby', '~> 0.14'
33
+ dependency 'ollama-ruby', '~> 0.15'
34
34
  dependency 'documentrix', '~> 0.0'
35
35
  dependency 'rss', '~> 0.3'
36
36
  dependency 'term-ansicolor', '~> 1.11'
data/VERSION CHANGED
@@ -1 +1 @@
1
- 0.0.1
1
+ 0.0.3
@@ -37,6 +37,7 @@ class OllamaChat::Chat
37
37
  debug: config.debug,
38
38
  user_agent:
39
39
  )
40
+ server_version
40
41
  @document_policy = config.document_policy
41
42
  @model = choose_model(@opts[?m], config.model.name)
42
43
  @model_options = Ollama::Options[config.model.options]
@@ -28,6 +28,7 @@ module OllamaChat::Information
28
28
  end
29
29
 
30
30
  def info
31
+ STDOUT.puts "Connected to ollama server version: #{bold(server_version)}"
31
32
  STDOUT.puts "Current model is #{bold{@model}}."
32
33
  if @model_options.present?
33
34
  STDOUT.puts " Options: #{JSON.pretty_generate(@model_options).gsub(/(?<!\A)^/, ' ')}"
@@ -110,4 +111,8 @@ module OllamaChat::Information
110
111
  STDOUT.puts "%s %s" % [ progname, OllamaChat::VERSION ]
111
112
  0
112
113
  end
114
+
115
+ def server_version
116
+ @server_version ||= ollama.version.version
117
+ end
113
118
  end
@@ -182,14 +182,23 @@ class OllamaChat::MessageList
182
182
  # messages in the list.
183
183
  def to_ary
184
184
  location = at_location.full?
185
- @messages.map do |message|
185
+ add_system = !!location
186
+ result = @messages.map do |message|
186
187
  if message.role == 'system' && location
188
+ add_system = false
187
189
  content = message.content + "\n\n#{location}"
188
190
  Ollama::Message.new(role: message.role, content:)
189
191
  else
190
192
  message
191
193
  end
192
194
  end
195
+ if add_system
196
+ prompt = @chat.config.system_prompts.assistant?
197
+ content = [ prompt, location ].compact * "\n\n"
198
+ message = Ollama::Message.new(role: 'system', content:)
199
+ result.unshift message
200
+ end
201
+ result
193
202
  end
194
203
 
195
204
  # The at_location method returns the location/time/units information as a
@@ -0,0 +1,62 @@
1
+ ---
2
+ url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
3
+ proxy: null # http://localhost:8080
4
+ model:
5
+ name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
6
+ options:
7
+ num_ctx: 8192
8
+ location:
9
+ enabled: false
10
+ name: Berlin
11
+ decimal_degrees: [ 52.514127, 13.475211 ]
12
+ units: SI (International System of Units) # or USCS (United States Customary System)
13
+ prompts:
14
+ embed: "This source was now embedded: %{source}"
15
+ summarize: |
16
+ Generate an abstract summary of the content in this document using
17
+ %{words} words:
18
+
19
+ %{source_content}
20
+ web: |
21
+ Answer the the query %{query} using these sources and summaries:
22
+
23
+ %{results}
24
+ location: You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}
25
+ system_prompts:
26
+ default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
27
+ assistant: You are a helpful assistant.
28
+ voice:
29
+ enabled: false
30
+ default: Samantha
31
+ list: <%= `say -v ? 2>/dev/null`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
32
+ markdown: true
33
+ stream: true
34
+ document_policy: importing
35
+ embedding:
36
+ enabled: true
37
+ model:
38
+ name: mxbai-embed-large
39
+ embedding_length: 1024
40
+ options: {}
41
+ # Retrieval prompt template:
42
+ prompt: 'Represent this sentence for searching relevant passages: %s'
43
+ batch_size: 10
44
+ database_filename: null # ':memory:'
45
+ collection: <%= ENV['OLLAMA_CHAT_COLLECTION'] %>
46
+ found_texts_size: 4096
47
+ found_texts_count: 10
48
+ splitter:
49
+ name: RecursiveCharacter
50
+ chunk_size: 1024
51
+ cache: Documentrix::Documents::SQLiteCache
52
+ redis:
53
+ documents:
54
+ url: <%= ENV.fetch('REDIS_URL', 'null') %>
55
+ expiring:
56
+ url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
57
+ ex: 86400
58
+ debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
59
+ request_headers:
60
+ Accept: 'text/*,application/*,image/*'
61
+ ssl_no_verify: []
62
+ copy: pbcopy
@@ -1,67 +1,12 @@
1
+ require 'pathname'
2
+
1
3
  class OllamaChat::OllamaChatConfig
2
4
  include ComplexConfig
3
5
  include FileUtils
4
6
 
5
- DEFAULT_CONFIG = <<~EOT
6
- ---
7
- url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
8
- proxy: null # http://localhost:8080
9
- model:
10
- name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
11
- options:
12
- num_ctx: 8192
13
- location:
14
- enabled: false
15
- name: Berlin
16
- decimal_degrees: [ 52.514127, 13.475211 ]
17
- units: SI (International System of Units) # or USCS (United States Customary System)
18
- prompts:
19
- embed: "This source was now embedded: %{source}"
20
- summarize: |
21
- Generate an abstract summary of the content in this document using
22
- %{words} words:
23
-
24
- %{source_content}
25
- web: |
26
- Answer the the query %{query} using these sources and summaries:
27
-
28
- %{results}
29
- system_prompts:
30
- default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
31
- voice:
32
- enabled: false
33
- default: Samantha
34
- list: <%= `say -v ? 2>/dev/null`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
35
- markdown: true
36
- stream: true
37
- document_policy: importing
38
- embedding:
39
- enabled: true
40
- model:
41
- name: mxbai-embed-large
42
- embedding_length: 1024
43
- options: {}
44
- # Retrieval prompt template:
45
- prompt: 'Represent this sentence for searching relevant passages: %s'
46
- batch_size: 10
47
- database_filename: null # ':memory:'
48
- collection: <%= ENV['OLLAMA_CHAT_COLLECTION'] %>
49
- found_texts_size: 4096
50
- found_texts_count: 10
51
- splitter:
52
- name: RecursiveCharacter
53
- chunk_size: 1024
54
- cache: Documentrix::Documents::SQLiteCache
55
- redis:
56
- documents:
57
- url: <%= ENV.fetch('REDIS_URL', 'null') %>
58
- expiring:
59
- url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
60
- ex: 86400
61
- debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
62
- ssl_no_verify: []
63
- copy: pbcopy
64
- EOT
7
+ DEFAULT_CONFIG = File.read(
8
+ Pathname.new(__FILE__).dirname.join('ollama_chat_config/default_config.yml')
9
+ )
65
10
 
66
11
  def initialize(filename = nil)
67
12
  @filename = filename || default_path
@@ -22,6 +22,7 @@ module OllamaChat::SourceFetching
22
22
  links.add(source.to_s)
23
23
  OllamaChat::Utils::Fetcher.get(
24
24
  source,
25
+ headers: config.request_headers?.to_h,
25
26
  cache: @cache,
26
27
  debug: config.debug,
27
28
  http_options: http_options(OllamaChat::Utils::Fetcher.normalize_url(source))
@@ -148,7 +149,11 @@ module OllamaChat::SourceFetching
148
149
  n = n.to_i.clamp(1..)
149
150
  query = URI.encode_uri_component(query)
150
151
  url = "https://www.duckduckgo.com/html/?q=#{query}"
151
- OllamaChat::Utils::Fetcher.get(url, debug: config.debug) do |tmp|
152
+ OllamaChat::Utils::Fetcher.get(
153
+ url,
154
+ headers: config.request_headers?.to_h,
155
+ debug: config.debug
156
+ ) do |tmp|
152
157
  result = []
153
158
  doc = Nokogiri::HTML(tmp)
154
159
  doc.css('.results_links').each do |link|
@@ -20,14 +20,14 @@ class OllamaChat::Utils::Fetcher
20
20
 
21
21
  class RetryWithoutStreaming < StandardError; end
22
22
 
23
- def self.get(url, **options, &block)
23
+ def self.get(url, headers: {}, **options, &block)
24
24
  cache = options.delete(:cache) and
25
25
  cache = OllamaChat::Utils::CacheFetcher.new(cache)
26
26
  if result = cache&.get(url, &block)
27
27
  infobar.puts "Getting #{url.to_s.inspect} from cache."
28
28
  return result
29
29
  else
30
- new(**options).send(:get, url) do |tmp|
30
+ new(**options).send(:get, url, headers:) do |tmp|
31
31
  result = block.(tmp)
32
32
  if cache && !tmp.is_a?(StringIO)
33
33
  tmp.rewind
@@ -91,7 +91,9 @@ class OllamaChat::Utils::Fetcher
91
91
  Excon.new(url, options.merge(@http_options))
92
92
  end
93
93
 
94
- def get(url, &block)
94
+ def get(url, headers: {}, &block)
95
+ headers |= self.headers
96
+ headers = headers.transform_keys(&:to_s)
95
97
  response = nil
96
98
  Tempfile.open do |tmp|
97
99
  infobar.label = 'Getting'
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.1'
3
+ VERSION = '0.0.3'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama_chat.gemspec CHANGED
@@ -1,19 +1,19 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.1 ruby lib
2
+ # stub: ollama_chat 0.0.3 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.1".freeze
6
+ s.version = "0.0.3".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
10
10
  s.authors = ["Florian Frank".freeze]
11
- s.date = "2025-02-02"
11
+ s.date = "2025-02-17"
12
12
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
13
13
  s.email = "flori@ping.de".freeze
14
14
  s.executables = ["ollama_chat".freeze]
15
15
  s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze]
16
- s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, 
"spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
16
+ s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, 
"spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
17
17
  s.homepage = "https://github.com/flori/ollama_chat".freeze
18
18
  s.licenses = ["MIT".freeze]
19
19
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
@@ -32,7 +32,7 @@ Gem::Specification.new do |s|
32
32
  s.add_development_dependency(%q<debug>.freeze, [">= 0".freeze])
33
33
  s.add_development_dependency(%q<simplecov>.freeze, [">= 0".freeze])
34
34
  s.add_runtime_dependency(%q<excon>.freeze, ["~> 1.0".freeze])
35
- s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 0.14".freeze])
35
+ s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 0.15".freeze])
36
36
  s.add_runtime_dependency(%q<documentrix>.freeze, ["~> 0.0".freeze])
37
37
  s.add_runtime_dependency(%q<rss>.freeze, ["~> 0.3".freeze])
38
38
  s.add_runtime_dependency(%q<term-ansicolor>.freeze, ["~> 1.11".freeze])
@@ -0,0 +1,3 @@
1
+ {
2
+ "version": "6.6.6"
3
+ }
@@ -2,19 +2,14 @@ require 'spec_helper'
2
2
 
3
3
  RSpec.describe OllamaChat::Chat do
4
4
  let :argv do
5
- []
5
+ %w[ -C test ]
6
6
  end
7
7
 
8
8
  let :chat do
9
9
  OllamaChat::Chat.new argv: argv
10
10
  end
11
11
 
12
- before do
13
- stub_request(:get, %r(/api/tags\z)).
14
- to_return(status: 200, body: asset_json('api_tags.json'))
15
- stub_request(:post, %r(/api/show\z)).
16
- to_return(status: 200, body: asset_json('api_show.json'))
17
- end
12
+ connect_to_ollama_server(instantiate: false)
18
13
 
19
14
  it 'can be instantiated' do
20
15
  expect(chat).to be_a described_class
@@ -43,7 +38,7 @@ RSpec.describe OllamaChat::Chat do
43
38
  describe Documentrix::Documents do
44
39
  context 'with documents' do
45
40
  let :argv do
46
- %w[ -D ] << asset('example.html')
41
+ %w[ -C test -D ] << asset('example.html')
47
42
  end
48
43
 
49
44
  it 'Adds documents passed to app via -D option' do
@@ -66,7 +61,7 @@ RSpec.describe OllamaChat::Chat do
66
61
  it 'can display collection_stats' do
67
62
  chat
68
63
  expect(STDOUT).to receive(:puts).with(
69
- "Current Collection\n Name: \e[1mdefault\e[0m\n #Embeddings: 0\n #Tags: 0\n Tags: \n"
64
+ "Current Collection\n Name: \e[1mtest\e[0m\n #Embeddings: 0\n #Tags: 0\n Tags: \n"
70
65
  )
71
66
  expect(chat.collection_stats).to be_nil
72
67
  end
@@ -76,6 +71,7 @@ RSpec.describe OllamaChat::Chat do
76
71
  expect(STDOUT).to receive(:puts).
77
72
  with(
78
73
  /
74
+ Connected\ to\ ollama\ server|
79
75
  Current\ model|
80
76
  Options|
81
77
  Embedding|
@@ -5,13 +5,7 @@ RSpec.describe OllamaChat::Clipboard do
5
5
  OllamaChat::Chat.new
6
6
  end
7
7
 
8
- before do
9
- stub_request(:get, %r(/api/tags\z)).
10
- to_return(status: 200, body: asset_json('api_tags.json'))
11
- stub_request(:post, %r(/api/show\z)).
12
- to_return(status: 200, body: asset_json('api_show.json'))
13
- chat
14
- end
8
+ connect_to_ollama_server
15
9
 
16
10
  it 'can copy to clipboard' do
17
11
  expect(STDERR).to receive(:puts).with(/No response available to copy to the system clipboard/)
@@ -5,13 +5,7 @@ RSpec.describe OllamaChat::Information do
5
5
  OllamaChat::Chat.new
6
6
  end
7
7
 
8
- before do
9
- stub_request(:get, %r(/api/tags\z)).
10
- to_return(status: 200, body: asset_json('api_tags.json'))
11
- stub_request(:post, %r(/api/show\z)).
12
- to_return(status: 200, body: asset_json('api_show.json'))
13
- chat
14
- end
8
+ connect_to_ollama_server
15
9
 
16
10
  describe ::OllamaChat::Information::UserAgent do
17
11
  it 'has progname' do
@@ -29,6 +23,7 @@ RSpec.describe OllamaChat::Information do
29
23
  end
30
24
 
31
25
  it 'can show info' do
26
+ expect(STDOUT).to receive(:puts).with(/Connected to ollama server version/)
32
27
  expect(STDOUT).to receive(:puts).with(/Current model is/)
33
28
  expect(STDOUT).to receive(:puts).at_least(1)
34
29
  expect(chat.info).to be_nil
@@ -11,6 +11,9 @@ RSpec.describe OllamaChat::MessageList do
11
11
  ),
12
12
  prompts: double(
13
13
  location: 'You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}'
14
+ ),
15
+ system_prompts: double(
16
+ assistant?: 'You are a helpful assistant.'
14
17
  )
15
18
  )
16
19
  end
@@ -122,6 +125,19 @@ RSpec.describe OllamaChat::MessageList do
122
125
  %r(You are at Berlin \(52.514127, 13.475211\), on))
123
126
  end
124
127
 
128
+ it 'can be converted int an Ollama::Message array with location without a system prompt' do
129
+ expect(chat).to receive(:location).and_return(double(on?: true))
130
+ list = described_class.new(chat).tap do |list|
131
+ list << Ollama::Message.new(role: 'user', content: 'hello')
132
+ list << Ollama::Message.new(role: 'assistant', content: 'world')
133
+ end
134
+ first = list.to_ary.first
135
+ expect(first.role).to eq 'system'
136
+ expect(first.content).to match(
137
+ %r(You are a helpful assistant.\n\nYou are at Berlin \(52.514127, 13.475211\), on))
138
+ end
139
+
140
+
125
141
  it 'can display messages with images' do
126
142
  expect(list.message_type([])).to eq ?📨
127
143
  end
@@ -5,13 +5,7 @@ RSpec.describe OllamaChat::ModelHandling do
5
5
  OllamaChat::Chat.new
6
6
  end
7
7
 
8
- before do
9
- stub_request(:get, %r(/api/tags\z)).
10
- to_return(status: 200, body: asset_json('api_tags.json'))
11
- stub_request(:post, %r(/api/show\z)).
12
- to_return(status: 200, body: asset_json('api_show.json'))
13
- chat
14
- end
8
+ connect_to_ollama_server
15
9
 
16
10
  it 'can check if model_present?' do
17
11
  expect(chat.ollama).to receive(:show).and_raise Ollama::Errors::NotFoundError
@@ -6,13 +6,7 @@ RSpec.describe OllamaChat::Parsing do
6
6
  OllamaChat::Chat.new
7
7
  end
8
8
 
9
- before do
10
- stub_request(:get, %r(/api/tags\z)).
11
- to_return(status: 200, body: asset_json('api_tags.json'))
12
- stub_request(:post, %r(/api/show\z)).
13
- to_return(status: 200, body: asset_json('api_show.json'))
14
- chat
15
- end
9
+ connect_to_ollama_server
16
10
 
17
11
  describe '#parse_source' do
18
12
  it 'can parse HTML' do
@@ -5,13 +5,7 @@ RSpec.describe OllamaChat::SourceFetching do
5
5
  OllamaChat::Chat.new
6
6
  end
7
7
 
8
- before do
9
- stub_request(:get, %r(/api/tags\z)).
10
- to_return(status: 200, body: asset_json('api_tags.json'))
11
- stub_request(:post, %r(/api/show\z)).
12
- to_return(status: 200, body: asset_json('api_show.json'))
13
- allow(chat).to receive(:location).and_return(double(on?: false))
14
- end
8
+ connect_to_ollama_server
15
9
 
16
10
  it 'can import' do
17
11
  expect(chat.import('./spec/assets/example.html')).to start_with(<<~EOT)
@@ -33,6 +33,21 @@ RSpec.describe OllamaChat::Utils::Fetcher do
33
33
  end
34
34
  end
35
35
 
36
+ it 'can #get with headers' do
37
+ stub_request(:get, url).
38
+ with(headers: { 'Accept' => 'text/html' } | fetcher.headers).
39
+ to_return(
40
+ status: 200,
41
+ body: 'world',
42
+ headers: { 'Content-Type' => 'text/plain' },
43
+ )
44
+ fetcher.get(url, headers: { 'Accept' => 'text/html' }) do |tmp|
45
+ expect(tmp).to be_a Tempfile
46
+ expect(tmp.read).to eq 'world'
47
+ expect(tmp.content_type).to eq 'text/plain'
48
+ end
49
+ end
50
+
36
51
  it 'can #get without ssl peer verification' do
37
52
  fetcher = described_class.new(
38
53
  http_options: { ssl_verify_peer: false }
data/spec/spec_helper.rb CHANGED
@@ -39,6 +39,18 @@ def asset_json(name)
39
39
  JSON(JSON(File.read(asset(name))))
40
40
  end
41
41
 
42
+ def connect_to_ollama_server(instantiate: true)
43
+ before do
44
+ stub_request(:get, %r(/api/tags\z)).
45
+ to_return(status: 200, body: asset_json('api_tags.json'))
46
+ stub_request(:post, %r(/api/show\z)).
47
+ to_return(status: 200, body: asset_json('api_show.json'))
48
+ stub_request(:get, %r(/api/version\z)).
49
+ to_return(status: 200, body: asset_json('api_version.json'))
50
+ instantiate and chat
51
+ end
52
+ end
53
+
42
54
  RSpec.configure do |config|
43
55
  config.before(:suite) do
44
56
  infobar.show = nil
metadata CHANGED
@@ -1,13 +1,13 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ollama_chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.1
4
+ version: 0.0.3
5
5
  platform: ruby
6
6
  authors:
7
7
  - Florian Frank
8
8
  bindir: bin
9
9
  cert_chain: []
10
- date: 2025-02-02 00:00:00.000000000 Z
10
+ date: 2025-02-17 00:00:00.000000000 Z
11
11
  dependencies:
12
12
  - !ruby/object:Gem::Dependency
13
13
  name: gem_hadar
@@ -127,14 +127,14 @@ dependencies:
127
127
  requirements:
128
128
  - - "~>"
129
129
  - !ruby/object:Gem::Version
130
- version: '0.14'
130
+ version: '0.15'
131
131
  type: :runtime
132
132
  prerelease: false
133
133
  version_requirements: !ruby/object:Gem::Requirement
134
134
  requirements:
135
135
  - - "~>"
136
136
  - !ruby/object:Gem::Version
137
- version: '0.14'
137
+ version: '0.15'
138
138
  - !ruby/object:Gem::Dependency
139
139
  name: documentrix
140
140
  requirement: !ruby/object:Gem::Requirement
@@ -399,6 +399,7 @@ files:
399
399
  - lib/ollama_chat/message_type.rb
400
400
  - lib/ollama_chat/model_handling.rb
401
401
  - lib/ollama_chat/ollama_chat_config.rb
402
+ - lib/ollama_chat/ollama_chat_config/default_config.yml
402
403
  - lib/ollama_chat/parsing.rb
403
404
  - lib/ollama_chat/source_fetching.rb
404
405
  - lib/ollama_chat/switches.rb
@@ -412,6 +413,7 @@ files:
412
413
  - redis/redis.conf
413
414
  - spec/assets/api_show.json
414
415
  - spec/assets/api_tags.json
416
+ - spec/assets/api_version.json
415
417
  - spec/assets/conversation.json
416
418
  - spec/assets/duckduckgo.html
417
419
  - spec/assets/example.atom