ollama_chat 0.0.1 → 0.0.2
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- checksums.yaml +4 -4
- data/CHANGES.md +14 -0
- data/VERSION +1 -1
- data/lib/ollama_chat/message_list.rb +10 -1
- data/lib/ollama_chat/ollama_chat_config/default_config.yml +60 -0
- data/lib/ollama_chat/ollama_chat_config.rb +5 -60
- data/lib/ollama_chat/version.rb +1 -1
- data/ollama_chat.gemspec +4 -4
- data/spec/ollama_chat/message_list_spec.rb +16 -0
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5e53e7c97f1d4cb21ae23b70b3dc14f3dfcb183978f3e3f5ebf44fa392d60c7d
+  data.tar.gz: 5b249fdd1c0a3acfc132fd4388608f12a64f4528ca280cb7feb427bc3ebd7c2e
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: aaaf8a6011430a7bba371ef08059d3abd6493e7c43e766068942aefed893ed2378b9e5193a9a43233579c49e35db88520f62083c936ca4bd8528d0cb84d81b84
+  data.tar.gz: 6bb078ad3be012d0936a72303df5a44186fc0c7a95fc335124a6b37eb5affbb4e64bb26ccf8127268c6410ef4eff15e53405652934f388a581d0bfea82c0c1d3
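These are the SHA256/SHA512 digests of the two archives packed inside the released `.gem` file. As a sketch (the unpack step and file paths are assumptions based on the standard `.gem` layout, not part of this diff), the new values can be recomputed with Ruby's stdlib:

```ruby
require 'digest'

# A .gem file is a plain tar archive containing metadata.gz, data.tar.gz and
# checksums.yaml.gz. After unpacking (e.g. `tar -xf ollama_chat-0.0.2.gem`),
# the digests recorded in checksums.yaml can be recomputed like this:
%w[metadata.gz data.tar.gz].each do |name|
  data = File.binread(name)
  puts "SHA256 #{name}: #{Digest::SHA256.hexdigest(data)}"
  puts "SHA512 #{name}: #{Digest::SHA512.hexdigest(data)}"
end
```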
data/CHANGES.md
CHANGED
@@ -1,5 +1,19 @@
 # Changes
 
+## 2025-02-11 v0.0.2
+
+* Improved handling of location in MessageList class:
+  * Use assistant system prompt (`assistant_system_prompt`) for adding location
+    to message list, if no system prompt was defined.
+  * Updated spec to cover new behavior.
+* Simplified configuration defaults to be stored in `default_config.yml`:
+  - Replaced `DEFAULT_CONFIG` hash with a single line of code that reads from
+    `default_config.yml`
+  - Created new file `default_config.yml` in the same directory, containing the
+    old `DEFAULT_CONFIG` hash values
+  - Updated `initialize` method to use the new `default_config.yml` file if no
+    filename is provided
+
 ## 2025-02-02 v0.0.1
 
 * Renamed `documents` variable to `@documents` in `OllamaChat::Chat`
data/VERSION
CHANGED
@@ -1 +1 @@
-0.0.1
+0.0.2
data/lib/ollama_chat/message_list.rb
CHANGED
@@ -182,14 +182,23 @@ class OllamaChat::MessageList
   # messages in the list.
   def to_ary
     location = at_location.full?
-    @messages.map do |message|
+    add_system = !!location
+    result = @messages.map do |message|
       if message.role == 'system' && location
+        add_system = false
         content = message.content + "\n\n#{location}"
         Ollama::Message.new(role: message.role, content:)
       else
         message
       end
     end
+    if add_system
+      prompt  = @chat.config.system_prompts.assistant?
+      content = [ prompt, location ].compact * "\n\n"
+      message = Ollama::Message.new(role: 'system', content:)
+      result.unshift message
+    end
+    result
   end
 
   # The at_location method returns the location/time/units information as a
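The net effect: whenever location information is enabled, `to_ary` now guarantees a system message carrying it. An existing system message gets the location appended; otherwise one is synthesized from the assistant system prompt (`assistant?` is the query-style reader, which returns nil rather than raising when the key is unset, so `.compact` drops it). Below is a standalone sketch of the new logic, with a plain `Struct` standing in for `Ollama::Message` and hardcoded stand-ins for `at_location.full?` and `@chat.config.system_prompts.assistant?`:

```ruby
# Sketch only: mirrors the new to_ary flow outside the gem.
Message = Struct.new(:role, :content, keyword_init: true)

messages = [
  Message.new(role: 'user', content: 'hello'),
  Message.new(role: 'assistant', content: 'world'),
]
location = 'You are at Berlin (52.514127, 13.475211)' # stand-in for at_location.full?
prompt   = 'You are a helpful assistant.'             # stand-in for system_prompts.assistant?

add_system = !!location
result = messages.map do |message|
  if message.role == 'system' && location
    add_system = false # an existing system message absorbs the location
    Message.new(role: message.role, content: message.content + "\n\n#{location}")
  else
    message
  end
end
# No system message was present, so synthesize one from prompt + location.
if add_system
  result.unshift Message.new(role: 'system', content: [prompt, location].compact * "\n\n")
end

result.first.content
# => "You are a helpful assistant.\n\nYou are at Berlin (52.514127, 13.475211)"
```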
data/lib/ollama_chat/ollama_chat_config/default_config.yml
ADDED
@@ -0,0 +1,60 @@
+---
+url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
+proxy: null # http://localhost:8080
+model:
+  name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
+  options:
+    num_ctx: 8192
+location:
+  enabled: false
+  name: Berlin
+  decimal_degrees: [ 52.514127, 13.475211 ]
+  units: SI (International System of Units) # or USCS (United States Customary System)
+prompts:
+  embed: "This source was now embedded: %{source}"
+  summarize: |
+    Generate an abstract summary of the content in this document using
+    %{words} words:
+
+    %{source_content}
+  web: |
+    Answer the the query %{query} using these sources and summaries:
+
+    %{results}
+  location: You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}
+system_prompts:
+  default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
+  assistant: You are a helpful assistant.
+voice:
+  enabled: false
+  default: Samantha
+  list: <%= `say -v ? 2>/dev/null`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
+markdown: true
+stream: true
+document_policy: importing
+embedding:
+  enabled: true
+  model:
+    name: mxbai-embed-large
+    embedding_length: 1024
+    options: {}
+  # Retrieval prompt template:
+  prompt: 'Represent this sentence for searching relevant passages: %s'
+  batch_size: 10
+  database_filename: null # ':memory:'
+  collection: <%= ENV['OLLAMA_CHAT_COLLECTION'] %>
+  found_texts_size: 4096
+  found_texts_count: 10
+  splitter:
+    name: RecursiveCharacter
+    chunk_size: 1024
+  cache: Documentrix::Documents::SQLiteCache
+redis:
+  documents:
+    url: <%= ENV.fetch('REDIS_URL', 'null') %>
+  expiring:
+    url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
+    ex: 86400
+debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
+ssl_no_verify: []
+copy: pbcopy
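The `<%= %>` tags mean the file is ERB-expanded before being parsed as YAML; the gem loads it through ComplexConfig, which performs that expansion, so the stdlib-only sketch below is only an approximation of the loading path, shown for two of the keys above. The `%{...}` placeholders, by contrast, survive loading and are filled at runtime via Ruby's format-string interpolation.

```ruby
require 'erb'
require 'yaml'

ENV['OLLAMA_HOST'] ||= 'localhost:11434' # assumed host, just for the example

yaml = <<~YAML
  url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
  model:
    name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
YAML

# ERB first, YAML second: the template renders plain scalars, which YAML then types.
config = YAML.safe_load(ERB.new(yaml).result)
config['url']               # => "http://localhost:11434" (when OLLAMA_URL is unset)
config.dig('model', 'name') # => "llama3.1" (when OLLAMA_CHAT_MODEL is unset)
```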
data/lib/ollama_chat/ollama_chat_config.rb
CHANGED
@@ -1,67 +1,12 @@
+require 'pathname'
+
 class OllamaChat::OllamaChatConfig
   include ComplexConfig
   include FileUtils
 
-  DEFAULT_CONFIG = <<~EOT
-    ---
-    url: <%= ENV['OLLAMA_URL'] || 'http://%s' % ENV.fetch('OLLAMA_HOST') %>
-    proxy: null # http://localhost:8080
-    model:
-      name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
-      options:
-        num_ctx: 8192
-    location:
-      enabled: false
-      name: Berlin
-      decimal_degrees: [ 52.514127, 13.475211 ]
-      units: SI (International System of Units) # or USCS (United States Customary System)
-    prompts:
-      embed: "This source was now embedded: %{source}"
-      summarize: |
-        Generate an abstract summary of the content in this document using
-        %{words} words:
-
-        %{source_content}
-      web: |
-        Answer the the query %{query} using these sources and summaries:
-
-        %{results}
-    system_prompts:
-      default: <%= ENV.fetch('OLLAMA_CHAT_SYSTEM', 'null') %>
-    voice:
-      enabled: false
-      default: Samantha
-      list: <%= `say -v ? 2>/dev/null`.lines.map { _1[/^(.+?)\s+[a-z]{2}_[a-zA-Z0-9]{2,}/, 1] }.uniq.sort.to_s.force_encoding('ASCII-8BIT') %>
-    markdown: true
-    stream: true
-    document_policy: importing
-    embedding:
-      enabled: true
-      model:
-        name: mxbai-embed-large
-        embedding_length: 1024
-        options: {}
-      # Retrieval prompt template:
-      prompt: 'Represent this sentence for searching relevant passages: %s'
-      batch_size: 10
-      database_filename: null # ':memory:'
-      collection: <%= ENV['OLLAMA_CHAT_COLLECTION'] %>
-      found_texts_size: 4096
-      found_texts_count: 10
-      splitter:
-        name: RecursiveCharacter
-        chunk_size: 1024
-      cache: Documentrix::Documents::SQLiteCache
-    redis:
-      documents:
-        url: <%= ENV.fetch('REDIS_URL', 'null') %>
-      expiring:
-        url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
-        ex: 86400
-    debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
-    ssl_no_verify: []
-    copy: pbcopy
-  EOT
+  DEFAULT_CONFIG = File.read(
+    Pathname.new(__FILE__).dirname.join('ollama_chat_config/default_config.yml')
+  )
 
   def initialize(filename = nil)
     @filename = filename || default_path
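Since `File.read` now runs at class-definition time, `default_config.yml` must ship inside the gem next to this file, which is what the gemspec and metadata changes below take care of. A small sketch of the path resolution, using a hypothetical install prefix (only the relative layout matters):

```ruby
require 'pathname'

# Hypothetical install location standing in for __FILE__:
file = Pathname.new('/gems/ollama_chat-0.0.2/lib/ollama_chat/ollama_chat_config.rb')

# dirname drops the file name, join descends into the sibling directory:
path = file.dirname.join('ollama_chat_config/default_config.yml')
path.to_s
# => "/gems/ollama_chat-0.0.2/lib/ollama_chat/ollama_chat_config/default_config.yml"
```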
data/lib/ollama_chat/version.rb
CHANGED
data/ollama_chat.gemspec
CHANGED
@@ -1,19 +1,19 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama_chat 0.0.1 ruby lib
+# stub: ollama_chat 0.0.2 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "ollama_chat".freeze
-  s.version = "0.0.1".freeze
+  s.version = "0.0.2".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
   s.authors = ["Florian Frank".freeze]
-  s.date = "2025-02-
+  s.date = "2025-02-12"
   s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
   s.email = "flori@ping.de".freeze
   s.executables = ["ollama_chat".freeze]
   s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze]
-  s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
+  s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
   s.homepage = "https://github.com/flori/ollama_chat".freeze
   s.licenses = ["MIT".freeze]
   s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
data/spec/ollama_chat/message_list_spec.rb
CHANGED
@@ -11,6 +11,9 @@ RSpec.describe OllamaChat::MessageList do
       ),
       prompts: double(
         location: 'You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}'
+      ),
+      system_prompts: double(
+        assistant?: 'You are a helpful assistant.'
       )
     )
   end

@@ -122,6 +125,19 @@ RSpec.describe OllamaChat::MessageList do
       %r(You are at Berlin \(52.514127, 13.475211\), on))
   end
 
+  it 'can be converted int an Ollama::Message array with location without a system prompt' do
+    expect(chat).to receive(:location).and_return(double(on?: true))
+    list = described_class.new(chat).tap do |list|
+      list << Ollama::Message.new(role: 'user', content: 'hello')
+      list << Ollama::Message.new(role: 'assistant', content: 'world')
+    end
+    first = list.to_ary.first
+    expect(first.role).to eq 'system'
+    expect(first.content).to match(
+      %r(You are a helpful assistant.\n\nYou are at Berlin \(52.514127, 13.475211\), on))
+  end
+
+
   it 'can display messages with images' do
     expect(list.message_type([])).to eq ?📨
   end
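The `system_prompts: double(assistant?: ...)` stub in the first hunk exists because the new `to_ary` fallback calls `@chat.config.system_prompts.assistant?`, so every step of that chain must answer on the test double. A minimal illustration, not taken from the gem's suite:

```ruby
require 'rspec/autorun'

# Each nested double answers one step of chat.config.system_prompts.assistant?
RSpec.describe 'chained config double' do
  it 'resolves the stubbed assistant prompt' do
    chat = double(
      config: double(
        system_prompts: double(assistant?: 'You are a helpful assistant.')
      )
    )
    expect(chat.config.system_prompts.assistant?).to eq 'You are a helpful assistant.'
  end
end
```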
metadata
CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: ollama_chat
 version: !ruby/object:Gem::Version
-  version: 0.0.1
+  version: 0.0.2
 platform: ruby
 authors:
 - Florian Frank
 bindir: bin
 cert_chain: []
-date: 2025-02-
+date: 2025-02-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: gem_hadar

@@ -399,6 +399,7 @@ files:
 - lib/ollama_chat/message_type.rb
 - lib/ollama_chat/model_handling.rb
 - lib/ollama_chat/ollama_chat_config.rb
+- lib/ollama_chat/ollama_chat_config/default_config.yml
 - lib/ollama_chat/parsing.rb
 - lib/ollama_chat/source_fetching.rb
 - lib/ollama_chat/switches.rb