ollama_chat 0.0.22 → 0.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGES.md +29 -0
- data/README.md +28 -0
- data/VERSION +1 -1
- data/lib/ollama_chat/chat.rb +6 -4
- data/lib/ollama_chat/follow_chat.rb +2 -2
- data/lib/ollama_chat/kramdown_ansi.rb +31 -0
- data/lib/ollama_chat/message_list.rb +3 -3
- data/lib/ollama_chat/parsing.rb +5 -0
- data/lib/ollama_chat/version.rb +1 -1
- data/lib/ollama_chat.rb +1 -0
- data/ollama_chat.gemspec +4 -4
- data/spec/ollama_chat/message_list_spec.rb +6 -2
- data/spec/ollama_chat/parsing_spec.rb +1 -1
- metadata +3 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 7811d92313881ed278cca428c652ec13b6e52288a7f403f0dd88d771f8c7f494
|
4
|
+
data.tar.gz: 950605c809843f1b81ff94795f0db0ae062b9e8485f85dbe4ad23eded4001507
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 60980bfcfd8aca4de1c7bbbe4451d08d99512bd70b438e63f7fca5dbb4bbbcf9e2855477f7e6cf26d38ed9d673a36beca565d8fe29bacd99e66a34a01de114bc
|
7
|
+
data.tar.gz: e1b5a35b0a24c870668d59670d405fce3f6d654bc801e280677a43b04ad720c564d1dbc48e058464b00e159bac1e5d27d5782ec045fce97d490046064ff5a560
|
data/CHANGES.md
CHANGED
@@ -1,5 +1,34 @@
|
|
1
1
|
# Changes
|
2
2
|
|
3
|
+
## 2025-08-17 v0.0.23
|
4
|
+
|
5
|
+
- Added `OllamaChat::KramdownANSI` module with `configure_kramdown_ansi_styles` and `kramdown_ansi_parse` methods for consistent Markdown formatting
|
6
|
+
- Replaced direct calls to `Kramdown::ANSI.parse` with `@chat.kramdown_ansi_parse` in `FollowChat` and `MessageList`
|
7
|
+
- Integrated `OllamaChat::KramdownANSI` module into `OllamaChat::Chat` class
|
8
|
+
- Configured `@kramdown_ansi_styles` during chat initialization
|
9
|
+
- Added support for environment variables `KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES` and `KRAMDOWN_ANSI_STYLES` for styling configuration
|
10
|
+
- Updated tests to mock `kramdown_ansi_parse` instead of direct `Kramdown::ANSI.parse`
|
11
|
+
- Documented environment variables for customizing Markdown formatting with example JSON format
|
12
|
+
- Added `lib/ollama_chat/kramdown_ansi.rb` to `extra_rdoc_files` and `files` list in gemspec
|
13
|
+
- Escaped dot in regex pattern in `parsing_spec.rb` for proper image file matching
|
14
|
+
- Implemented `File.expand_path` to resolve `~` shortcuts before existence check in parsing module
|
15
|
+
- Added error handling for malformed paths by rescuing `ArgumentError` exceptions
|
16
|
+
- Skipped invalid file paths during processing loop using `next` statement
|
17
|
+
- Maintained backward compatibility for standard file paths
|
18
|
+
- Added comprehensive list of supported environment variables in documentation
|
19
|
+
|
20
|
+
## 2025-08-13 v0.0.22
|
21
|
+
|
22
|
+
- Added new `-p` command line flag for enabling source parsing functionality
|
23
|
+
- Enhanced `send_to_server_socket` method to accept and pass a `parse` parameter
|
24
|
+
- Modified `chat.rb` to handle the `parse` content flag from server messages
|
25
|
+
- Updated documentation in `README.md` with example usage of the new `-p` flag
|
26
|
+
- Added comprehensive tests for the new parsing functionality in `server_socket_spec.rb`
|
27
|
+
- Improved method documentation in `server_socket.rb` with detailed parameter descriptions
|
28
|
+
- Replaced `messages.list_conversation(2)` with `messages.show_last` in `/drop` command behavior
|
29
|
+
- Updated `gem_hadar` development dependency from version **1.27** to **2.0**
|
30
|
+
- Simplified SimpleCov setup by using `GemHadar::SimpleCov.start` instead of manual configuration
|
31
|
+
|
3
32
|
## 2025-08-11 v0.0.21
|
4
33
|
|
5
34
|
* **Vim Integration**: The `/vim` command allows users to insert the last chat
|
data/README.md
CHANGED
@@ -15,6 +15,34 @@ gem install ollama_chat
|
|
15
15
|
|
16
16
|
in your terminal.
|
17
17
|
|
18
|
+
## Configuration
|
19
|
+
|
20
|
+
### Environment Variables
|
21
|
+
|
22
|
+
The following environment variables can be used to configure behavior:
|
23
|
+
|
24
|
+
- `OLLAMA_URL` - Base URL for Ollama server (default: `http://localhost:11434`)
|
25
|
+
- `OLLAMA_HOST` - Base URL for Ollama server (default: `localhost:11434`)
|
26
|
+
- `OLLAMA_MODEL` - Default model to use (e.g., `llama3.1`)
|
27
|
+
- `KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES` - Custom ANSI styles for Markdown formatting
|
28
|
+
- `KRAMDOWN_ANSI_STYLES` - Fallback ANSI styles configuration
|
29
|
+
- `OLLAMA_CHAT_SYSTEM` - System prompt file or content (default: `null`)
|
30
|
+
- `OLLAMA_CHAT_COLLECTION` - Collection name for embeddings
|
31
|
+
- `PAGER` - Default pager for output
|
32
|
+
- `REDIS_URL` - Redis connection URL for caching
|
33
|
+
- `REDIS_EXPIRING_URL` - Redis connection URL for expiring data
|
34
|
+
- `OLLAMA_CHAT_HISTORY` - Chat history filename (default: `~/.ollama_chat_history`)
|
35
|
+
- `OLLAMA_CHAT_DEBUG` - Debug mode toggle (1 = enabled)
|
36
|
+
- `DIFF_TOOL` - Tool for diff operations (default: `vimdiff`)
|
37
|
+
- `OLLAMA_SEARXNG_URL` - SearxNG search endpoint URL
|
38
|
+
|
39
|
+
Example usage for `KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES`:
|
40
|
+
|
41
|
+
```bash
|
42
|
+
# Set custom ANSI colors for Markdown output as a JSON object:
|
43
|
+
export KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES='{"header":["bold","on_color241","white"],"strong":["bold","color76"],"em":["italic","color227"],"code":["bold","color214"]}'
|
44
|
+
```
|
45
|
+
|
18
46
|
## Usage
|
19
47
|
|
20
48
|
It can be started with the following arguments:
|
data/VERSION
CHANGED
@@ -1 +1 @@
|
|
1
|
-
0.0.22
|
1
|
+
0.0.23
|
data/lib/ollama_chat/chat.rb
CHANGED
@@ -33,6 +33,7 @@ class OllamaChat::Chat
|
|
33
33
|
include OllamaChat::MessageFormat
|
34
34
|
include OllamaChat::History
|
35
35
|
include OllamaChat::ServerSocket
|
36
|
+
include OllamaChat::KramdownANSI
|
36
37
|
|
37
38
|
# Initializes a new OllamaChat::Chat instance with the given command-line
|
38
39
|
# arguments.
|
@@ -97,10 +98,11 @@ class OllamaChat::Chat
|
|
97
98
|
system.present? and messages.set_system_prompt(system)
|
98
99
|
end
|
99
100
|
end
|
100
|
-
@documents = setup_documents
|
101
|
-
@cache = setup_cache
|
102
|
-
@current_voice = config.voice.default
|
103
|
-
@images = []
|
101
|
+
@documents = setup_documents
|
102
|
+
@cache = setup_cache
|
103
|
+
@current_voice = config.voice.default
|
104
|
+
@images = []
|
105
|
+
@kramdown_ansi_styles = configure_kramdown_ansi_styles
|
104
106
|
init_chat_history
|
105
107
|
@opts[?S] and init_server_socket
|
106
108
|
rescue ComplexConfig::AttributeMissing, ComplexConfig::ConfigurationSyntaxError => e
|
@@ -102,9 +102,9 @@ class OllamaChat::FollowChat
|
|
102
102
|
def display_formatted_terminal_output
|
103
103
|
content, thinking = @messages.last.content, @messages.last.thinking
|
104
104
|
if @chat.markdown.on?
|
105
|
-
content = talk_annotate { Kramdown::ANSI.parse(content) }
|
105
|
+
content = talk_annotate { @chat.kramdown_ansi_parse(content) }
|
106
106
|
if @chat.think.on?
|
107
|
-
thinking = think_annotate { Kramdown::ANSI.parse(content) }
|
107
|
+
thinking = think_annotate { @chat.kramdown_ansi_parse(content) }
|
108
108
|
end
|
109
109
|
else
|
110
110
|
content = talk_annotate { content }
|
@@ -0,0 +1,31 @@
|
|
1
|
+
module OllamaChat::KramdownANSI
|
2
|
+
# The configure_kramdown_ansi_styles method sets up ANSI styling for
|
3
|
+
# Kramdown::ANSI output by checking for specific environment variables and
|
4
|
+
# falling back to default styles.
|
5
|
+
#
|
6
|
+
# @return [ Hash ] a hash of ANSI styles configured either from environment
|
7
|
+
# variables or using default settings
|
8
|
+
def configure_kramdown_ansi_styles
|
9
|
+
if env_var = %w[ KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES KRAMDOWN_ANSI_STYLES ].find { ENV.key?(_1) }
|
10
|
+
Kramdown::ANSI::Styles.from_env_var(env_var).ansi_styles
|
11
|
+
else
|
12
|
+
Kramdown::ANSI::Styles.new.ansi_styles
|
13
|
+
end
|
14
|
+
end
|
15
|
+
|
16
|
+
# The kramdown_ansi_parse method processes content using Kramdown::ANSI with
|
17
|
+
# custom ANSI styles.
|
18
|
+
#
|
19
|
+
# This method takes raw content and converts it into formatted ANSI output by
|
20
|
+
# applying the instance's configured ANSI styles. It is used to render
|
21
|
+
# content with appropriate terminal formatting based on the application's
|
22
|
+
# styling configuration.
|
23
|
+
#
|
24
|
+
# @param content [ String ] the raw content to be parsed and formatted
|
25
|
+
#
|
26
|
+
# @return [ String ] the content formatted with ANSI escape sequences
|
27
|
+
# according to the configured styles
|
28
|
+
def kramdown_ansi_parse(content)
|
29
|
+
Kramdown::ANSI.parse(content, ansi_styles: @kramdown_ansi_styles)
|
30
|
+
end
|
31
|
+
end
|
@@ -193,7 +193,7 @@ class OllamaChat::MessageList
|
|
193
193
|
#
|
194
194
|
# @return [self, NilClass] nil if the system prompt is empty, otherwise self.
|
195
195
|
def show_system_prompt
|
196
|
-
system_prompt = Kramdown::ANSI.parse(system.to_s).gsub(/\n+\z/, '').full?
|
196
|
+
system_prompt = @chat.kramdown_ansi_parse(system.to_s).gsub(/\n+\z/, '').full?
|
197
197
|
system_prompt or return
|
198
198
|
STDOUT.puts <<~EOT
|
199
199
|
Configured system prompt is:
|
@@ -307,10 +307,10 @@ class OllamaChat::MessageList
|
|
307
307
|
end
|
308
308
|
thinking = if @chat.think.on?
|
309
309
|
think_annotate do
|
310
|
-
message.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
|
310
|
+
message.thinking.full? { @chat.markdown.on? ? @chat.kramdown_ansi_parse(_1) : _1 }
|
311
311
|
end
|
312
312
|
end
|
313
|
-
content = message.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
|
313
|
+
content = message.content.full? { @chat.markdown.on? ? @chat.kramdown_ansi_parse(_1) : _1 }
|
314
314
|
message_text = message_type(message.images) + " "
|
315
315
|
message_text += bold { color(role_color) { message.role } }
|
316
316
|
if thinking
|
data/lib/ollama_chat/parsing.rb
CHANGED
@@ -200,6 +200,11 @@ module OllamaChat::Parsing
|
|
200
200
|
when file
|
201
201
|
file = file.sub(/#.*/, '')
|
202
202
|
file =~ %r(\A[~./]) or file.prepend('./')
|
203
|
+
file = begin
|
204
|
+
File.expand_path(file)
|
205
|
+
rescue ArgumentError
|
206
|
+
next
|
207
|
+
end
|
203
208
|
File.exist?(file) or next
|
204
209
|
source = file
|
205
210
|
when url
|
data/lib/ollama_chat/version.rb
CHANGED
data/lib/ollama_chat.rb
CHANGED
data/ollama_chat.gemspec
CHANGED
@@ -1,9 +1,9 @@
|
|
1
1
|
# -*- encoding: utf-8 -*-
|
2
|
-
# stub: ollama_chat 0.0.22 ruby lib
|
2
|
+
# stub: ollama_chat 0.0.23 ruby lib
|
3
3
|
|
4
4
|
Gem::Specification.new do |s|
|
5
5
|
s.name = "ollama_chat".freeze
|
6
|
-
s.version = "0.0.22".freeze
|
6
|
+
s.version = "0.0.23".freeze
|
7
7
|
|
8
8
|
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
|
9
9
|
s.require_paths = ["lib".freeze]
|
@@ -12,8 +12,8 @@ Gem::Specification.new do |s|
|
|
12
12
|
s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
|
13
13
|
s.email = "flori@ping.de".freeze
|
14
14
|
s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
|
15
|
-
s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
|
16
|
-
s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, 
"spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
|
15
|
+
s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
|
16
|
+
s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, 
"spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
|
17
17
|
s.homepage = "https://github.com/flori/ollama_chat".freeze
|
18
18
|
s.licenses = ["MIT".freeze]
|
19
19
|
s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
|
@@ -23,6 +23,12 @@ describe OllamaChat::MessageList do
|
|
23
23
|
double('Chat', config:)
|
24
24
|
end
|
25
25
|
|
26
|
+
before do
|
27
|
+
allow(chat).to receive(:kramdown_ansi_parse) do |content|
|
28
|
+
Kramdown::ANSI.parse(content)
|
29
|
+
end
|
30
|
+
end
|
31
|
+
|
26
32
|
let :list do
|
27
33
|
described_class.new(chat).tap do |list|
|
28
34
|
list << Ollama::Message.new(role: 'system', content: 'hello')
|
@@ -133,8 +139,6 @@ describe OllamaChat::MessageList do
|
|
133
139
|
|
134
140
|
it 'can show_system_prompt' do
|
135
141
|
expect(list).to receive(:system).and_return 'test **prompt**'
|
136
|
-
expect(Kramdown::ANSI).to receive(:parse).with('test **prompt**').
|
137
|
-
and_call_original
|
138
142
|
expect(list.show_system_prompt).to eq list
|
139
143
|
end
|
140
144
|
|
@@ -191,7 +191,7 @@ EOT
|
|
191
191
|
it 'can add images' do
|
192
192
|
images = []
|
193
193
|
expect(chat).to receive(:add_image).
|
194
|
-
with(images, kind_of(IO), %r(/spec/assets/kitten.jpg\z)).
|
194
|
+
with(images, kind_of(IO), %r(/spec/assets/kitten\.jpg\z)).
|
195
195
|
and_call_original
|
196
196
|
chat.parse_content('./spec/assets/kitten.jpg', images)
|
197
197
|
expect(images.size).to eq 1
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: ollama_chat
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.0.22
|
4
|
+
version: 0.0.23
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Florian Frank
|
@@ -386,6 +386,7 @@ extra_rdoc_files:
|
|
386
386
|
- lib/ollama_chat/follow_chat.rb
|
387
387
|
- lib/ollama_chat/history.rb
|
388
388
|
- lib/ollama_chat/information.rb
|
389
|
+
- lib/ollama_chat/kramdown_ansi.rb
|
389
390
|
- lib/ollama_chat/message_format.rb
|
390
391
|
- lib/ollama_chat/message_list.rb
|
391
392
|
- lib/ollama_chat/message_output.rb
|
@@ -424,6 +425,7 @@ files:
|
|
424
425
|
- lib/ollama_chat/follow_chat.rb
|
425
426
|
- lib/ollama_chat/history.rb
|
426
427
|
- lib/ollama_chat/information.rb
|
428
|
+
- lib/ollama_chat/kramdown_ansi.rb
|
427
429
|
- lib/ollama_chat/message_format.rb
|
428
430
|
- lib/ollama_chat/message_list.rb
|
429
431
|
- lib/ollama_chat/message_output.rb
|