ollama_chat 0.0.24 → 0.0.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.contexts/full.rb +25 -0
- data/.contexts/info.rb +17 -0
- data/.contexts/lib.rb +23 -0
- data/CHANGES.md +24 -0
- data/README.md +8 -0
- data/Rakefile +3 -2
- data/VERSION +1 -1
- data/bin/ollama_chat_send +9 -2
- data/lib/ollama_chat/chat.rb +23 -12
- data/lib/ollama_chat/clipboard.rb +12 -0
- data/lib/ollama_chat/dialog.rb +17 -0
- data/lib/ollama_chat/kramdown_ansi.rb +3 -1
- data/lib/ollama_chat/message_list.rb +1 -1
- data/lib/ollama_chat/ollama_chat_config/default_config.yml +4 -2
- data/lib/ollama_chat/server_socket.rb +7 -3
- data/lib/ollama_chat/source_fetching.rb +0 -2
- data/lib/ollama_chat/utils/chooser.rb +16 -0
- data/lib/ollama_chat/utils/fetcher.rb +1 -1
- data/lib/ollama_chat/utils/file_argument.rb +19 -0
- data/lib/ollama_chat/version.rb +1 -1
- data/ollama_chat.gemspec +7 -6
- data/spec/ollama_chat/kramdown_ansi_spec.rb +45 -0
- data/spec/ollama_chat/message_list_spec.rb +26 -0
- data/spec/ollama_chat/server_socket_spec.rb +68 -47
- metadata +24 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a08d4184578d406a8e5d02823cbd5fc0c1999591f86f715b99637b9a263ba7f4
+  data.tar.gz: 2cdbb7c09d8de378fc58b292e4d2e29ba6849f3b9c3f1bb742ba658ec6b997e0
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: da10d3bd76363f8856a0e35dfe927084374025c3e87a9e531d02ba07bcaa214e057a649ab9c3c46057947e651f8ca616344a1de30bcecc1d64fe4e8c13df3893
+  data.tar.gz: c2944a6a328e5a1833d4becfe9842dbdd6b4d223bb36903b22254914d22ec19eadcc7942f0e4ba0d6a11d0fccfb4d41f3bf5624019901fb594e652aeacd70c21
data/.contexts/full.rb
ADDED
@@ -0,0 +1,25 @@
+context do
+  variable project_name: Pathname.pwd.basename
+
+  variable project_version: File.read('VERSION').chomp
+
+  variable branch: `git rev-parse --abbrev-ref HEAD`.chomp
+
+  namespace "structure" do
+    command "tree lib", tags: %w[ project_structure ]
+  end
+
+  namespace "lib" do
+    Dir['lib/**/*.rb'].each do |filename|
+      file filename, tags: 'lib'
+    end
+  end
+
+  file 'Rakefile', tags: 'gem_hadar'
+
+  file 'README.md', tags: 'documentation'
+
+  meta ruby: RUBY_DESCRIPTION
+
+  meta code_coverage: json('coverage/coverage_context.json')
+end
data/.contexts/info.rb
ADDED
@@ -0,0 +1,17 @@
+context do
+  variable project_name: Pathname.pwd.basename
+
+  variable project_version: File.read('VERSION').chomp
+
+  variable branch: `git rev-parse --abbrev-ref HEAD`.chomp
+
+  namespace "structure" do
+    command "tree lib", tags: %w[ project_structure ]
+  end
+
+  file 'Rakefile', tags: 'gem_hadar'
+
+  file 'README.md', tags: 'documentation'
+
+  meta ruby: RUBY_DESCRIPTION
+end
data/.contexts/lib.rb
ADDED
@@ -0,0 +1,23 @@
+context do
+  variable project_name: Pathname.pwd.basename
+
+  variable project_version: File.read('VERSION').chomp
+
+  variable branch: `git rev-parse --abbrev-ref HEAD`.chomp
+
+  namespace "structure" do
+    command "tree lib", tags: %w[ project_structure ]
+  end
+
+  namespace "lib" do
+    Dir['lib/**/*.rb'].each do |filename|
+      file filename, tags: 'lib'
+    end
+  end
+
+  file 'Rakefile', tags: 'gem_hadar'
+
+  meta ruby: RUBY_DESCRIPTION
+
+  meta code_coverage: json('coverage/coverage_context.json')
+end
data/CHANGES.md
CHANGED
@@ -1,5 +1,29 @@
 # Changes
 
+## 2025-08-18 v0.0.25
+
+- Integrated `context_spook` gem as development dependency
+- Added new context files: `.contexts/full.rb`, `.contexts/info.rb`, and
+  `.contexts/lib.rb`
+- Updated `ollama-ruby` dependency version constraint from `~> 1.2` to `~> 1.6`
+- Bumped **tins** dependency from ~> **1.34** to ~> **1.41**
+- Refactored `web` method in `chat.rb` to conditionally handle embeddings
+- Split web prompt templates into `web_embed` and `web_import`
+- Moved cache check message to display before cache retrieval
+- Fixed `show_last` behavior for empty lists with comprehensive tests
+- Added nil check to `kramdown_ansi_parse` method to prevent `NoMethodError`
+- Added documentation comments to `OllamaChat::Clipboard`,
+  `OllamaChat::Dialog`, `OllamaChat::Utils::Chooser`, and
+  `OllamaChat::Utils::FileArgument` modules
+- Added new command line option `-d DIR` to specify runtime directory for
+  socket file
+- Updated `OllamaChat::ServerSocket.send_to_server_socket` method to accept
+  `runtime_dir` parameter
+- Modified `create_socket_server` method to use provided `runtime_dir` when
+  creating Unix socket server
+- Updated help text to document the new `-d` option
+- Added separate context for `runtime_dir` parameter testing in spec
+
 ## 2025-08-17 v0.0.24
 
 - Updated `kramdown-ansi` dependency version constraint from **0.0** to **0.1**
data/README.md
CHANGED
@@ -227,6 +227,14 @@ The `ollama_chat_send` command now supports additional parameters to enhance fun
   $ echo "Visit https://example.com for more info" | ollama_chat_send -p
   ```
 
+- **Runtime Directory (`-d`)**: Specifies the directory where the Unix socket
+  file of `ollama_chat` was created, if you want to send to a specific
+  `ollama_chat`.
+
+  ```bash
+  $ echo "Hello world" | ollama_chat_send -d /tmp/my_runtime_dir -r
+  ```
+
 - **Help (`-h` or `--help`)**: Displays usage information and available options.
 
   ```bash
data/Rakefile
CHANGED
@@ -30,7 +30,7 @@ GemHadar do
   executables << 'ollama_chat' << 'ollama_chat_send'
 
   dependency 'excon', '~> 1.0'
-  dependency 'ollama-ruby', '~> 1.
+  dependency 'ollama-ruby', '~> 1.6'
   dependency 'documentrix', '~> 0.0', '>= 0.0.2'
   dependency 'unix_socks', '>= 0.0.1'
   dependency 'rss', '~> 0.3'
@@ -41,7 +41,7 @@ GemHadar do
   dependency 'xdg'
   dependency 'kramdown-ansi', '~> 0.1'
   dependency 'complex_config', '~> 0.22', '>= 0.22.2'
-  dependency 'tins', '~> 1.
+  dependency 'tins', '~> 1.41'
   dependency 'search_ui', '~> 0.0'
   dependency 'amatch', '~> 0.4.1'
   dependency 'pdf-reader', '~> 2.0'
@@ -52,6 +52,7 @@ GemHadar do
   development_dependency 'webmock'
   development_dependency 'debug'
   development_dependency 'simplecov'
+  development_dependency 'context_spook'
 
   licenses << 'MIT'
 
data/VERSION
CHANGED
@@ -1 +1 @@
-0.0.
+0.0.25
data/bin/ollama_chat_send
CHANGED
@@ -5,7 +5,7 @@ require 'tins/go'
 include Tins::GO
 
 
-opts = go 'f:rtph', ARGV
+opts = go 'f:d:rtph', ARGV
 
 def usage(rc = 0)
   puts <<~EOT
@@ -16,6 +16,7 @@ def usage(rc = 0)
     -t Send input as terminal input including commands, e. g. /import
     -p Send input with source parsing enabled (defaults to disabled)
    -f CONFIG file to read
+    -d DIR the runtime directory to look for the socket file
     -h Show this help message
 
   Send data to a running Ollame Chat client via standard input.
@@ -31,7 +32,13 @@ begin
   else
     opts[?r] ? :socket_input_with_response : :socket_input
   end
-  response = OllamaChat::ServerSocket.send_to_server_socket(
+  response = OllamaChat::ServerSocket.send_to_server_socket(
+    STDIN.read,
+    type:,
+    config:,
+    runtime_dir: opts[?d],
+    parse: !!opts[?p]
+  )
   type == :socket_input_with_response and puts response.content
 rescue => e
   warn "Caught #{e.class}: #{e}"
data/lib/ollama_chat/chat.rb
CHANGED
@@ -348,23 +348,34 @@ class OllamaChat::Chat
     end
   end
 
-  # The web method
-  #
-  # results into a prompt string.
+  # The web method performs a web search and processes the results based on
+  # embedding configuration.
   #
-  #
+  # It searches for the given query using the configured search engine and
+  # processes up to the specified number of URLs. If embeddings are enabled, it
+  # embeds each result and interpolates the query into the web_embed prompt.
+  # Otherwise, it imports each result and interpolates both the query and
+  # results into the web_import prompt.
+  #
+  # @param count [ String ] the maximum number of search results to process
   # @param query [ String ] the search query string
   #
-  # @return [ String ] the
+  # @return [ String, Symbol ] the interpolated prompt content or :next if no URLs were found
   def web(count, query)
-    urls
-
-
+    urls = search_web(query, count.to_i) or return :next
+    if @embedding.on?
+      prompt = config.prompts.web_embed
+      urls.each do |url|
+        fetch_source(url) { |url_io| embed_source(url_io, url) }
+      end
+      prompt.named_placeholders_interpolate({query:})
+    else
+      prompt = config.prompts.web_import
+      results = urls.each_with_object('') do |url, content|
+        import(url).full? { |c| content << c }
+      end
+      prompt.named_placeholders_interpolate({query:, results:})
     end
-    urls_summarized = urls.map { summarize(_1) }
-    results = urls.zip(urls_summarized).
-      map { |u, s| "%s as \n:%s" % [ u, s ] } * "\n\n"
-    config.prompts.web % { query:, results: }
   end
 
   # The manage_links method handles operations on a collection of links, such
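For orientation, a minimal sketch of how the two new prompt templates get filled in. The template names `web_embed`/`web_import` and the `named_placeholders_interpolate` call are taken from the diff above; the query and results values below are made up for illustration only:

```ruby
# Hypothetical values standing in for a real web search.
query   = 'ruby unix sockets'
results = "https://example.com as\n:summarized content"

# Embeddings on: only the query is interpolated into the web_embed template,
# since the fetched pages are stored as embeddings instead of inlined.
config.prompts.web_embed.named_placeholders_interpolate({query:})

# Embeddings off: both query and the imported page contents go into web_import.
config.prompts.web_import.named_placeholders_interpolate({query:, results:})
```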
data/lib/ollama_chat/clipboard.rb
CHANGED
@@ -1,3 +1,15 @@
+# A module that provides clipboard functionality for copying and pasting chat
+# messages.
+#
+# This module enables users to copy the last assistant message to the system
+# clipboard and paste content from input, facilitating easy transfer of
+# conversation content between different applications and contexts.
+#
+# @example Copying a message to clipboard
+#   chat.copy_to_clipboard
+#
+# @example Pasting content from input
+#   content = chat.paste_from_input
 module OllamaChat::Clipboard
   # Copy the last assistant's message to the system clipboard.
   #
data/lib/ollama_chat/dialog.rb
CHANGED
@@ -1,3 +1,20 @@
+# A module that provides interactive selection and configuration functionality
+# for OllamaChat.
+#
+# The Dialog module encapsulates various helper methods for choosing models,
+# system prompts, document policies, and voices, as well as displaying
+# information and managing chat sessions. It leverages user interaction
+# components like choosers and prompts to enable dynamic configuration during
+# runtime.
+#
+# @example Selecting a model from available options
+#   chat.choose_model('-m llama3.1', 'llama3.1')
+#
+# @example Changing the system prompt
+#   chat.change_system_prompt('default_prompt', system: '?sherlock')
+#
+# @example Choosing a document policy
+#   chat.choose_document_policy
 module OllamaChat::Dialog
   # The model_with_size method formats a model's size for display
   # by creating a formatted string that includes the model name and its size
data/lib/ollama_chat/kramdown_ansi.rb
CHANGED
@@ -21,11 +21,13 @@ module OllamaChat::KramdownANSI
   # content with appropriate terminal formatting based on the application's
   # styling configuration.
   #
-  # @param content [ String ] the raw content to be parsed and formatted
+  # @param content [ String, nil ] the raw content to be parsed and formatted.
+  #   If nil, returns an empty string.
   #
   # @return [ String ] the content formatted with ANSI escape sequences
   #   according to the configured styles
   def kramdown_ansi_parse(content)
+    content.nil? and return ''
     Kramdown::ANSI.parse(content, ansi_styles: @kramdown_ansi_styles)
   end
 end
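The guard makes the helper safe to call with content that may be missing. A short sketch of the resulting behaviour, mirroring the spec added further down in this release:

```ruby
# With the nil check in place, callers no longer need to guard themselves.
chat.kramdown_ansi_parse(nil)        # => "" (returns early, no NoMethodError)
chat.kramdown_ansi_parse("# Title")  # => ANSI-formatted String
```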
data/lib/ollama_chat/message_list.rb
CHANGED
@@ -113,7 +113,7 @@ class OllamaChat::MessageList
   # @return [ OllamaChat::MessageList ] returns the instance of the class
   def show_last
     message = last
-    message
+    !message || message.role == 'user' and return
     use_pager do |output|
       output.puts message_text_for(message)
     end
data/lib/ollama_chat/ollama_chat_config/default_config.yml
CHANGED
@@ -21,8 +21,10 @@ prompts:
     %{words} words:
 
     %{source_content}
-
-    Answer the the query %{query} using
+  web_embed: |
+    Answer the the query %{query} using the provided chunks.
+  web_import: |
+    Answer the the query %{query} using these imported source:
 
     %{results}
   location: You are at %{location_name}, %{location_decimal_degrees}, on %{localtime}, preferring %{units}
data/lib/ollama_chat/server_socket.rb
CHANGED
@@ -13,12 +13,13 @@ module OllamaChat::ServerSocket
   # @param content [ String ] the message content to be sent
   # @param config [ ComplexConfig::Settings ] the configuration object containing server settings
   # @param type [ Symbol ] the type of message transmission, defaults to :socket_input
+  # @param runtime_dir [ String ] pathname to runtime_dir of socket file
   # @param parse [ TrueClass, FalseClass ] whether to parse the response, defaults to false
   #
   # @return [ UnixSocks::Message, nil ] the response from transmit_with_response if type
   #   is :socket_input_with_response, otherwise nil
-  def send_to_server_socket(content, config:, type: :socket_input, parse: false)
-    server = create_socket_server(config:)
+  def send_to_server_socket(content, config:, type: :socket_input, runtime_dir: nil, parse: false)
+    server = create_socket_server(config:, runtime_dir:)
     message = { content:, type:, parse: }
     if type.to_sym == :socket_input_with_response
       server.transmit_with_response(message)
@@ -43,7 +44,10 @@ module OllamaChat::ServerSocket
   #
   # @return [UnixSocks::Server] a configured Unix domain socket server
   #   instance ready to receive messages
-  def create_socket_server(config:)
+  def create_socket_server(config:, runtime_dir: nil)
+    if runtime_dir
+      return UnixSocks::Server.new(socket_name: 'ollama_chat.sock', runtime_dir:)
+    end
     if runtime_dir = config.server_socket_runtime_dir
       UnixSocks::Server.new(socket_name: 'ollama_chat.sock', runtime_dir:)
     else
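For reference, a minimal sketch of how a caller might exercise the new parameter. The method name and its keyword arguments come from the diff above; the directory path and message text are only illustrative, and `config` stands for an already loaded ComplexConfig::Settings object:

```ruby
require 'ollama_chat'

# Explicit runtime_dir: the socket is looked up in /tmp/my_runtime_dir,
# matching an ollama_chat instance whose socket file was created there
# (the same directory you would pass to ollama_chat_send via -d).
response = OllamaChat::ServerSocket.send_to_server_socket(
  'Hello world',
  config:      config,
  type:        :socket_input_with_response,
  runtime_dir: '/tmp/my_runtime_dir',
  parse:       false
)
puts response.content

# Without runtime_dir, create_socket_server falls back to
# config.server_socket_runtime_dir and otherwise to its previous default.
```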
data/lib/ollama_chat/source_fetching.rb
CHANGED
@@ -110,7 +110,6 @@ module OllamaChat::SourceFetching
     end
   end
 
-
   # Summarizes content from the given source IO and source identifier.
   #
   # This method takes an IO object containing document content and generates a
@@ -146,7 +145,6 @@ module OllamaChat::SourceFetching
     end
   end
 
-
   # Embeds content from the given source IO and source identifier.
   #
   # This method processes document content by splitting it into chunks using
data/lib/ollama_chat/utils/chooser.rb
CHANGED
@@ -2,6 +2,22 @@ require 'amatch'
 require 'search_ui'
 require 'term/ansicolor'
 
+# A module that provides interactive selection functionality using fuzzy
+# matching and search capabilities.
+#
+# The Chooser module enables users to interactively select items from a list
+# using a search interface with fuzzy matching. It leverages the Amatch library
+# for similarity matching and SearchUI for the interactive display and
+# selection experience.
+#
+# @example Using the chooser in an interactive menu
+#   entries = ['apple', 'banana', 'cherry']
+#   selected = OllamaChat::Utils::Chooser.choose(entries, prompt: 'Choose a fruit:')
+#
+# @example Returning immediately if only one entry exists
+#   entries = ['single_option']
+#   result = OllamaChat::Utils::Chooser.choose(entries, return_immediately: true)
+#   # Returns 'single_option' directly without user interaction
 module OllamaChat::Utils::Chooser
   class << self
     include SearchUI
data/lib/ollama_chat/utils/fetcher.rb
CHANGED
@@ -51,8 +51,8 @@ class OllamaChat::Utils::Fetcher
   def self.get(url, headers: {}, **options, &block)
     cache = options.delete(:cache) and
       cache = OllamaChat::Utils::CacheFetcher.new(cache)
+    cache and infobar.puts "Getting #{url.to_s.inspect} from cache."
     if result = cache&.get(url, &block)
-      infobar.puts "Getting #{url.to_s.inspect} from cache."
       return result
     else
       new(**options).send(:get, url, headers:) do |tmp|
data/lib/ollama_chat/utils/file_argument.rb
CHANGED
@@ -1,3 +1,22 @@
+# A module that provides functionality for handling file arguments and content
+# retrieval.
+#
+# The FileArgument module offers methods to process either file paths or direct
+# string content, determining whether the input represents a file that should
+# be read or if it's already a string of content to be used directly. It also
+# includes logic to handle default values when no valid input is provided.
+#
+# @example Retrieving file contents or using direct content
+#   FileArgument.get_file_argument('path/to/file.txt')
+#   # Returns the contents of the file if it exists
+#
+# @example Using a string as content
+#   FileArgument.get_file_argument('direct content string')
+#   # Returns the string itself
+#
+# @example Providing a default value
+#   FileArgument.get_file_argument(nil, default: 'fallback content')
+#   # Returns 'fallback content' when no valid input is given
 module OllamaChat::Utils::FileArgument
   module_function
 
data/lib/ollama_chat/version.rb
CHANGED
data/ollama_chat.gemspec
CHANGED
@@ -1,9 +1,9 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama_chat 0.0.
+# stub: ollama_chat 0.0.25 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "ollama_chat".freeze
-  s.version = "0.0.
+  s.version = "0.0.25".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
@@ -13,14 +13,14 @@ Gem::Specification.new do |s|
   s.email = "flori@ping.de".freeze
   s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
   s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
-  s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
+  s.files = [".all_images.yml".freeze, ".contexts/full.rb".freeze, ".contexts/info.rb".freeze, ".contexts/lib.rb".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
   s.homepage = "https://github.com/flori/ollama_chat".freeze
   s.licenses = ["MIT".freeze]
   s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
   s.required_ruby_version = Gem::Requirement.new("~> 3.1".freeze)
   s.rubygems_version = "3.6.9".freeze
   s.summary = "A command-line interface (CLI) for interacting with an Ollama AI model.".freeze
-  s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
+  s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
 
   s.specification_version = 4
 
@@ -31,8 +31,9 @@ Gem::Specification.new do |s|
   s.add_development_dependency(%q<webmock>.freeze, [">= 0".freeze])
   s.add_development_dependency(%q<debug>.freeze, [">= 0".freeze])
   s.add_development_dependency(%q<simplecov>.freeze, [">= 0".freeze])
+  s.add_development_dependency(%q<context_spook>.freeze, [">= 0".freeze])
   s.add_runtime_dependency(%q<excon>.freeze, ["~> 1.0".freeze])
-  s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 1.
+  s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 1.6".freeze])
   s.add_runtime_dependency(%q<documentrix>.freeze, ["~> 0.0".freeze, ">= 0.0.2".freeze])
   s.add_runtime_dependency(%q<unix_socks>.freeze, [">= 0.0.1".freeze])
   s.add_runtime_dependency(%q<rss>.freeze, ["~> 0.3".freeze])
@@ -43,7 +44,7 @@ Gem::Specification.new do |s|
   s.add_runtime_dependency(%q<xdg>.freeze, [">= 0".freeze])
   s.add_runtime_dependency(%q<kramdown-ansi>.freeze, ["~> 0.1".freeze])
   s.add_runtime_dependency(%q<complex_config>.freeze, ["~> 0.22".freeze, ">= 0.22.2".freeze])
-  s.add_runtime_dependency(%q<tins>.freeze, ["~> 1.
+  s.add_runtime_dependency(%q<tins>.freeze, ["~> 1.41".freeze])
   s.add_runtime_dependency(%q<search_ui>.freeze, ["~> 0.0".freeze])
   s.add_runtime_dependency(%q<amatch>.freeze, ["~> 0.4.1".freeze])
   s.add_runtime_dependency(%q<pdf-reader>.freeze, ["~> 2.0".freeze])
data/spec/ollama_chat/kramdown_ansi_spec.rb
ADDED
@@ -0,0 +1,45 @@
+require 'spec_helper'
+
+describe OllamaChat::KramdownANSI do
+  let :chat do
+    double('Chat').extend(described_class)
+  end
+
+  describe '#configure_kramdown_ansi_styles' do
+    it 'can be configured via env var' do
+      allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES').and_return(true)
+      allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_STYLES').and_return(false)
+
+      styles = { bold: '1' }
+      expect(Kramdown::ANSI::Styles).to receive(:from_env_var).
+        with('KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES').
+        and_return(double(ansi_styles: styles))
+
+      expect(chat.configure_kramdown_ansi_styles).to eq(styles)
+    end
+
+    it 'has a default configuration' do
+      allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_OLLAMA_CHAT_STYLES').and_return(false)
+      allow(ENV).to receive(:key?).with('KRAMDOWN_ANSI_STYLES').and_return(false)
+
+      expect(chat.configure_kramdown_ansi_styles).to be_a(Hash)
+    end
+  end
+
+  describe '#kramdown_ansi_parse' do
+    it 'can parse markdown' do
+      content = "# Header\n\nParagraph text"
+      result = chat.kramdown_ansi_parse(content)
+
+      expect(result).to be_a(String)
+    end
+
+    it 'handles empty string correctly' do
+      expect(chat.kramdown_ansi_parse('')).to eq ''
+    end
+
+    it 'handles nil correctly' do
+      expect(chat.kramdown_ansi_parse(nil)).to eq ''
+    end
+  end
+end
data/spec/ollama_chat/message_list_spec.rb
CHANGED
@@ -71,6 +71,32 @@ describe OllamaChat::MessageList do
     FileUtils.rm_f 'tmp/test-conversation.json'
   end
 
+
+  describe '.show_last' do
+    it 'shows nothing when there are no messages' do
+      empty_list = described_class.new(chat)
+      expect { empty_list.show_last }.not_to raise_error
+      expect(empty_list.show_last).to be nil
+    end
+
+    it 'shows nothing when the last message is by the assistant' do
+      list = described_class.new(chat)
+      allow(chat).to receive(:think).and_return(double(on?: false))
+      allow(chat).to receive(:markdown).and_return(double(on?: false))
+      list << Ollama::Message.new(role: 'assistant', content: 'hello')
+      expect(STDOUT).to receive(:puts).
+        with("📨 \e[1m\e[38;5;111massistant\e[0m\e[0m:\nhello\n")
+      expect(list.show_last).to be_a described_class
+    end
+
+    it 'shows nothing when the last message is by the user' do
+      list = described_class.new(chat)
+      list << Ollama::Message.new(role: 'user', content: 'world')
+      expect { list.show_last }.not_to raise_error
+      expect(list.show_last).to be nil
+    end
+  end
+
   context 'without pager' do
     before do
       expect(list).to receive(:determine_pager_command).and_return nil
data/spec/ollama_chat/server_socket_spec.rb
CHANGED
@@ -9,75 +9,96 @@ describe OllamaChat::ServerSocket do
     let(:config) { double('Config') }
     let(:server) { double('Server') }
 
-
-    expect(OllamaChat::ServerSocket).to receive(:create_socket_server).with(config: config).and_return(server)
-    end
+    context 'without runtime_dir' do
 
-
-
-
+      before do
+        expect(OllamaChat::ServerSocket).to receive(:create_socket_server).
+          with(config: config, runtime_dir: nil).and_return(server)
+      end
 
-
+      context 'with default parameters' do
+        it 'uses correct defaults' do
+          message = { content: 'test', type: :socket_input, parse: false }
 
-
+          expect(server).to receive(:transmit).with(message).and_return(nil)
 
-
+          result = OllamaChat::ServerSocket.send_to_server_socket('test', config: config)
+
+          expect(result).to be_nil
+        end
       end
-      end
 
-
-
-
+      context 'with :socket_input type and parse: true' do
+        it 'sends message with parse flag and returns nil' do
+          message = { content: 'test', type: :socket_input, parse: true }
 
-
+          expect(server).to receive(:transmit).with(message).and_return(nil)
 
-
-
-
-
-
-
+          result = OllamaChat::ServerSocket.send_to_server_socket(
+            'test',
+            config: config,
+            type: :socket_input,
+            parse: true
+          )
 
-
+          expect(result).to be_nil
+        end
      end
-      end
 
-
-
-
-
+      context 'with :socket_input_with_response type and parse: false' do
+        it 'sends message and returns response with parse flag' do
+          message = { content: 'test', type: :socket_input_with_response, parse: false }
+          response = double('Response')
 
-
+          expect(server).to receive(:transmit_with_response).with(message).and_return(response)
 
-
-
-
-
-
-
+          result = OllamaChat::ServerSocket.send_to_server_socket(
+            'test',
+            config: config,
+            type: :socket_input_with_response,
+            parse: false
+          )
 
-
+          expect(result).to eq(response)
+        end
       end
-      end
 
-
-
-
-
+      context 'with :socket_input_with_response type and parse: true' do
+        it 'sends message and returns response with parse flag' do
+          message = { content: 'test', type: :socket_input_with_response, parse: true }
+          response = double('Response')
 
-
+          expect(server).to receive(:transmit_with_response).with(message).and_return(response)
 
-
-
-
-
-
-
+          result = OllamaChat::ServerSocket.send_to_server_socket(
+            'test',
+            config: config,
+            type: :socket_input_with_response,
+            parse: true
+          )
 
-
+          expect(result).to eq(response)
+        end
       end
     end
 
+    context 'with runtime_dir parameter' do
+      before do
+        expect(OllamaChat::ServerSocket).to receive(:create_socket_server).
+          with(config: config, runtime_dir: '/foo/bar').and_return(server)
+      end
+
+      it 'uses correct defaults' do
+        message = { content: 'test', type: :socket_input, parse: false }
+
+        expect(server).to receive(:transmit).with(message).and_return(nil)
+
+
+        result = OllamaChat::ServerSocket.send_to_server_socket('test', config: config, runtime_dir: '/foo/bar')
+
+        expect(result).to be_nil
+      end
+    end
   end
 
   describe '#create_socket_server' do
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: ollama_chat
 version: !ruby/object:Gem::Version
-  version: 0.0.
+  version: 0.0.25
 platform: ruby
 authors:
 - Florian Frank
@@ -107,6 +107,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: context_spook
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: excon
   requirement: !ruby/object:Gem::Requirement
@@ -127,14 +141,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.6'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.6'
 - !ruby/object:Gem::Dependency
   name: documentrix
   requirement: !ruby/object:Gem::Requirement
@@ -293,14 +307,14 @@ dependencies:
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.41'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.
+        version: '1.41'
 - !ruby/object:Gem::Dependency
   name: search_ui
   requirement: !ruby/object:Gem::Requirement
@@ -400,6 +414,9 @@ extra_rdoc_files:
 - lib/ollama_chat/web_searching.rb
 files:
 - ".all_images.yml"
+- ".contexts/full.rb"
+- ".contexts/info.rb"
+- ".contexts/lib.rb"
 - ".envrc"
 - ".gitignore"
 - CHANGES.md
@@ -460,6 +477,7 @@ files:
 - spec/ollama_chat/clipboard_spec.rb
 - spec/ollama_chat/follow_chat_spec.rb
 - spec/ollama_chat/information_spec.rb
+- spec/ollama_chat/kramdown_ansi_spec.rb
 - spec/ollama_chat/message_list_spec.rb
 - spec/ollama_chat/message_output_spec.rb
 - spec/ollama_chat/model_handling_spec.rb
@@ -504,6 +522,7 @@ test_files:
 - spec/ollama_chat/clipboard_spec.rb
 - spec/ollama_chat/follow_chat_spec.rb
 - spec/ollama_chat/information_spec.rb
+- spec/ollama_chat/kramdown_ansi_spec.rb
 - spec/ollama_chat/message_list_spec.rb
 - spec/ollama_chat/message_output_spec.rb
 - spec/ollama_chat/model_handling_spec.rb