ollama_chat 0.0.50 → 0.0.51

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 913e3ff0ad4e2b4a7a6d2825f5772f8a4e68dc2931925d096d0989b23e153756
4
- data.tar.gz: ae44b7d36ebbdc1013bc90841e4dfacd685d99d949a84c201ab0f8cc512dee0b
3
+ metadata.gz: 6fb9a8c4d866252bfa8ace6399ae3a9c7b72ace633c5b98b7a5c40543be105ba
4
+ data.tar.gz: d899a33db800219a4874d1db8da428dfacf14d6013fd2a2b76f58c12c7cece82
5
5
  SHA512:
6
- metadata.gz: 7ce3e6e9cebf291d8ffb56fa8960b244f500e6f1e15f65f49a6f1905779032b22559e3b720b8fa22bef2f9992ae634f1aad6a92797a61e080788e9277ca1ee89
7
- data.tar.gz: 2c7a36c2487c92767345e00a65c3167b5b41339891918421700322fe2722efeda525c6baf447553cd3021fd7a174a10b2d93c32aa555ed274b4eac89dc671a96
6
+ metadata.gz: 303e0bda16f1cf0eba110271a63e40eb4563a90af5cb152465ba51bde1a3dcfaf3b317210df77cfcdc78541f1c0c0bbcf0346edfd134821928922083cc37cddd
7
+ data.tar.gz: b05f2ead74ccb3e534beb407203fb26d6070715337255ab09b451b52fd355a740c0a3ceaad348091a5e302b11db4d56cd9c3c71e0e207268c4fc0a93d2231b8f
data/CHANGES.md CHANGED
@@ -1,5 +1,35 @@
1
1
  # Changes
2
2
 
3
+ ## 2026-01-06 v0.0.51
4
+
5
+ - Added `/input` command to allow users to select files using glob patterns
6
+ like `src/*.c` or `**/*.rb`
7
+ - Implemented `input` method in `OllamaChat::Chat` class using `Dir.glob` for
8
+ pattern matching
9
+ - Integrated with `OllamaChat::Utils::Chooser` for interactive file selection
10
+ - Added `/input` command to help text in `OllamaChat::Information` module
11
+ - Supports default pattern `**/*` when no argument is provided
12
+ - Returns file content as string for use in chat conversations
13
+ - Integrated `/context` command in `OllamaChat::Chat` with pattern matching
14
+ support
15
+ - Updated `display_chat_help_message` to document `/context [pattern...]`
16
+ command
17
+ - Modified `README.md` to include `/context` command in help section
18
+ - Introduced `context_spook` **~> 1.1** dependency
19
+ - Created `OllamaChat::InputContent` module with `input`, `choose_filename`,
20
+ and `context_spook` methods
21
+ - Extracted input functionality from `Chat` class to `InputContent` module for
22
+ better organization
23
+ - Support both static context files (`.contexts/*.rb`) and dynamic pattern
24
+ matching (`lib/**/*.rb`)
25
+ - Implemented proper content handling with `@parse_content = false` to prevent
26
+ parsing of URLs
27
+ - Used `File.file?` instead of `File.stat(it).file?` for cleaner file checking
28
+ - Support multiple space-separated patterns in `/context` command
29
+ - Added comprehensive YARD documentation for all new methods and parameters
30
+ - Improved pipe command handling with dynamic command resolution
31
+ - Moved `RedisCache` class comment inside the `OllamaChat` module namespace
32
+
3
33
  ## 2026-01-03 v0.0.50
4
34
 
5
35
  - Use Redis-based expiring cache implementation with the new
data/README.md CHANGED
@@ -180,6 +180,8 @@ The following commands can be given inside the chat, if prefixed by a `/`:
180
180
  /links [clear] display (or clear) links used in the chat
181
181
  /save filename store conversation messages
182
182
  /load filename load conversation messages
183
+ /input [pattern] select and read content from a file (default: **/*)
184
+ /context [pattern...] collect context with glob patterns
183
185
  /output filename save last response to filename
184
186
  /pipe command write last response to command's stdin
185
187
  /vim insert the last message into a vim server
data/Rakefile CHANGED
@@ -58,6 +58,7 @@ GemHadar do
58
58
  dependency 'bigdecimal', '~> 3.1'
59
59
  dependency 'csv', '~> 3.0'
60
60
  dependency 'const_conf', '~> 0.3'
61
+ dependency 'context_spook', '~> 1.1'
61
62
  development_dependency 'all_images', '~> 0.6'
62
63
  development_dependency 'rspec', '~> 3.2'
63
64
  development_dependency 'kramdown', '~> 2.0'
data/lib/ollama_chat/chat.rb CHANGED
@@ -15,6 +15,7 @@ require 'pdf/reader'
15
15
  require 'csv'
16
16
  require 'socket'
17
17
  require 'shellwords'
18
+ require 'context_spook'
18
19
 
19
20
  # A chat client for interacting with Ollama models through a terminal
20
21
  # interface.
@@ -49,6 +50,7 @@ class OllamaChat::Chat
49
50
  include OllamaChat::ServerSocket
50
51
  include OllamaChat::KramdownANSI
51
52
  include OllamaChat::Conversation
53
+ include OllamaChat::InputContent
52
54
 
53
55
  # Initializes a new OllamaChat::Chat instance with the given command-line
54
56
  # arguments.
@@ -328,6 +330,14 @@ class OllamaChat::Chat
328
330
  when %r(^/web\s+(?:(\d+)\s+)?(.+))
329
331
  @parse_content = false
330
332
  web($1, $2)
333
+ when %r(^/input(?:\s+(.+))?$)
334
+ @parse_content = false
335
+ input($1) or :next
336
+ when %r(^/context(?:\s+(.+))?$)
337
+ arg = $1
338
+ arg and patterns = arg.scan(/(\S+)/).flatten
339
+ @parse_content = false
340
+ context_spook(patterns) or :next
331
341
  when %r(^/save\s+(.+)$)
332
342
  save_conversation($1)
333
343
  :next
data/lib/ollama_chat/information.rb CHANGED
@@ -147,6 +147,8 @@ module OllamaChat::Information
147
147
  /links [clear] display (or clear) links used in the chat
148
148
  /save filename store conversation messages
149
149
  /load filename load conversation messages
150
+ /input [pattern] select and read content from a file (default: **/*)
151
+ /context [pattern...] collect context with glob patterns
150
152
  /output filename save last response to filename
151
153
  /pipe command write last response to command's stdin
152
154
  /vim insert the last message into a vim server
data/lib/ollama_chat/input_content.rb ADDED
@@ -0,0 +1,91 @@
1
+ # A module that provides input content processing functionality for OllamaChat.
2
+ #
3
+ # The InputContent module encapsulates methods for reading and returning
4
+ # content from selected files, selecting files from a list of matching files,
5
+ # and collecting project context using the context_spook library. It supports
6
+ # interactive file selection and context collection for enhancing chat
7
+ # interactions with local or remote content.
8
+ module OllamaChat::InputContent
9
+ # The input method reads and returns the content of a selected file.
10
+ #
11
+ # This method searches for files matching the given pattern and presents them
12
+ # in an interactive chooser menu. If a file is selected, its content is read
13
+ # and returned. If the user chooses to exit or no file is selected, the
14
+ # method returns nil.
15
+ #
16
+ # @param pattern [ String ] the glob pattern to search for files (defaults to '**/*')
17
+ #
18
+ # @return [ String, nil ] the content of the selected file or nil if no file
19
+ # was chosen
20
+ def input(pattern)
21
+ pattern ||= '**/*'
22
+ if filename = choose_filename(pattern)
23
+ File.read(filename)
24
+ end
25
+ end
26
+
27
+ # The choose_filename method selects a file from a list of matching files.
28
+ #
29
+ # This method searches for files matching the given glob pattern, presents
30
+ # them in an interactive chooser menu, and returns the selected filename. If
31
+ # the user chooses to exit or no file is selected, the method returns nil.
32
+ #
33
+ # @param pattern [ String ] the glob pattern to search for files (defaults to '**/*')
34
+ #
35
+ # @return [ String, nil ] the path to the selected file or nil if no file was chosen
36
+ def choose_filename(pattern)
37
+ files = Dir.glob(pattern).select { File.file?(it) }
38
+ files.unshift('[EXIT]')
39
+ case chosen = OllamaChat::Utils::Chooser.choose(files)
40
+ when '[EXIT]', nil
41
+ STDOUT.puts "Exiting chooser."
42
+ return
43
+ else
44
+ chosen
45
+ end
46
+ end
47
+
48
+ # The context_spook method collects and returns project context using the
49
+ # context_spook library.
50
+ #
51
+ # This method generates structured project context that can be used to
52
+ # provide AI models with comprehensive information about the codebase. It
53
+ # supports both:
54
+ # - On-the-fly pattern matching for specific file patterns
55
+ # - Loading context from predefined definition files in ./.contexts/
56
+ #
57
+ # When patterns are provided, it collects files matching the glob patterns
58
+ # and generates context data including file contents, sizes, and metadata.
59
+ # When no patterns are provided, it loads the default context definition
60
+ # file.
61
+ #
62
+ # @param patterns [Array<String>, nil] Optional array of glob patterns to
63
+ # filter files
64
+ # @return [String, nil] JSON string of context data or nil if no context
65
+ # could be generated
66
+ #
67
+ # @example Collect context for Ruby files only
68
+ # context_spook(['lib/**/*.rb'])
69
+ #
70
+ # @example Collect context for multiple patterns
71
+ # context_spook(['lib/**/*.rb', 'spec/**/*.rb'])
72
+ #
73
+ # @example Load default context
74
+ # context_spook(nil)
75
+ def context_spook(patterns)
76
+ if patterns
77
+ ContextSpook::generate_context(verbose: false) do |context|
78
+ context do
79
+ Dir.glob(patterns).each do |filename|
80
+ File.file?(filename) or next
81
+ file filename
82
+ end
83
+ end
84
+ end.to_json
85
+ else
86
+ if context_filename = choose_filename('.contexts/*.rb')
87
+ ContextSpook.generate_context(context_filename, verbose: false).to_json
88
+ end
89
+ end
90
+ end
91
+ end
data/lib/ollama_chat/redis_cache.rb CHANGED
@@ -1,31 +1,31 @@
1
1
  require 'redis'
2
2
 
3
- # A Redis-based cache implementation for OllamaChat
4
- #
5
- # This class provides a wrapper around Redis that offers a simple key-value
6
- # caching interface with support for expiration times and namespace isolation.
7
- # It's designed to be used as a cache backend for various components in the
8
- # OllamaChat application.
9
- #
10
- # @example Basic usage
11
- # cache = OllamaChat::RedisCache.new(prefix: 'myapp-', url: 'redis://localhost:6379')
12
- # cache['key'] = 'value'
13
- # value = cache['key']
14
- # cache.delete('key')
15
- #
16
- # @example With expiration
17
- # cache = OllamaChat::RedisCache.new(prefix: 'expiring-', url: 'redis://localhost:6379', ex: 3600)
18
- # cache['key'] = 'value' # Automatically expires in 1 hour
19
- #
20
- # @example Iteration
21
- # cache.each do |key, value|
22
- # puts "#{key}: #{value}"
23
- # end
24
- #
25
- # @example Cache management
26
- # cache.clear # Remove all entries with this prefix
27
- # size = cache.size # Get number of entries
28
3
  module OllamaChat
4
+ # A Redis-based cache implementation for OllamaChat
5
+ #
6
+ # This class provides a wrapper around Redis that offers a simple key-value
7
+ # caching interface with support for expiration times and namespace isolation.
8
+ # It's designed to be used as a cache backend for various components in the
9
+ # OllamaChat application.
10
+ #
11
+ # @example Basic usage
12
+ # cache = OllamaChat::RedisCache.new(prefix: 'myapp-', url: 'redis://localhost:6379')
13
+ # cache['key'] = 'value'
14
+ # value = cache['key']
15
+ # cache.delete('key')
16
+ #
17
+ # @example With expiration
18
+ # cache = OllamaChat::RedisCache.new(prefix: 'expiring-', url: 'redis://localhost:6379', ex: 3600)
19
+ # cache['key'] = 'value' # Automatically expires in 1 hour
20
+ #
21
+ # @example Iteration
22
+ # cache.each do |key, value|
23
+ # puts "#{key}: #{value}"
24
+ # end
25
+ #
26
+ # @example Cache management
27
+ # cache.clear # Remove all entries with this prefix
28
+ # size = cache.size # Get number of entries
29
29
  class RedisCache
30
30
  include Enumerable
31
31
 
data/lib/ollama_chat/version.rb CHANGED
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.50'
3
+ VERSION = '0.0.51'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/lib/ollama_chat.rb CHANGED
@@ -36,5 +36,6 @@ require 'ollama_chat/history'
36
36
  require 'ollama_chat/server_socket'
37
37
  require 'ollama_chat/kramdown_ansi'
38
38
  require 'ollama_chat/conversation'
39
+ require 'ollama_chat/input_content'
39
40
  require 'ollama_chat/env_config'
40
41
  require 'ollama_chat/chat'
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.50 ruby lib
2
+ # stub: ollama_chat 0.0.51 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.50".freeze
6
+ s.version = "0.0.51".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
@@ -12,8 +12,8 @@ Gem::Specification.new do |s|
12
12
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
13
13
  s.email = "flori@ping.de".freeze
14
14
  s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
15
- s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
- s.files = [".utilsrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, 
"spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/example_with_quote.html".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
15
+ s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
+ s.files = [".utilsrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/env_config.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/input_content.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/redis_cache.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/think_control.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, 
"spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/example_with_quote.html".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/redis_cache_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
17
17
  s.homepage = "https://github.com/flori/ollama_chat".freeze
18
18
  s.licenses = ["MIT".freeze]
19
19
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
@@ -50,4 +50,5 @@ Gem::Specification.new do |s|
50
50
  s.add_runtime_dependency(%q<bigdecimal>.freeze, ["~> 3.1".freeze])
51
51
  s.add_runtime_dependency(%q<csv>.freeze, ["~> 3.0".freeze])
52
52
  s.add_runtime_dependency(%q<const_conf>.freeze, ["~> 0.3".freeze])
53
+ s.add_runtime_dependency(%q<context_spook>.freeze, ["~> 1.1".freeze])
53
54
  end
data/spec/ollama_chat/message_output_spec.rb CHANGED
@@ -17,10 +17,10 @@ describe OllamaChat::MessageOutput do
17
17
  end
18
18
 
19
19
  it 'pipe can write to command stdin' do
20
- expect(STDERR).to receive(:puts).with(/No response available to output to pipe command "true"/)
21
- expect(chat.pipe('true')).to be_nil
20
+ expect(STDERR).to receive(:puts).with(/No response available to output to pipe command ".*true.*"/)
21
+ expect(chat.pipe(`which true`)).to be_nil
22
22
  chat.instance_variable_get(:@messages).load_conversation(asset('conversation.json'))
23
- expect(STDOUT).to receive(:puts).with(/Last response was piped to \"true\"./)
24
- expect(chat.pipe('true')).to eq chat
23
+ expect(STDOUT).to receive(:puts).with(/Last response was piped to \".*true.*\"./)
24
+ expect(chat.pipe(`which true`)).to eq chat
25
25
  end
26
26
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ollama_chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.50
4
+ version: 0.0.51
5
5
  platform: ruby
6
6
  authors:
7
7
  - Florian Frank
@@ -379,6 +379,20 @@ dependencies:
379
379
  - - "~>"
380
380
  - !ruby/object:Gem::Version
381
381
  version: '0.3'
382
+ - !ruby/object:Gem::Dependency
383
+ name: context_spook
384
+ requirement: !ruby/object:Gem::Requirement
385
+ requirements:
386
+ - - "~>"
387
+ - !ruby/object:Gem::Version
388
+ version: '1.1'
389
+ type: :runtime
390
+ prerelease: false
391
+ version_requirements: !ruby/object:Gem::Requirement
392
+ requirements:
393
+ - - "~>"
394
+ - !ruby/object:Gem::Version
395
+ version: '1.1'
382
396
  description: |
383
397
  The app provides a command-line interface (CLI) to an Ollama AI model,
384
398
  allowing users to engage in text-based conversations and generate
@@ -404,6 +418,7 @@ extra_rdoc_files:
404
418
  - lib/ollama_chat/follow_chat.rb
405
419
  - lib/ollama_chat/history.rb
406
420
  - lib/ollama_chat/information.rb
421
+ - lib/ollama_chat/input_content.rb
407
422
  - lib/ollama_chat/kramdown_ansi.rb
408
423
  - lib/ollama_chat/message_format.rb
409
424
  - lib/ollama_chat/message_list.rb
@@ -444,6 +459,7 @@ files:
444
459
  - lib/ollama_chat/follow_chat.rb
445
460
  - lib/ollama_chat/history.rb
446
461
  - lib/ollama_chat/information.rb
462
+ - lib/ollama_chat/input_content.rb
447
463
  - lib/ollama_chat/kramdown_ansi.rb
448
464
  - lib/ollama_chat/message_format.rb
449
465
  - lib/ollama_chat/message_list.rb