ollama_chat 0.0.18 → 0.0.20

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 5b9fa311f0d8759e37629d79db604ca81c344699e18203efd60f4b3361eb95f0
-  data.tar.gz: '0727224708a9fb941a6572322793f0161fc21cee945ed08642fd41707c26d57f'
+  metadata.gz: f42ba971131695901f87aea1e9b7c99a41781a61acfae487e812c5c3f185aa01
+  data.tar.gz: b27f7e733b13c3f0023c189fc7be8ac9a1efab943ef562ad5c322e6ad085c7c6
 SHA512:
-  metadata.gz: ba5167d27ca64037115b36fc77c405e85311f1595fb771835b3c55429305b2e6304e7064c30e08f57fb8d97e99ccda818cc45cb43681c25b42e120c30d9feb64
-  data.tar.gz: 43b994f34e3e161b49c989dfec1df0d02f6debb710c7a078902d3872b2bebea2fc3606481ef5d36142afba36b9a60a5daa80630663f2ef5614d30674e4329c1e
+  metadata.gz: 393146c6eb88e53e056ef5b303076c0584cb8f28f32db4e37b44dd24a845bc86c2987239f31cbfd600906030af004f020e68e4a30491945be250704353b3a567
+  data.tar.gz: b9ab5d2f1ab4d9abf34da61e341304fcf0f1a7fd7fe5e14eb3819ea41f37e3394102b4990a71bf7abd6641b181ef1cf2e40d90fd47e5e3f5665ee081bfb32568
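The checksums above are the SHA256/SHA512 digests of the `metadata.gz` and `data.tar.gz` members inside the published `.gem` archive. As a minimal sketch of how the new SHA256 value for `data.tar.gz` could be re-derived, assuming the 0.0.20 gem has already been fetched locally (for example via `gem fetch ollama_chat -v 0.0.20`):

```ruby
require 'digest'
require 'rubygems/package'

# A .gem file is a plain tar archive whose members include metadata.gz,
# data.tar.gz and checksums.yaml.gz; read data.tar.gz and hash it, then
# compare the result with the SHA256 entry shown in the diff above.
File.open('ollama_chat-0.0.20.gem', 'rb') do |gem_io|
  Gem::Package::TarReader.new(gem_io) do |tar|
    entry = tar.find { |e| e.full_name == 'data.tar.gz' }
    puts Digest::SHA256.hexdigest(entry.read)
  end
end
```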
data/CHANGES.md CHANGED
@@ -1,5 +1,44 @@
 # Changes
 
+## 2025-08-11 v0.0.20
+
+### Documentation
+
+- Added more YARD-style documentation to all public methods throughout the codebase.
+
+### Fixed
+
+- **Message Output**:
+  - Corrected `output(filename)` method to pass the message object to
+    `write_file_unless_exist` for proper content writing.
+
+## 2025-08-11 v0.0.19
+
+* Added `/last` command to show last assistant message:
+  * Introduced `show_last` method in `MessageList` class to display the last
+    non-user message.
+  * Extracted message formatting logic into `message_text_for` method for
+    better code organization.
+  * Updated documentation comments for improved clarity.
+  * Updated `README.md` to document the new `/last` command.
+* Added `/output` and `/pipe` commands for response handling:
+  * Introduced `OllamaChat::MessageOutput` module with `pipe` and `output`
+    methods.
+  * Updated `MessageList#save_conversation` and `MessageList#load_conversation`
+    to use `STDERR` for errors.
+  * Added comprehensive error handling with exit code checking for pipe
+    operations.
+  * Updated help text to document new `/output` and `/pipe` commands.
+* Sorted prompt lists for consistent ordering:
+  * Ensured predictable prompt selection order in dialog interface.
+* Removed RSpec describe syntax in favor of bare `describe`.
+* Supported application/xml content type for RSS parsing:
+  * Added `application/xml` MIME type support alongside existing `text/xml`.
+  * Updated `OllamaChat::Parsing` module condition matching.
+  * Added test case for `application/xml` RSS parsing.
+* Maintained other development dependencies at their current versions.
+* Updated error message wording in parsing module.
+
 ## 2025-07-31 v0.0.18
 
 * **Added /prompt command**: The `/prompt` command was added to the list of
data/README.md CHANGED
@@ -125,6 +125,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
 /stream             toggle stream output
 /location           toggle location submission
 /voice [change]     toggle voice output or change the voice
+/last               show the last system/assistant message
 /list [n]           list the last n / all conversation exchanges
 /clear [what]       clear what=messages|links|history|tags|all
 /clobber            clear the conversation, links, and collection
@@ -146,6 +147,8 @@ The following commands can be given inside the chat, if prefixed by a `/`:
 /links [clear]      display (or clear) links used in the chat
 /save filename      store conversation messages
 /load filename      load conversation messages
+/output filename    save last response to filename
+/pipe command       write last response to command's stdin
 /quit               to quit
 /help               to view this help
 ```
data/VERSION CHANGED
@@ -1 +1 @@
-0.0.18
+0.0.20
@@ -28,6 +28,7 @@ class OllamaChat::Chat
   include OllamaChat::WebSearching
   include OllamaChat::Dialog
   include OllamaChat::Information
+  include OllamaChat::MessageOutput
   include OllamaChat::Clipboard
   include OllamaChat::MessageFormat
   include OllamaChat::History
@@ -140,6 +141,9 @@ class OllamaChat::Chat
       last = 2 * $1.to_i if $1
       messages.list_conversation(last)
       :next
+    when %r(^/last$)
+      messages.show_last
+      :next
     when %r(^/clear(?:\s+(messages|links|history|tags|all))?$)
      clean($1)
       :next
@@ -227,18 +231,33 @@ class OllamaChat::Chat
       @parse_content = false
       web($1, $2)
     when %r(^/save\s+(.+)$)
-      messages.save_conversation($1)
-      STDOUT.puts "Saved conversation to #$1."
+      filename = $1
+      if messages.save_conversation(filename)
+        STDOUT.puts "Saved conversation to #{filename.inspect}."
+      else
+        STDOUT.puts "Saving conversation to #{filename.inspect} failed."
+      end
       :next
     when %r(^/links(?:\s+(clear))?$)
       manage_links($1)
       :next
     when %r(^/load\s+(.+)$)
-      messages.load_conversation($1)
+      filename = $1
+      success = messages.load_conversation(filename)
       if messages.size > 1
         messages.list_conversation(2)
       end
-      STDOUT.puts "Loaded conversation from #$1."
+      if success
+        STDOUT.puts "Loaded conversation from #{filename.inspect}."
+      else
+        STDOUT.puts "Loading conversation from #{filename.inspect} failed."
+      end
+      :next
+    when %r(^/pipe\s+(.+)$)
+      pipe($1)
+      :next
+    when %r(^/output\s+(.+)$)
+      output($1)
       :next
     when %r(^/config$)
       display_config
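The new `/last`, `/pipe`, and `/output` branches above plug into the existing command dispatch: a `case`/`when` over regexps in which `$1` carries the captured argument. A minimal standalone sketch of that pattern, using a hypothetical `dispatch` helper rather than code from the gem:

```ruby
# case/when matches each command via Regexp#===, and a successful match
# sets $1 to the captured argument, just as in the chat loop above.
def dispatch(content)
  case content
  when %r(^/last$)          then 'show last assistant message'
  when %r(^/pipe\s+(.+)$)   then "pipe last response to command: #{$1}"
  when %r(^/output\s+(.+)$) then "write last response to file: #{$1}"
  else 'not a command'
  end
end

puts dispatch('/output last_reply.md') # => write last response to file: last_reply.md
```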
@@ -1,5 +1,4 @@
 module OllamaChat::Clipboard
-
   # Copy the last assistant's message to the system clipboard.
   #
   # This method checks if there is a last message from an assistant in the `@messages`
@@ -1,4 +1,12 @@
1
1
  module OllamaChat::Dialog
2
+ # The model_with_size method formats a model's size for display
3
+ # by creating a formatted string that includes the model name and its size
4
+ # in a human-readable format with appropriate units.
5
+ #
6
+ # @param model [ Object ] the model object that has name and size attributes
7
+ #
8
+ # @return [ Object ] a result object with an overridden to_s method
9
+ # that combines the model name and formatted size
2
10
  private def model_with_size(model)
3
11
  result = model.name
4
12
  formatted_size = Term::ANSIColor.bold {
@@ -10,6 +18,13 @@ module OllamaChat::Dialog
10
18
  result
11
19
  end
12
20
 
21
+ # The choose_model method selects a model from the available list based on
22
+ # CLI input or user interaction.
23
+ # It processes the provided CLI model parameter to determine if a regex
24
+ # selector is used, filters the models accordingly, and prompts the user to
25
+ # choose from the filtered list if needed.
26
+ # The method ensures that a model is selected and displays a connection
27
+ # message with the chosen model and base URL.
13
28
  def choose_model(cli_model, current_model)
14
29
  selector = if cli_model =~ /\A\?+(.*)\z/
15
30
  cli_model = ''
@@ -26,11 +41,23 @@ module OllamaChat::Dialog
26
41
  STDOUT.puts green { "Connecting to #{model}@#{ollama.base_url} now…" }
27
42
  end
28
43
 
44
+ # The ask? method prompts the user with a question and returns their input.
45
+ #
46
+ # @param prompt [ String ] the message to display to the user
47
+ #
48
+ # @return [ String ] the user's response with trailing newline removed
29
49
  def ask?(prompt:)
30
50
  print prompt
31
51
  STDIN.gets.chomp
32
52
  end
33
53
 
54
+ # The choose_collection method presents a menu to select or create a document
55
+ # collection. It displays existing collections along with options to create a
56
+ # new one or exit.
57
+ # The method prompts the user for input and updates the document collection
58
+ # accordingly.
59
+ #
60
+ # @param current_collection [ String, nil ] the name of the currently active collection
34
61
  def choose_collection(current_collection)
35
62
  collections = [ current_collection ] + @documents.collections
36
63
  collections = collections.compact.map(&:to_s).uniq.sort
@@ -49,8 +76,19 @@ module OllamaChat::Dialog
49
76
  info
50
77
  end
51
78
 
79
+ # The document_policy method sets the policy for handling document imports.
80
+ #
81
+ # @param value [ String ] the document policy to be set
52
82
  attr_writer :document_policy
53
83
 
84
+ # The choose_document_policy method presents a menu to select a document policy.
85
+ # It allows the user to choose from importing, embedding, summarizing, or
86
+ # ignoring documents.
87
+ # The method displays available policies and sets the selected policy as the
88
+ # current document policy.
89
+ # If no valid policy is found, it defaults to the first option.
90
+ # After selection, it outputs the chosen policy and displays the current
91
+ # configuration information.
54
92
  def choose_document_policy
55
93
  policies = %w[ importing embedding summarizing ignoring ].sort
56
94
  current = if policies.index(@document_policy)
@@ -73,13 +111,22 @@ module OllamaChat::Dialog
73
111
  info
74
112
  end
75
113
 
114
+ # The change_system_prompt method allows the user to select or enter a new
115
+ # system prompt for the chat session.
116
+ # It provides an interactive chooser when multiple prompts match the given
117
+ # selector, and sets the selected prompt as the current system prompt for the
118
+ # messages.
119
+ #
120
+ # @param default [ String ] the default system prompt to fall back to
121
+ # @param system [ String ] the system prompt identifier or pattern to
122
+ # search for
76
123
  def change_system_prompt(default, system: nil)
77
124
  selector = if system =~ /\A\?(.+)\z/
78
125
  Regexp.new($1)
79
126
  else
80
127
  Regexp.new(system.to_s)
81
128
  end
82
- prompts = config.system_prompts.attribute_names.compact.grep(selector)
129
+ prompts = config.system_prompts.attribute_names.compact.grep(selector).sort
83
130
  if prompts.size == 1
84
131
  system = config.system_prompts.send(prompts.first)
85
132
  else
@@ -103,8 +150,13 @@ module OllamaChat::Dialog
103
150
  @messages.set_system_prompt(system)
104
151
  end
105
152
 
153
+ # The choose_prompt method presents a menu of available prompts for selection.
154
+ # It retrieves the list of prompt attributes from the configuration,
155
+ # adds an '[EXIT]' option to the list, and displays it to the user.
156
+ # After the user makes a choice, the method either exits the chooser
157
+ # or applies the selected prompt configuration.
106
158
  def choose_prompt
107
- prompts = config.prompts.attribute_names
159
+ prompts = config.prompts.attribute_names.sort
108
160
  prompts.unshift('[EXIT]')
109
161
  case chosen = OllamaChat::Utils::Chooser.choose(prompts)
110
162
  when '[EXIT]', nil
@@ -115,11 +167,20 @@ module OllamaChat::Dialog
115
167
  end
116
168
  end
117
169
 
170
+ # The change_voice method allows the user to select a voice from a list of
171
+ # available options. It uses the chooser to present the options and sets the
172
+ # selected voice as the current voice.
173
+ #
174
+ # @return [ String ] the full name of the chosen voice
118
175
  def change_voice
119
176
  chosen = OllamaChat::Utils::Chooser.choose(config.voice.list)
120
177
  @current_voice = chosen.full? || config.voice.default
121
178
  end
122
179
 
180
+ # The message_list method creates and returns a new MessageList instance
181
+ # initialized with the current object as its argument.
182
+ #
183
+ # @return [ MessageList ] a new MessageList object
123
184
  def message_list
124
185
  MessageList.new(self)
125
186
  end
@@ -1,8 +1,20 @@
1
1
  module OllamaChat::DocumentCache
2
+ # The document_cache_class method returns the cache class specified in the
3
+ # configuration.
4
+ #
5
+ # @return [ Class ] the cache class defined by the config.cache setting
2
6
  def document_cache_class
3
7
  Object.const_get(config.cache)
4
8
  end
5
9
 
10
+ # The configure_cache method determines the appropriate cache class to use
11
+ # for document storage.
12
+ # It checks if the -M option was specified to use MemoryCache, otherwise it
13
+ # attempts to use the configured cache class.
14
+ # If an error occurs during this process, it falls back to using MemoryCache
15
+ # and reports the error.
16
+ #
17
+ # @return [ Class ] the selected cache class to be used for document caching
6
18
  def configure_cache
7
19
  if @opts[?M]
8
20
  Documentrix::Documents::MemoryCache
@@ -4,7 +4,6 @@ class OllamaChat::FollowChat
   include Term::ANSIColor
   include OllamaChat::MessageFormat
 
-
   # Initializes a new instance of OllamaChat::FollowChat.
   #
   # @param [OllamaChat::Chat] chat The chat object, which represents the conversation context.
@@ -7,15 +7,31 @@ module OllamaChat::Information
7
7
  end
8
8
 
9
9
  module UserAgent
10
+ # The progname method returns the name of the application.
11
+ #
12
+ # @return [ String ] the application name "ollama_chat"
10
13
  def progname
11
14
  'ollama_chat'
12
15
  end
13
16
 
17
+ # The user_agent method constructs and returns a user agent string
18
+ # that combines the program name and the OllamaChat version
19
+ # separated by a forward slash.
20
+ #
21
+ # @return [ String ] the formatted user agent string
14
22
  def user_agent
15
23
  [ progname, OllamaChat::VERSION ] * ?/
16
24
  end
17
25
  end
18
26
 
27
+ # The collection_stats method displays statistics about the current document
28
+ # collection.
29
+ #
30
+ # This method outputs information regarding the active document collection,
31
+ # including the collection name, total number of embeddings, and a list of
32
+ # tags.
33
+ #
34
+ # @return [ nil ] This method always returns nil.
19
35
  def collection_stats
20
36
  STDOUT.puts <<~EOT
21
37
  Current Collection
@@ -27,6 +43,13 @@ module OllamaChat::Information
27
43
  nil
28
44
  end
29
45
 
46
+ # The info method displays comprehensive information about the current state
47
+ # of the ollama_chat instance.
48
+ # This includes version details, server connection status, model
49
+ # configurations, embedding settings, and various operational switches.
50
+ #
51
+ # @return [ nil ] This method does not return a value; it outputs information
52
+ # directly to standard output.
30
53
  def info
31
54
  STDOUT.puts "Running ollama_chat version: #{bold(OllamaChat::VERSION)}"
32
55
  STDOUT.puts "Connected to ollama server version: #{bold(server_version)} on: #{bold(server_url)}"
@@ -58,6 +81,9 @@ module OllamaChat::Information
58
81
  nil
59
82
  end
60
83
 
84
+ # The display_chat_help_message method returns a formatted string containing
85
+ # all available command-line options and their descriptions for the chat
86
+ # interface.
61
87
  private def display_chat_help_message
62
88
  <<~EOT
63
89
  /copy to copy last response to clipboard
@@ -66,6 +92,7 @@ module OllamaChat::Information
66
92
  /stream toggle stream output
67
93
  /location toggle location submission
68
94
  /voice [change] toggle voice output or change the voice
95
+ /last show the last system/assistant message
69
96
  /list [n] list the last n / all conversation exchanges
70
97
  /clear [what] clear what=messages|links|history|tags|all
71
98
  /clobber clear the conversation, links, and collection
@@ -87,16 +114,25 @@ module OllamaChat::Information
87
114
  /links [clear] display (or clear) links used in the chat
88
115
  /save filename store conversation messages
89
116
  /load filename load conversation messages
117
+ /output filename save last response to filename
118
+ /pipe command write last response to command's stdin
90
119
  /quit to quit
91
120
  /help to view this help
92
121
  EOT
93
122
  end
94
123
 
124
+ # The display_chat_help method outputs the chat help message to standard output.
125
+ #
126
+ # @return [ nil ] This method always returns nil after printing the help message.
95
127
  def display_chat_help
96
128
  STDOUT.puts display_chat_help_message
97
129
  nil
98
130
  end
99
131
 
132
+ # The usage method displays the command-line interface help text
133
+ # and returns an exit code of 0.
134
+ #
135
+ # @return [ Integer ] always returns 0 indicating successful help display
100
136
  def usage
101
137
  STDOUT.puts <<~EOT
102
138
  Usage: #{progname} [OPTIONS]
@@ -120,15 +156,25 @@ module OllamaChat::Information
120
156
  0
121
157
  end
122
158
 
159
+ # The version method outputs the program name and its version number to
160
+ # standard output.
161
+ #
162
+ # @return [ Integer ] returns 0 indicating successful execution
123
163
  def version
124
164
  STDOUT.puts "%s %s" % [ progname, OllamaChat::VERSION ]
125
165
  0
126
166
  end
127
167
 
168
+ # The server_version method retrieves the version of the Ollama server.
169
+ #
170
+ # @return [ String ] the version string of the connected Ollama server
128
171
  def server_version
129
172
  @server_version ||= ollama.version.version
130
173
  end
131
174
 
175
+ # The server_url method returns the base URL of the Ollama server connection.
176
+ #
177
+ # @return [ String ] the base URL used for communicating with the Ollama API
132
178
  def server_url
133
179
  @server_url ||= ollama.base_url
134
180
  end
@@ -1,8 +1,20 @@
1
1
  module OllamaChat::MessageFormat
2
+ # The message_type method determines the appropriate message icon based on
3
+ # whether images are present.
4
+ #
5
+ # @param images [ Array ] an array of images
6
+ #
7
+ # @return [ String ] returns 📸 if images are present, 📨 otherwise
2
8
  def message_type(images)
3
9
  images.present? ? ?📸 : ?📨
4
10
  end
5
11
 
12
+ # The think_annotate method processes a string and conditionally annotates it
13
+ # with a thinking emoji if the think feature is enabled.
14
+ #
15
+ # @param block [ Proc ] a block that returns a string to be processed
16
+ #
17
+ # @return [ String, nil ] the annotated string with a thinking emoji if enabled, otherwise nil
6
18
  def think_annotate(&block)
7
19
  string = block.()
8
20
  string.to_s.size == 0 and return
@@ -11,6 +23,12 @@ module OllamaChat::MessageFormat
11
23
  end
12
24
  end
13
25
 
26
+ # The talk_annotate method processes a string output by a block and
27
+ # conditionally adds annotation.
28
+ #
29
+ # @param block [ Proc ] a block that returns a string to be processed
30
+ #
31
+ # @return [ String, nil ] the annotated string if it has content, otherwise nil
14
32
  def talk_annotate(&block)
15
33
  string = block.()
16
34
  string.to_s.size == 0 and return
@@ -66,7 +66,7 @@ class OllamaChat::MessageList
   # @return [ OllamaChat::MessageList ] self
   def load_conversation(filename)
     unless File.exist?(filename)
-      STDOUT.puts "File #{filename} doesn't exist. Choose another filename."
+      STDERR.puts "File #{filename.inspect} doesn't exist. Choose another filename."
       return
     end
     @messages =
@@ -83,10 +83,10 @@ class OllamaChat::MessageList
   # @return [ OllamaChat::MessageList ] self
   def save_conversation(filename)
     if File.exist?(filename)
-      STDOUT.puts "File #{filename} already exists. Choose another filename."
+      STDERR.puts "File #{filename.inspect} already exists. Choose another filename."
       return
     end
-    File.open(filename, 'w') do |output|
+    File.open(filename, ?w) do |output|
       output.puts JSON(@messages)
     end
     self
@@ -100,36 +100,26 @@ class OllamaChat::MessageList
   def list_conversation(last = nil)
     last = (last || @messages.size).clamp(0, @messages.size)
     use_pager do |output|
-      @messages[-last..-1].to_a.each do |m|
-        role_color = case m.role
-                     when 'user' then 172
-                     when 'assistant' then 111
-                     when 'system' then 213
-                     else 210
-                     end
-        thinking = if @chat.think.on?
-                     think_annotate do
-                       m.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
-                     end
-                   end
-        content = m.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
-        message_text = message_type(m.images) + " "
-        message_text += bold { color(role_color) { m.role } }
-        if thinking
-          message_text += [ ?:, thinking, talk_annotate { content } ].compact.
-            map { _1.chomp } * ?\n
-        else
-          message_text += ":\n#{content}"
-        end
-        m.images.full? { |images|
-          message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
-        }
-        output.puts message_text
+      @messages[-last..-1].to_a.each do |message|
+        output.puts message_text_for(message)
       end
     end
     self
   end
 
+  # The show_last method displays the text of the last message if it is not
+  # from the user. It uses a pager for output and returns the instance itself.
+  #
+  # @return [ OllamaChat::MessageList ] returns the instance of the class
+  def show_last
+    message = last
+    message&.role == 'user' and return
+    use_pager do |output|
+      output.puts message_text_for(message)
+    end
+    self
+  end
+
   # Removes the last `n` exchanges from the message list. An exchange consists
   # of a user and an assistant message. If only a single user message is
   # present at the end, it will be removed first before proceeding with
@@ -260,10 +250,21 @@ class OllamaChat::MessageList
260
250
 
261
251
  private
262
252
 
253
+ # The config method provides access to the chat configuration object.
254
+ #
255
+ # @return [ Object ] the configuration object associated with the chat instance
263
256
  def config
264
257
  @chat.config
265
258
  end
266
259
 
260
+ # The determine_pager_command method identifies an appropriate pager command
261
+ # for displaying content.
262
+ # It first checks for a default pager specified by the PAGER environment variable.
263
+ # If no default is found, it attempts to locate 'less' or 'more' in the
264
+ # system PATH as fallback options.
265
+ # The method returns the selected pager command, ensuring it includes the
266
+ # '-r' flag for proper handling of raw control characters when a fallback
267
+ # pager is used.
267
268
  def determine_pager_command
268
269
  default_pager = ENV['PAGER'].full?
269
270
  if fallback_pager = `which less`.chomp.full? || `which more`.chomp.full?
@@ -272,6 +273,11 @@ class OllamaChat::MessageList
272
273
  default_pager || fallback_pager
273
274
  end
274
275
 
276
+ # The use_pager method wraps the given block with a pager context.
277
+ # If the output would exceed the terminal's line capacity, it pipes the content
278
+ # through an appropriate pager command (like 'less' or 'more').
279
+ #
280
+ # @param block [Proc] A block that yields an IO object to write output to
275
281
  def use_pager
276
282
  command = determine_pager_command
277
283
  output_buffer = StringIO.new
@@ -281,4 +287,41 @@ class OllamaChat::MessageList
281
287
  output.puts messages
282
288
  end
283
289
  end
290
+
291
+ # The message_text_for method generates formatted text representation of a
292
+ # message including its role, content, thinking annotations, and associated
293
+ # images.
294
+ # It applies color coding to different message roles and uses markdown
295
+ # parsing when enabled. The method also handles special formatting for
296
+ # thinking annotations and image references within the message.
297
+ #
298
+ # @param message [Object] the message object containing role, content, thinking, and images
299
+ #
300
+ # @return [String] the formatted text representation of the message
301
+ def message_text_for(message)
302
+ role_color = case message.role
303
+ when 'user' then 172
304
+ when 'assistant' then 111
305
+ when 'system' then 213
306
+ else 210
307
+ end
308
+ thinking = if @chat.think.on?
309
+ think_annotate do
310
+ message.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
311
+ end
312
+ end
313
+ content = message.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
314
+ message_text = message_type(message.images) + " "
315
+ message_text += bold { color(role_color) { message.role } }
316
+ if thinking
317
+ message_text += [ ?:, thinking, talk_annotate { content } ].compact.
318
+ map { _1.chomp } * ?\n
319
+ else
320
+ message_text += ":\n#{content}"
321
+ end
322
+ message.images.full? { |images|
323
+ message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
324
+ }
325
+ message_text
326
+ end
284
327
  end
@@ -0,0 +1,75 @@
+module OllamaChat::MessageOutput
+  # The pipe method forwards the last assistant message to a command's standard
+  # input.
+  #
+  # @param cmd [ String ] the command to which the output should be piped
+  #
+  # @return [ OllamaChat::Chat ] returns self
+  # @return [ nil ] returns nil if the command is not provided or if there is
+  #   no assistant message
+  def pipe(cmd)
+    cmd.present? or return
+    if message = @messages.last and message.role == 'assistant'
+      begin
+        IO.popen(cmd, ?w) do |output|
+          output.write(message.content)
+        end
+        exit_code = $?&.exitstatus
+        if exit_code == 0
+          STDOUT.puts "Last response was piped to #{cmd.inspect}."
+        else
+          STDERR.puts "Executing #{cmd.inspect}, failed with exit code #{exit_code}."
+        end
+        self
+      rescue => e
+        STDERR.puts "Executing #{cmd.inspect}, caused #{e.class}: #{e}."
+      end
+    else
+      STDERR.puts "No response available to output to pipe command #{cmd.inspect}."
+    end
+  end
+
+  # The output method writes the last assistant message to a file.
+  #
+  # @param filename [ String ] the path to the file where the output should be written
+  #
+  # @return [ Chat ] returns self on success, nil on failure
+  #
+  # @see write_file_unless_exist
+  #
+  # @note If no assistant message is available, an error message is printed to stderr.
+  def output(filename)
+    if message = @messages.last and message.role == 'assistant'
+      begin
+        write_file_unless_exist(filename, message)
+        STDOUT.puts "Last response was written to #{filename.inspect}."
+        self
+      rescue => e
+        STDERR.puts "Writing to #{filename.inspect}, caused #{e.class}: #{e}."
+      end
+    else
+      STDERR.puts "No response available to write to #{filename.inspect}."
+    end
+  end
+
+  private
+
+  # The write_file_unless_exist method creates a new file with the specified
+  # message content, but only if a file with that name does not already exist.
+  #
+  # @param filename [ String ] the path of the file to be created
+  # @param message [ Ollama::Message ] the message object containing the content to write
+  #
+  # @return [ TrueClass ] if the file was successfully created
+  # @return [ nil ] if the file already exists and was not created
+  def write_file_unless_exist(filename, message)
+    if File.exist?(filename)
+      STDERR.puts "File #{filename.inspect} already exists. Choose another filename."
+      return
+    end
+    File.open(filename, ?w) do |output|
+      output.write(message.content)
+    end
+    true
+  end
+end
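For context on the exit-code handling in `pipe` above, here is a minimal standalone sketch of the same Ruby pattern, assuming a POSIX `wc` command is available (illustrative only, not code from the gem):

```ruby
# IO.popen in write mode feeds the block's writes to the command's stdin;
# once the block returns, $? holds the child's Process::Status.
IO.popen('wc -w', ?w) do |io|
  io.write('last assistant response goes here')
end
exit_code = $?&.exitstatus
puts exit_code == 0 ? 'piped successfully' : "failed with exit code #{exit_code}"
```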
@@ -1,5 +1,4 @@
 module OllamaChat::ModelHandling
-
   # The model_present? method checks if the specified Ollama model is available.
   #
   # @param model [ String ] the name of the Ollama model
@@ -9,6 +9,15 @@ class OllamaChat::OllamaChatConfig
9
9
 
10
10
  DEFAULT_CONFIG = File.read(DEFAULT_CONFIG_PATH)
11
11
 
12
+ # The initialize method sets up the configuration file path and ensures the
13
+ # cache directory exists.
14
+ # It attempts to load configuration from the specified filename or uses a
15
+ # default path.
16
+ # If the configuration file is missing and the default path is used, it
17
+ # creates the necessary directory structure and writes a default
18
+ # configuration file.
19
+ #
20
+ # @param filename [ String, nil ] the path to the configuration file
12
21
  def initialize(filename = nil)
13
22
  @filename = filename || default_path
14
23
  unless File.directory?(cache_dir_path)
@@ -27,30 +36,62 @@ class OllamaChat::OllamaChatConfig
27
36
  end
28
37
  end
29
38
 
39
+ # The filename reader returns the name of the file associated with this instance.
30
40
  attr_reader :filename
31
41
 
42
+ # The config reader returns the configuration object for the chat instance.
43
+ #
44
+ # @return [ ComplexConfig::Settings ] the configuration object
32
45
  attr_reader :config
33
46
 
47
+ # The default_config_path method returns the path to the default
48
+ # configuration file.
49
+ #
50
+ # @return [ String ] the path to the default configuration file
34
51
  def default_config_path
35
52
  DEFAULT_CONFIG_PATH
36
53
  end
37
54
 
55
+ # The default_path method constructs the full path to the default
56
+ # configuration file.
57
+ #
58
+ # @return [ Pathname ] a Pathname object representing the path to the
59
+ # config.yml file within the configuration directory
38
60
  def default_path
39
61
  config_dir_path + 'config.yml'
40
62
  end
41
63
 
64
+ # The config_dir_path method returns the path to the ollama_chat
65
+ # configuration directory by combining the XDG config home directory with the
66
+ # 'ollama_chat' subdirectory.
67
+ #
68
+ # @return [ Pathname ] the pathname object representing the configuration
69
+ # directory
42
70
  def config_dir_path
43
71
  XDG.new.config_home + 'ollama_chat'
44
72
  end
45
73
 
74
+ # The cache_dir_path method returns the path to the ollama_chat cache
75
+ # directory within the XDG cache home directory.
76
+ #
77
+ # @return [ Pathname ] the pathname object representing the cache directory path
46
78
  def cache_dir_path
47
79
  XDG.new.cache_home + 'ollama_chat'
48
80
  end
49
81
 
82
+ # The database_path method constructs the full path to the documents database
83
+ # file by joining the cache directory path with the filename 'documents.db'.
84
+ #
85
+ # @return [ Pathname ] the full path to the documents database file
50
86
  def database_path
51
87
  cache_dir_path + 'documents.db'
52
88
  end
53
89
 
90
+ # The diff_tool method returns the preferred diff tool command.
91
+ # It checks for the DIFF_TOOL environment variable and falls back to
92
+ # 'vimdiff' if not set.
93
+ #
94
+ # @return [ String ] the command name of the diff tool to be used
54
95
  def diff_tool
55
96
  ENV.fetch('DIFF_TOOL', 'vimdiff')
56
97
  end
@@ -1,9 +1,16 @@
 module OllamaChat::Parsing
+  # The parse_source method processes different types of input sources and
+  # converts them into a standardized text representation.
+  #
+  # @param source_io [IO] the input source to be parsed
+  #
+  # @return [ String, nil ] the parsed content as a string or nil if the
+  #   content type is not supported
   def parse_source(source_io)
     case source_io&.content_type
     when 'text/html'
       reverse_markdown(source_io.read)
-    when 'text/xml'
+    when 'text/xml', 'application/xml'
       if source_io.read(8192) =~ %r(^\s*<rss\s)
         source_io.rewind
         return parse_rss(source_io)
@@ -23,11 +30,21 @@ module OllamaChat::Parsing
     when %r(\Aapplication/(json|ld\+json|x-ruby|x-perl|x-gawk|x-python|x-javascript|x-c?sh|x-dosexec|x-shellscript|x-tex|x-latex|x-lyx|x-bibtex)), %r(\Atext/), nil
       source_io.read
     else
-      STDERR.puts "Cannot embed #{source_io&.content_type} document."
+      STDERR.puts "Cannot parse #{source_io&.content_type} document."
       return
     end
   end
 
+  # The parse_csv method processes CSV content from an input source and
+  # converts it into a formatted string representation.
+  # It iterates through each row of the CSV, skipping empty rows, and
+  # constructs a structured output where each row's fields are formatted with
+  # indentation and separated by newlines. The resulting string includes double
+  # newlines between rows for readability.
+  #
+  # @param source_io [ IO ] the input source containing CSV data
+  #
+  # @return [ String ] a formatted string representation of the CSV content
   def parse_csv(source_io)
     result = +''
     CSV.table(File.new(source_io), col_sep: ?,).each do |row|
@@ -40,6 +57,18 @@ module OllamaChat::Parsing
40
57
  result
41
58
  end
42
59
 
60
+ # The parse_rss method processes an RSS feed source and converts it into a
61
+ # formatted text representation.
62
+ # It extracts the channel title and iterates through each item in the feed to
63
+ # build a structured output.
64
+ # The method uses the RSS parser to handle the source input and formats the
65
+ # title, link, publication date, and description of each item into a readable
66
+ # text format with markdown-style headers and links.
67
+ #
68
+ # @param source_io [IO] the input stream containing the RSS feed data
69
+ #
70
+ # @return [String] a formatted string representation of the RSS feed with
71
+ # channel title and item details
43
72
  def parse_rss(source_io)
44
73
  feed = RSS::Parser.parse(source_io, false, false)
45
74
  title = <<~EOT
@@ -58,6 +87,18 @@ module OllamaChat::Parsing
58
87
  end
59
88
  end
60
89
 
90
+ # The parse_atom method processes an Atom feed from the provided IO source
91
+ # and converts it into a formatted text representation.
92
+ # It extracts the feed title and iterates through each item to build a
93
+ # structured output containing titles, links, and update dates.
94
+ #
95
+ # The content of each item is converted using reverse_markdown for better
96
+ # readability.
97
+ #
98
+ # @param source_io [IO] the input stream containing the Atom feed data
99
+ #
100
+ # @return [String] a formatted string representation of the Atom feed with
101
+ # title, items, links, update dates, and content
61
102
  def parse_atom(source_io)
62
103
  feed = RSS::Parser.parse(source_io, false, false)
63
104
  title = <<~EOT
@@ -76,11 +117,26 @@ module OllamaChat::Parsing
76
117
  end
77
118
  end
78
119
 
120
+ # The pdf_read method extracts text content from a PDF file by reading all
121
+ # pages.
122
+ #
123
+ # @param io [IO] the input stream containing the PDF data
124
+ #
125
+ # @return [String] the concatenated text content from all pages in the PDF
79
126
  def pdf_read(io)
80
127
  reader = PDF::Reader.new(io)
81
128
  reader.pages.inject(+'') { |result, page| result << page.text }
82
129
  end
83
130
 
131
+
132
+ # Reads and processes PDF content using Ghostscript for conversion
133
+ #
134
+ # This method takes an IO object containing PDF data, processes it through
135
+ # Ghostscript's pdfwrite device, and returns the processed PDF content.
136
+ # If Ghostscript is not available in the system path, it outputs an error message.
137
+ #
138
+ # @param io [IO] An IO object containing PDF data to be processed
139
+ # @return [String, nil] The processed PDF content as a string, or nil if processing fails
84
140
  def ps_read(io)
85
141
  gs = `which gs`.chomp
86
142
  if gs.present?
@@ -102,6 +158,15 @@ module OllamaChat::Parsing
102
158
  end
103
159
  end
104
160
 
161
+ # The reverse_markdown method converts HTML content into Markdown format.
162
+ #
163
+ # This method processes HTML input and transforms it into equivalent
164
+ # Markdown, using specific conversion options to ensure compatibility and
165
+ # formatting.
166
+ #
167
+ # @param html [ String ] the HTML string to be converted
168
+ #
169
+ # @return [ String ] the resulting Markdown formatted string
105
170
  def reverse_markdown(html)
106
171
  ReverseMarkdown.convert(
107
172
  html,
@@ -111,6 +176,17 @@ module OllamaChat::Parsing
111
176
  )
112
177
  end
113
178
 
179
+ # Parses content and processes embedded resources based on document policy
180
+ #
181
+ # This method analyzes input content for URLs, tags, and file references,
182
+ # fetches referenced resources, and processes them according to the current
183
+ # document policy. It supports different processing modes for various content
184
+ # types.
185
+ #
186
+ # @param content [String] The input content string to parse
187
+ # @param images [Array] An array to collect image references (will be cleared)
188
+ # @return [Array<String, Documentrix::Utils::Tags>] Returns an array containing
189
+ # the processed content string and tags object if any tags were found
114
190
  def parse_content(content, images)
115
191
  images.clear
116
192
  tags = Documentrix::Utils::Tags.new valid_tag: /\A#*([\w\]\[]+)/
@@ -1,6 +1,6 @@
 module OllamaChat
   # OllamaChat version
-  VERSION = '0.0.18'
+  VERSION = '0.0.20'
   VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
   VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
   VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/lib/ollama_chat.rb CHANGED
@@ -17,6 +17,7 @@ require 'ollama_chat/source_fetching'
 require 'ollama_chat/web_searching'
 require 'ollama_chat/dialog'
 require 'ollama_chat/information'
+require 'ollama_chat/message_output'
 require 'ollama_chat/clipboard'
 require 'ollama_chat/document_cache'
 require 'ollama_chat/history'
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama_chat 0.0.18 ruby lib
+# stub: ollama_chat 0.0.20 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "ollama_chat".freeze
-  s.version = "0.0.18".freeze
+  s.version = "0.0.20".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
@@ -12,19 +12,19 @@ Gem::Specification.new do |s|
12
12
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
13
13
  s.email = "flori@ping.de".freeze
14
14
  s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
15
- s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
- s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
15
+ s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
+ s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
17
17
  s.homepage = "https://github.com/flori/ollama_chat".freeze
18
18
  s.licenses = ["MIT".freeze]
19
19
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
20
20
  s.required_ruby_version = Gem::Requirement.new("~> 3.1".freeze)
21
21
  s.rubygems_version = "3.6.9".freeze
22
22
  s.summary = "A command-line interface (CLI) for interacting with an Ollama AI model.".freeze
23
- s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
23
+ s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
24
24
 
25
25
  s.specification_version = 4
26
26
 
27
- s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.23".freeze])
27
+ s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 1.27".freeze])
28
28
  s.add_development_dependency(%q<all_images>.freeze, ["~> 0.6".freeze])
29
29
  s.add_development_dependency(%q<rspec>.freeze, ["~> 3.2".freeze])
30
30
  s.add_development_dependency(%q<kramdown>.freeze, ["~> 2.0".freeze])
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::Chat do
3
+ describe OllamaChat::Chat do
4
4
  let :argv do
5
5
  %w[ -C test ]
6
6
  end
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::Clipboard do
3
+ describe OllamaChat::Clipboard do
4
4
  let :chat do
5
5
  OllamaChat::Chat.new
6
6
  end
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::FollowChat do
3
+ describe OllamaChat::FollowChat do
4
4
  let :messages do
5
5
  [
6
6
  Ollama::Message.new(role: 'user', content: 'hello', images: []),
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::Information do
3
+ describe OllamaChat::Information do
4
4
  let :chat do
5
5
  OllamaChat::Chat.new
6
6
  end
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::MessageList do
3
+ describe OllamaChat::MessageList do
4
4
  let :config do
5
5
  double(
6
6
  location: double(
@@ -70,6 +70,16 @@ RSpec.describe OllamaChat::MessageList do
70
70
  expect(list).to receive(:determine_pager_command).and_return nil
71
71
  end
72
72
 
73
+ it 'can show last message' do
74
+ expect(chat).to receive(:markdown).
75
+ and_return(double(on?: true)).at_least(:once)
76
+ expect(chat).to receive(:think).
77
+ and_return(double(on?: false)).at_least(:once)
78
+ expect(STDOUT).to receive(:puts).
79
+ with("📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n")
80
+ list.show_last
81
+ end
82
+
73
83
  it 'can list conversations without thinking' do
74
84
  expect(chat).to receive(:markdown).
75
85
  and_return(double(on?: true)).at_least(:once)
@@ -0,0 +1,26 @@
+require 'spec_helper'
+
+describe OllamaChat::MessageOutput do
+  let :chat do
+    OllamaChat::Chat.new
+  end
+
+  connect_to_ollama_server
+
+  it 'output can write to file' do
+    expect(STDERR).to receive(:puts).with(/No response available to write to "foo.txt"/)
+    expect(chat.output('foo.txt')).to be_nil
+    chat.instance_variable_get(:@messages).load_conversation(asset('conversation.json'))
+    expect(chat).to receive(:write_file_unless_exist).and_return true
+    expect(STDOUT).to receive(:puts).with(/Last response was written to \"foo.txt\"./)
+    expect(chat.output('foo.txt')).to eq chat
+  end
+
+  it 'pipe can write to command stdin' do
+    expect(STDERR).to receive(:puts).with(/No response available to output to pipe command "true"/)
+    expect(chat.pipe('true')).to be_nil
+    chat.instance_variable_get(:@messages).load_conversation(asset('conversation.json'))
+    expect(STDOUT).to receive(:puts).with(/Last response was piped to \"true\"./)
+    expect(chat.pipe('true')).to eq chat
+  end
+end
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::ModelHandling do
3
+ describe OllamaChat::ModelHandling do
4
4
  let :chat do
5
5
  OllamaChat::Chat.new
6
6
  end
@@ -1,7 +1,7 @@
1
1
  require 'spec_helper'
2
2
  require 'pathname'
3
3
 
4
- RSpec.describe OllamaChat::Parsing do
4
+ describe OllamaChat::Parsing do
5
5
  let :chat do
6
6
  OllamaChat::Chat.new.tap do |chat|
7
7
  chat.document_policy = 'importing'
@@ -31,6 +31,19 @@ RSpec.describe OllamaChat::Parsing do
31
31
  end
32
32
  end
33
33
 
34
+ it 'can parse RSS with application/xml content type' do
35
+ asset_io('example.rss') do |io|
36
+ def io.content_type
37
+ 'application/xml'
38
+ end
39
+ expect(chat.parse_source(io)).to start_with(<<~EOT)
40
+ # Example News Feed
41
+
42
+ ## [New Study Shows Benefits of Meditation](https://example.com/article/meditation-benefits)
43
+ EOT
44
+ end
45
+ end
46
+
34
47
  it 'can parse CSV' do
35
48
  asset_io('example.csv') do |io|
36
49
  def io.content_type
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::SourceFetching do
3
+ describe OllamaChat::SourceFetching do
4
4
  let :chat do
5
5
  OllamaChat::Chat.new
6
6
  end
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::Switches do
3
+ describe OllamaChat::Switches do
4
4
  describe OllamaChat::Switches::Switch do
5
5
  let :switch do
6
6
  described_class.new(
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::Utils::CacheFetcher do
3
+ describe OllamaChat::Utils::CacheFetcher do
4
4
  let :url do
5
5
  'https://www.example.com/hello'
6
6
  end
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::Utils::Fetcher do
3
+ describe OllamaChat::Utils::Fetcher do
4
4
  let :url do
5
5
  'https://www.example.com/hello'
6
6
  end
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::Utils::FileArgument do
3
+ describe OllamaChat::Utils::FileArgument do
4
4
  it 'it can return content' do
5
5
  expect(described_class.get_file_argument('foo')).to eq 'foo'
6
6
  end
@@ -1,6 +1,6 @@
1
1
  require 'spec_helper'
2
2
 
3
- RSpec.describe OllamaChat::WebSearching do
3
+ describe OllamaChat::WebSearching do
4
4
  let :chat do
5
5
  OllamaChat::Chat.new
6
6
  end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: ollama_chat
 version: !ruby/object:Gem::Version
-  version: 0.0.18
+  version: 0.0.20
 platform: ruby
 authors:
 - Florian Frank
@@ -15,14 +15,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '1.23'
+        version: '1.27'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: '1.23'
+        version: '1.27'
 - !ruby/object:Gem::Dependency
   name: all_images
   requirement: !ruby/object:Gem::Requirement
@@ -388,6 +388,7 @@ extra_rdoc_files:
 - lib/ollama_chat/information.rb
 - lib/ollama_chat/message_format.rb
 - lib/ollama_chat/message_list.rb
+- lib/ollama_chat/message_output.rb
 - lib/ollama_chat/model_handling.rb
 - lib/ollama_chat/ollama_chat_config.rb
 - lib/ollama_chat/parsing.rb
@@ -424,6 +425,7 @@ files:
 - lib/ollama_chat/information.rb
 - lib/ollama_chat/message_format.rb
 - lib/ollama_chat/message_list.rb
+- lib/ollama_chat/message_output.rb
 - lib/ollama_chat/model_handling.rb
 - lib/ollama_chat/ollama_chat_config.rb
 - lib/ollama_chat/ollama_chat_config/default_config.yml
@@ -461,6 +463,7 @@ files:
 - spec/ollama_chat/follow_chat_spec.rb
 - spec/ollama_chat/information_spec.rb
 - spec/ollama_chat/message_list_spec.rb
+- spec/ollama_chat/message_output_spec.rb
 - spec/ollama_chat/model_handling_spec.rb
 - spec/ollama_chat/parsing_spec.rb
 - spec/ollama_chat/source_fetching_spec.rb
@@ -503,6 +506,7 @@ test_files:
 - spec/ollama_chat/follow_chat_spec.rb
 - spec/ollama_chat/information_spec.rb
 - spec/ollama_chat/message_list_spec.rb
+- spec/ollama_chat/message_output_spec.rb
 - spec/ollama_chat/model_handling_spec.rb
 - spec/ollama_chat/parsing_spec.rb
 - spec/ollama_chat/source_fetching_spec.rb