ollama_chat 0.0.11 → 0.0.13

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: ae96668b9a38eb238d0d9352e4c6866f3a99c8e89796ca696a6c09acce776c32
4
- data.tar.gz: 44171082a4e6c971a4cd3cf4a95ab89cf76388e13fc9638e140261a2702437c7
3
+ metadata.gz: a6a03b8af7c470d83520d269829f51fe480f7a5813bc1b4df50c63d339e28953
4
+ data.tar.gz: cb86dd9896d6948fb736c889a672fb29e4f21c8174a3ddb62d6fcc93be59f020
5
5
  SHA512:
6
- metadata.gz: 13ae7bdd0e3012e34341d989ace27240472c8e442f8dc9f49ec94b1e9729c71353f70089a80f066d34d65ad67dc3e52cffe0d2f873a1881a185dea88f63f413a
7
- data.tar.gz: ae099dfe86b2888b5ed545b5c5f630eeccef4f4d52c000ffa1a5fbc2f36897705e31634e5c0810a962fecd27241645efdcedaa667d5eebcead72c26ca3448c05
6
+ metadata.gz: ef19594894b1bb11217e710e1a302a2d30add6140da629c66e3e60d990e30f45a29196d310e06f7c06b36d8ddf33f84e7bd4278f87705b48e501b06d5f53d290
7
+ data.tar.gz: 332c66cdca39a187ecb3aff193e5a2522ad11deabaedb584c32a78a620d4c068be3cc062d181a5e6c1d0bf44fed371b3409010f2a4e404614ba142e37d5afbdc
data/CHANGES.md CHANGED
@@ -1,16 +1,31 @@
1
1
  # Changes
2
2
 
3
+ ## 2025-06-05 v0.0.13
4
+
5
+ * Improved chat command handling
6
+ - Added support for '/clear tags' to clear all tags.
7
+ - Updated cases for 'history', 'all' and added case for 'tags'.
8
+ - Added commands to clear documents collection and print a message in `information.rb`.
9
+ - `OllamaChat::Chat#clean` now accepts 'tags' as an option.
10
+ * Apply read and write timeouts from configuration (300 seconds) for the Ollama server.
11
+ * Added method comments
12
+
13
+ ## 2025-06-01 v0.0.12
14
+
15
+ * **API Compatibility**: Enforces Ollama API version `0.9.0` or higher to
16
+ support new features like the `thinking` attribute.
17
+ * **Think Output Splitting**: When `think` is enabled, the API response is
18
+ split into `content` and `thinking` fields, enabled by the new API version.
19
+ * **Think Mode Simplified**: The previous multi-mode `think_mode` system has
20
+ been replaced with a boolean `think` switch for cleaner, more intuitive
21
+ control.
22
+
3
23
  ## 2025-06-01 v0.0.11
4
24
 
5
25
  * **Think Mode Implementation**:
6
26
  + Introduced `@think_mode` attribute to read think mode setting from config
7
27
  + Implemented `remove_think_blocks` method to filter out thought blocks from chat messages sent to the LLM model.
8
- + Added conditional logic based on `@think_mode` value to handle different modes (`'display'`, `'omit'`, `'no_delete'`, `'only_delete'`)
9
-
10
- * **'display'**: Displays thought blocks' tags as emojis.
11
- * **'omit'**: Omit internal reasoning blocks and tags from the output entirely.
12
- * **'no_delete'**: Sends the entire conversation, including all think tags, to the Large Language Model (LLM) for processing.
13
- * **'only_delete'**: Removes the explicit indicators of thought processes only from the conversation sent to the LLM, but does not modify the output shown to the user.
28
+ + Added conditional logic based on `@think_mode` value to handle different modes
14
29
  * **User Interface Improvements**:
15
30
  + Added `/think_mode` command to help users understand think mode options
16
31
  + Updated session output to include current think mode
data/README.md CHANGED
@@ -133,7 +133,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
133
133
  /info show information for current session
134
134
  /config output current configuration ("/Users/flori/.config/ollama_chat/config.yml")
135
135
  /document_policy pick a scan policy for document references
136
- /think_mode pick a think mode for reasoning models (display, omit, only_delete, no_delete)
136
+ /think enable ollama think setting for models
137
137
  /import source import the source's content
138
138
  /summarize [n] source summarize the source's content in n words
139
139
  /embedding toggle embedding paused or not
data/Rakefile CHANGED
@@ -30,7 +30,7 @@ GemHadar do
30
30
  executables << 'ollama_chat' << 'ollama_chat_send'
31
31
 
32
32
  dependency 'excon', '~> 1.0'
33
- dependency 'ollama-ruby', '~> 1.0'
33
+ dependency 'ollama-ruby', '~> 1.2'
34
34
  dependency 'documentrix', '~> 0.0', '>= 0.0.2'
35
35
  dependency 'rss', '~> 0.3'
36
36
  dependency 'term-ansicolor', '~> 1.11'
data/VERSION CHANGED
@@ -1 +1 @@
1
- 0.0.11
1
+ 0.0.13
data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
1
1
  services:
2
2
  redis:
3
3
  container_name: redis
4
- image: valkey/valkey:7.2.9-alpine
4
+ image: valkey/valkey:8.1.1-alpine
5
5
  restart: unless-stopped
6
6
  ports: [ "127.0.0.1:9736:6379" ]
7
7
  volumes:
@@ -1,5 +1,6 @@
1
1
  require 'tins'
2
2
  require 'tins/secure_write'
3
+ require 'tins/xt/string_version'
3
4
  require 'json'
4
5
  require 'term/ansicolor'
5
6
  require 'reline'
@@ -27,7 +28,7 @@ class OllamaChat::Chat
27
28
  include OllamaChat::Dialog
28
29
  include OllamaChat::Information
29
30
  include OllamaChat::Clipboard
30
- include OllamaChat::MessageType
31
+ include OllamaChat::MessageFormat
31
32
  include OllamaChat::History
32
33
  include OllamaChat::ServerSocket
33
34
 
@@ -40,13 +41,17 @@ class OllamaChat::Chat
40
41
  setup_switches(config)
41
42
  base_url = @opts[?u] || config.url
42
43
  @ollama = Ollama::Client.new(
43
- base_url: base_url,
44
- debug: config.debug,
44
+ connect_timeout: config.timeouts.connect_timeout?,
45
+ read_timeout: config.timeouts.read_timeout?,
46
+ write_timeout: config.timeouts.write_timeout?,
47
+ base_url: base_url,
48
+ debug: config.debug,
45
49
  user_agent:
46
50
  )
47
- server_version
51
+ if server_version.version < '0.9.0'.version
52
+ raise ArgumentError, 'require ollama API version 0.9.0 or higher'
53
+ end
48
54
  @document_policy = config.document_policy
49
- @think_mode = config.think_mode
50
55
  @model = choose_model(@opts[?m], config.model.name)
51
56
  @model_options = Ollama::Options[config.model.options]
52
57
  model_system = pull_model_unless_present(@model, @model_options)
@@ -134,7 +139,7 @@ class OllamaChat::Chat
134
139
  last = 2 * $1.to_i if $1
135
140
  messages.list_conversation(last)
136
141
  :next
137
- when %r(^/clear(?:\s+(messages|links|history|all))?$)
142
+ when %r(^/clear(?:\s+(messages|links|history|tags|all))?$)
138
143
  clean($1)
139
144
  :next
140
145
  when %r(^/clobber$)
@@ -196,6 +201,9 @@ class OllamaChat::Chat
196
201
  when %r(^/document_policy$)
197
202
  choose_document_policy
198
203
  :next
204
+ when %r(^/think$)
205
+ think.toggle
206
+ :next
199
207
  when %r(^/import\s+(.+))
200
208
  @parse_content = false
201
209
  import($1) or :next
@@ -301,6 +309,9 @@ class OllamaChat::Chat
301
309
  when 'history'
302
310
  clear_history
303
311
  STDOUT.puts "Cleared history."
312
+ when 'tags'
313
+ @documents.clear
314
+ STDOUT.puts "Cleared all tags."
304
315
  when 'all'
305
316
  if ask?(prompt: 'Are you sure to clear messages and collection? (y/n) ') =~ /\Ay/i
306
317
  messages.clear
@@ -387,17 +398,12 @@ class OllamaChat::Chat
387
398
  messages:,
388
399
  voice: (@current_voice if voice.on?)
389
400
  )
390
- messages_to_send =
391
- if @think_mode == 'no_delete'
392
- messages
393
- else
394
- remove_think_blocks(messages)
395
- end
396
401
  ollama.chat(
397
402
  model: @model,
398
- messages: messages_to_send,
403
+ messages: ,
399
404
  options: @model_options,
400
405
  stream: stream.on?,
406
+ think: think.on?,
401
407
  &handler
402
408
  )
403
409
  if embedding.on? && !records.empty?
@@ -425,19 +431,6 @@ class OllamaChat::Chat
425
431
 
426
432
  private
427
433
 
428
- def remove_think_blocks(messages)
429
- new_messages = OllamaChat::MessageList.new(self)
430
- messages.to_ary.each do |message|
431
- thought_less_content = message.content.gsub(%r(<think(?:ing)?>.*?</think(?:ing)?>)im, '')
432
- new_messages << Ollama::Message.new(
433
- role: message.role,
434
- content: thought_less_content,
435
- images: message.images
436
- )
437
- end
438
- new_messages
439
- end
440
-
441
434
  def setup_documents
442
435
  if embedding.on?
443
436
  @embedding_model = config.embedding.model.name
@@ -1,4 +1,13 @@
1
1
  module OllamaChat::Clipboard
2
+
3
+ # Copy the last assistant's message to the system clipboard.
4
+ #
5
+ # This method checks if there is a last message from an assistant in the `@messages`
6
+ # array and copies its content to the clipboard using the specified command from `config.copy`.
7
+ # If no assistant response is available or the clipboard command is not found, appropriate
8
+ # error messages are displayed.
9
+ #
10
+ # @return [NilClass] Always returns nil.
2
11
  def copy_to_clipboard
3
12
  if message = @messages.last and message.role == 'assistant'
4
13
  copy = `which #{config.copy}`.chomp
@@ -16,6 +25,13 @@ module OllamaChat::Clipboard
16
25
  nil
17
26
  end
18
27
 
28
+ # Paste content from the input.
29
+ #
30
+ # Prompts the user to paste their content and then press C-d (Ctrl+D) to terminate
31
+ # input. Reads all lines from standard input until Ctrl+D is pressed and returns
32
+ # the pasted content as a string.
33
+ #
34
+ # @return [String] The pasted content entered by the user.
19
35
  def paste_from_input
20
36
  STDOUT.puts bold { "Paste your content and then press C-d!" }
21
37
  STDIN.read
@@ -57,30 +57,6 @@ module OllamaChat::Dialog
57
57
  info
58
58
  end
59
59
 
60
- attr_accessor :think_mode
61
-
62
- def choose_think_mode
63
- modes = %w[ display omit only_delete no_delete ].sort
64
- current = if modes.index(@think_mode)
65
- @think_mode
66
- elsif modes.index(config.think_mode)
67
- config.think_mode
68
- else
69
- modes.first
70
- end
71
- modes.unshift('[EXIT]')
72
- think_mode = OllamaChat::Utils::Chooser.choose(modes)
73
- case think_mode
74
- when nil, '[EXIT]'
75
- STDOUT.puts "Exiting chooser."
76
- think_mode = current
77
- end
78
- self.think_mode = think_mode
79
- ensure
80
- STDOUT.puts "Using think mode #{bold{@think_mode}}."
81
- info
82
- end
83
-
84
60
  def change_system_prompt(default, system: nil)
85
61
  selector = Regexp.new(system.to_s[1..-1].to_s)
86
62
  prompts = config.system_prompts.attribute_names.compact.grep(selector)
@@ -2,7 +2,7 @@ class OllamaChat::FollowChat
2
2
  include Ollama
3
3
  include Ollama::Handlers::Concern
4
4
  include Term::ANSIColor
5
- include OllamaChat::MessageType
5
+ include OllamaChat::MessageFormat
6
6
 
7
7
  def initialize(chat:, messages:, voice: nil, output: STDOUT)
8
8
  super(output:)
@@ -14,40 +14,59 @@ class OllamaChat::FollowChat
14
14
  end
15
15
 
16
16
  def call(response)
17
- OllamaChat::Chat.config.debug and jj response
17
+ debug_output(response)
18
+
18
19
  if response&.message&.role == 'assistant'
19
- if @messages&.last&.role != 'assistant'
20
- @messages << Message.new(role: 'assistant', content: '')
21
- @user = message_type(@messages.last.images) + " " +
22
- bold { color(111) { 'assistant:' } }
23
- end
24
- @messages.last.content << response.message&.content
25
- if content = @messages.last.content.full?
26
- case @chat.think_mode
27
- when 'display'
28
- content = emphasize_think_block(content)
29
- when 'omit'
30
- content = omit_think_block(content)
31
- when 'no_delete', 'only_delete'
32
- content = quote_think_tags(content)
33
- end
34
- if @chat.markdown.on?
35
- markdown_content = Kramdown::ANSI.parse(content)
36
- @output.print clear_screen, move_home, @user, ?\n, markdown_content
37
- else
38
- @output.print clear_screen, move_home, @user, ?\n, content
39
- end
40
- end
20
+ ensure_assistant_response_exists
21
+ update_last_message(response)
22
+ display_formatted_terminal_output
41
23
  @say.call(response)
42
24
  end
43
- if response.done
44
- @output.puts "", eval_stats(response)
45
- end
25
+
26
+ output_eval_stats(response)
27
+
46
28
  self
47
29
  end
48
30
 
31
+ private
32
+
33
+ def ensure_assistant_response_exists
34
+ if @messages&.last&.role != 'assistant'
35
+ @messages << Message.new(
36
+ role: 'assistant',
37
+ content: '',
38
+ thinking: ('' if @chat.think.on?)
39
+ )
40
+ @user = message_type(@messages.last.images) + " " +
41
+ bold { color(111) { 'assistant:' } }
42
+ end
43
+ end
44
+
45
+ def update_last_message(response)
46
+ @messages.last.content << response.message&.content
47
+ if @chat.think.on? and response_thinking = response.message&.thinking.full?
48
+ @messages.last.thinking << response_thinking
49
+ end
50
+ end
51
+
52
+ def display_formatted_terminal_output
53
+ content, thinking = @messages.last.content, @messages.last.thinking
54
+ if @chat.markdown.on?
55
+ content = talk_annotate { Kramdown::ANSI.parse(content) }
56
+ if @chat.think.on?
57
+ thinking = think_annotate { Kramdown::ANSI.parse(thinking) }
58
+ end
59
+ else
60
+ content = talk_annotate { content }
61
+ @chat.think.on? and thinking = think_annotate { @messages.last.thinking.full? }
62
+ end
63
+ @output.print(*([
64
+ clear_screen, move_home, @user, ?\n, thinking, content
65
+ ].compact))
66
+ end
67
+
49
68
  def eval_stats(response)
50
- eval_duration = response.eval_duration / 1e9
69
+ eval_duration = response.eval_duration / 1e9
51
70
  prompt_eval_duration = response.prompt_eval_duration / 1e9
52
71
  stats_text = {
53
72
  eval_duration: Tins::Duration.new(eval_duration),
@@ -64,21 +83,12 @@ class OllamaChat::FollowChat
64
83
  }
65
84
  end
66
85
 
67
- private
68
-
69
- def emphasize_think_block(content)
70
- content.gsub(%r(<think(?:ing)?>)i, "\n💭\n").gsub(%r(</think(?:ing)?>)i, "\n💬\n")
71
- end
72
-
73
- def omit_think_block(content)
74
- content.gsub(%r(<think(?:ing)?>.*?(</think(?:ing)?>|\z))im, '')
86
+ def output_eval_stats(response)
87
+ response.done or return
88
+ @output.puts "", eval_stats(response)
75
89
  end
76
90
 
77
- def quote_think_tags(content)
78
- if @chat.markdown.on?
79
- content.gsub(%r(<(think(?:ing)?)>)i, "\n\\<\\1\\>\n").gsub(%r(</(think(?:ing)?)>)i, "\n\\</\\1\\>\n")
80
- else
81
- content.gsub(%r(<(think(?:ing)?)>)i, "\n<\\1\>\n").gsub(%r(</(think(?:ing)?)>)i, "\n</\\1>\n")
82
- end
91
+ def debug_output(response)
92
+ OllamaChat::Chat.config.debug and jj response
83
93
  end
84
94
  end
@@ -48,7 +48,7 @@ module OllamaChat::Information
48
48
  stream.show
49
49
  location.show
50
50
  STDOUT.puts "Document policy for references in user text: #{bold{@document_policy}}"
51
- STDOUT.puts "Think mode is currently: #{bold{@think_mode}}"
51
+ STDOUT.puts "Thinking is #{bold(think.on? ? 'enabled' : 'disabled')}."
52
52
  STDOUT.puts "Currently selected search engine is #{bold(search_engine)}."
53
53
  if @voice.on?
54
54
  STDOUT.puts "Using voice #{bold{@current_voice}} to speak."
@@ -66,7 +66,7 @@ module OllamaChat::Information
66
66
  /location toggle location submission
67
67
  /voice [change] toggle voice output or change the voice
68
68
  /list [n] list the last n / all conversation exchanges
69
- /clear [messages|links|history] clear the all messages, links, or the chat history (defaults to messages)
69
+ /clear [what] clear what=messages|links|history|tags|all
70
70
  /clobber clear the conversation, links, and collection
71
71
  /drop [n] drop the last n exchanges, defaults to 1
72
72
  /model change the model
@@ -76,7 +76,7 @@ module OllamaChat::Information
76
76
  /info show information for current session
77
77
  /config output current configuration (#{@ollama_chat_config.filename.to_s.inspect})
78
78
  /document_policy pick a scan policy for document references
79
- /think_mode pick a think mode for reasoning models
79
+ /think enable ollama think setting for models
80
80
  /import source import the source's content
81
81
  /summarize [n] source summarize the source's content in n words
82
82
  /embedding toggle embedding paused or not
@@ -0,0 +1,23 @@
1
+ module OllamaChat::MessageFormat
2
+ def message_type(images)
3
+ images.present? ? ?📸 : ?📨
4
+ end
5
+
6
+ def think_annotate(&block)
7
+ string = block.()
8
+ string.to_s.size == 0 and return
9
+ if @chat.think.on?
10
+ "💭\n#{string}\n"
11
+ end
12
+ end
13
+
14
+ def talk_annotate(&block)
15
+ string = block.()
16
+ string.to_s.size == 0 and return
17
+ if @chat.think.on?
18
+ "💬\n#{string}\n"
19
+ else
20
+ string
21
+ end
22
+ end
23
+ end
@@ -1,6 +1,6 @@
1
1
  class OllamaChat::MessageList
2
2
  include Term::ANSIColor
3
- include OllamaChat::MessageType
3
+ include OllamaChat::MessageFormat
4
4
 
5
5
  # The initialize method sets up the message list for an OllamaChat session.
6
6
  #
@@ -5,6 +5,10 @@ model:
5
5
  name: <%= ENV.fetch('OLLAMA_CHAT_MODEL', 'llama3.1') %>
6
6
  options:
7
7
  num_ctx: 8192
8
+ timeouts:
9
+ connect_timeout: null
10
+ read_timeout: 300
11
+ write_timeout: 300
8
12
  location:
9
13
  enabled: false
10
14
  name: Berlin
@@ -32,7 +36,7 @@ voice:
32
36
  markdown: true
33
37
  stream: true
34
38
  document_policy: importing
35
- think_mode: display
39
+ think: false
36
40
  embedding:
37
41
  enabled: true
38
42
  model:
@@ -1,13 +1,23 @@
1
1
  module OllamaChat::ServerSocket
2
2
  class << self
3
+ # Returns the path to the XDG runtime directory, or a default path if not set.
4
+ # @return [String] the expanded path to the XDG runtime directory
3
5
  def runtime_dir
4
6
  File.expand_path(ENV.fetch('XDG_RUNTIME_DIR', '~/.local/run'))
5
7
  end
6
8
 
9
+ # Constructs the full path to the server socket file.
10
+ # @return [String] the full path to the Unix socket
7
11
  def server_socket_path
8
12
  File.join(runtime_dir, 'ollama_chat.sock')
9
13
  end
10
14
 
15
+ # Sends a message to the server socket.
16
+ #
17
+ # @param content [String] the content to send
18
+ # @param type [Symbol] the type of message (default: :socket_input)
19
+ # @raise [Errno::ENOENT] if the socket file does not exist
20
+ # @raise [Errno::ECONNREFUSED] if the socket is not listening (server no running)
11
21
  def send_to_server_socket(content, type: :socket_input)
12
22
  FileUtils.mkdir_p runtime_dir
13
23
  message = { content:, type: }
@@ -17,8 +27,20 @@ module OllamaChat::ServerSocket
17
27
  end
18
28
  end
19
29
 
30
+ # Accessor for the server socket message.
31
+ # Holds the last message received from the Unix socket.
32
+ # @return [String, nil] the message content, or nil if not set
33
+ # @see OllamaChat::ServerSocket#init_server_socket
34
+ # @see OllamaChat::ServerSocket#send_to_server_socket
20
35
  attr_accessor :server_socket_message
21
36
 
37
+ # Initializes a Unix domain socket server for OllamaChat.
38
+ #
39
+ # Creates the necessary runtime directory, checks for existing socket file,
40
+ # and starts a server loop in a new thread. Listens for incoming connections,
41
+ # reads JSON data, and terminates the server upon receiving a message.
42
+ #
43
+ # Raises Errno::EEXIST if the socket path already exists.
22
44
  def init_server_socket
23
45
  FileUtils.mkdir_p OllamaChat::ServerSocket.runtime_dir
24
46
  if File.exist?(OllamaChat::ServerSocket.server_socket_path)
@@ -49,10 +49,12 @@ module OllamaChat::Switches
49
49
  include CheckSwitch
50
50
  end
51
51
 
52
- attr_reader :markdown
53
-
54
52
  attr_reader :stream
55
53
 
54
+ attr_reader :think
55
+
56
+ attr_reader :markdown
57
+
56
58
  attr_reader :voice
57
59
 
58
60
  attr_reader :embedding
@@ -64,21 +66,30 @@ module OllamaChat::Switches
64
66
  attr_reader :location
65
67
 
66
68
  def setup_switches(config)
67
- @markdown = Switch.new(
68
- :markdown,
69
+ @stream = Switch.new(
70
+ :stream,
69
71
  config:,
70
72
  msg: {
71
- true => "Using #{italic{'ANSI'}} markdown to output content.",
72
- false => "Using plaintext for outputting content.",
73
+ true => "Streaming enabled.",
74
+ false => "Streaming disabled.",
73
75
  }
74
76
  )
75
77
 
76
- @stream = Switch.new(
77
- :stream,
78
+ @think = Switch.new(
79
+ :think,
78
80
  config:,
79
81
  msg: {
80
- true => "Streaming enabled.",
81
- false => "Streaming disabled.",
82
+ true => "Thinking enabled.",
83
+ false => "Thinking disabled.",
84
+ }
85
+ )
86
+
87
+ @markdown = Switch.new(
88
+ :markdown,
89
+ config:,
90
+ msg: {
91
+ true => "Using #{italic{'ANSI'}} markdown to output content.",
92
+ false => "Using plaintext for outputting content.",
82
93
  }
83
94
  )
84
95
 
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.11'
3
+ VERSION = '0.0.13'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/lib/ollama_chat.rb CHANGED
@@ -5,7 +5,7 @@ require 'ollama'
5
5
  require 'documentrix'
6
6
  require 'ollama_chat/version'
7
7
  require 'ollama_chat/utils'
8
- require 'ollama_chat/message_type'
8
+ require 'ollama_chat/message_format'
9
9
  require 'ollama_chat/ollama_chat_config'
10
10
  require 'ollama_chat/follow_chat'
11
11
  require 'ollama_chat/switches'
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.11 ruby lib
2
+ # stub: ollama_chat 0.0.13 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.11".freeze
6
+ s.version = "0.0.13".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
@@ -12,8 +12,8 @@ Gem::Specification.new do |s|
12
12
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
13
13
  s.email = "flori@ping.de".freeze
14
14
  s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
15
- s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
- s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_type.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, 
"spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
15
+ s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
+ s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, 
"spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
17
17
  s.homepage = "https://github.com/flori/ollama_chat".freeze
18
18
  s.licenses = ["MIT".freeze]
19
19
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
@@ -32,7 +32,7 @@ Gem::Specification.new do |s|
32
32
  s.add_development_dependency(%q<debug>.freeze, [">= 0".freeze])
33
33
  s.add_development_dependency(%q<simplecov>.freeze, [">= 0".freeze])
34
34
  s.add_runtime_dependency(%q<excon>.freeze, ["~> 1.0".freeze])
35
- s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 1.0".freeze])
35
+ s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 1.2".freeze])
36
36
  s.add_runtime_dependency(%q<documentrix>.freeze, ["~> 0.0".freeze, ">= 0.0.2".freeze])
37
37
  s.add_runtime_dependency(%q<rss>.freeze, ["~> 0.3".freeze])
38
38
  s.add_runtime_dependency(%q<term-ansicolor>.freeze, ["~> 1.11".freeze])
@@ -297,7 +297,7 @@ RSpec.describe OllamaChat::Chat do
297
297
  Streaming|
298
298
  Location|
299
299
  Document\ policy|
300
- Think\ mode|
300
+ Thinking\ is|
301
301
  Currently\ selected\ search\ engine
302
302
  /x
303
303
  ).at_least(1)
@@ -8,7 +8,7 @@ RSpec.describe OllamaChat::FollowChat do
8
8
  end
9
9
 
10
10
  let :chat do
11
- double('Chat', markdown: double(on?: false), think_mode: 'display')
11
+ double('Chat', markdown: double(on?: false), think: double(on?: false))
12
12
  end
13
13
 
14
14
  let :follow_chat do
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ollama_chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.11
4
+ version: 0.0.13
5
5
  platform: ruby
6
6
  authors:
7
7
  - Florian Frank
@@ -127,14 +127,14 @@ dependencies:
127
127
  requirements:
128
128
  - - "~>"
129
129
  - !ruby/object:Gem::Version
130
- version: '1.0'
130
+ version: '1.2'
131
131
  type: :runtime
132
132
  prerelease: false
133
133
  version_requirements: !ruby/object:Gem::Requirement
134
134
  requirements:
135
135
  - - "~>"
136
136
  - !ruby/object:Gem::Version
137
- version: '1.0'
137
+ version: '1.2'
138
138
  - !ruby/object:Gem::Dependency
139
139
  name: documentrix
140
140
  requirement: !ruby/object:Gem::Requirement
@@ -372,8 +372,8 @@ extra_rdoc_files:
372
372
  - lib/ollama_chat/follow_chat.rb
373
373
  - lib/ollama_chat/history.rb
374
374
  - lib/ollama_chat/information.rb
375
+ - lib/ollama_chat/message_format.rb
375
376
  - lib/ollama_chat/message_list.rb
376
- - lib/ollama_chat/message_type.rb
377
377
  - lib/ollama_chat/model_handling.rb
378
378
  - lib/ollama_chat/ollama_chat_config.rb
379
379
  - lib/ollama_chat/parsing.rb
@@ -408,8 +408,8 @@ files:
408
408
  - lib/ollama_chat/follow_chat.rb
409
409
  - lib/ollama_chat/history.rb
410
410
  - lib/ollama_chat/information.rb
411
+ - lib/ollama_chat/message_format.rb
411
412
  - lib/ollama_chat/message_list.rb
412
- - lib/ollama_chat/message_type.rb
413
413
  - lib/ollama_chat/model_handling.rb
414
414
  - lib/ollama_chat/ollama_chat_config.rb
415
415
  - lib/ollama_chat/ollama_chat_config/default_config.yml
@@ -1,5 +0,0 @@
1
- module OllamaChat::MessageType
2
- def message_type(images)
3
- images.present? ? ?📸 : ?📨
4
- end
5
- end