ollama_chat 0.0.15 → 0.0.16

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: c05a3e4579e75ba1c724d7128581de7b96dac914c2bd633368920a207be24b3a
- data.tar.gz: 6d4cf892e3a3ec3d00dc1b4cf00e686f311240893a5cd2af561afdb7ea6946de
+ metadata.gz: 1cddb901277529210fcc39b4c086217515e98aa75c1569044a47e1cf7149acbe
+ data.tar.gz: f7a40ef2722c750bb71ff2b42d3c2cf0c04816bfd11753e22fa7a09f2d13c9f8
  SHA512:
- metadata.gz: 9afec82d69645e4b8400b63c8f5bf51bd4b4d0471cb59c419b304e9a532fe874dce33f403625abb4fac4543892ed5c0e40b476a56da7023eda4ea99a15ac6275
- data.tar.gz: 611da289b8ac3d9bef78d7453418ec9e90123e5f7af36d386b62a7646a578adbe90ebc5396a09248152d117085ee0598a743cf4746e4e1e9a402a6e32475bd02
+ metadata.gz: 7cd5baab3a073464bbf28d3cd9e6ebbcfdcd938c55b1a17ca5d6f2a6b3949d1a2d0a138d9aacf192f6d2f2de1751b8d3e10368c2c15eba5b955ee5454fbd6ff1
+ data.tar.gz: 3ed3442aa4962666de9a56ce5a10e816c9b6f21d85e1281c65962d70c41c60e0c9c8f47fc4e380703799d67e006b2aef36fbe4e2208632e7c3d02c4cc3120c1a
data/CHANGES.md CHANGED
@@ -1,5 +1,23 @@
  # Changes

+ ## 2025-07-10 v0.0.16
+
+ - **New Features**
+ - Added `-f CONFIG` option to `ollama_chat_send` for specifying configuration files.
+ - Introduced `server_socket_runtime_dir` setting in the default config, and
+ make it default to the current directory, allowing for a per directory chat
+ to receive server socket messages.
+
+ - **Enhancements**
+ - Improved logging with debug output for received server socket messages.
+ - Refactored server socket handling:
+ - Created `create_socket_server` method for UnixSocks setup with configurable runtime directories.
+ - Updated `send_to_server_socket` and `init_server_socket` methods to use the new helper.
+ - Changed evaluation rate metrics from 'c/s' to 't/s' for better clarity.
+
+ - **Documentation**
+ - Added additional documentation for key classes and methods in `FollowChat`.
+
  ## 2025-07-02 v0.0.15

  - **Enhanced `ollama_chat_send` and Unix Domain Socket Support:**
data/VERSION CHANGED
@@ -1 +1 @@
- 0.0.15
+ 0.0.16
data/bin/ollama_chat_send CHANGED
@@ -5,22 +5,24 @@ require 'tins/go'
  include Tins::GO


- opts = go 'rth', ARGV
+ opts = go 'f:rth', ARGV

  def usage(rc = 0)
  puts <<~EOT
  Usage: #{File.basename($0)} [OPTIONS]

  Options:
- -r Wait for the response from Ollama Chat and output it
- -t Send input as terminal input including commands, e. g. /import
- -h Show this help message
+ -r Wait for the response from Ollama Chat and output it
+ -t Send input as terminal input including commands, e. g. /import
+ -f CONFIG file to read
+ -h Show this help message

  Send data to a running Ollame Chat client via standard input.
  EOT
  exit rc
  end

+ config = OllamaChat::OllamaChatConfig.new(opts[?f]).config
  opts[?h] and usage
  begin
  type = if opts[?t]
@@ -28,7 +30,7 @@ begin
  else
  opts[?r] ? :socket_input_with_response : :socket_input
  end
- response = OllamaChat::ServerSocket.send_to_server_socket(STDIN.read, type:)
+ response = OllamaChat::ServerSocket.send_to_server_socket(STDIN.read, type:, config:)
  type == :socket_input_with_response and puts response.content
  rescue => e
  warn "Caught #{e.class}: #{e}"
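Taken together, the two hunks above change the client flow: `ollama_chat_send` now accepts `-f CONFIG`, builds a config object from that file, and passes it through to the server socket call. The sketch below is a hypothetical programmatic equivalent of piping input to `ollama_chat_send -f my_config.yml -r`, using only the calls visible in this diff; the require line, config path, and input string are assumptions, not part of the package's documented API.

```ruby
require 'ollama_chat' # assumed top-level require matching the gem name

# Hypothetical config file path; OllamaChat::OllamaChatConfig.new(path).config
# is the same call the updated bin/ollama_chat_send makes with opts[?f].
config = OllamaChat::OllamaChatConfig.new('my_config.yml').config

# Equivalent of the -r flag: send input and wait for the running chat client
# to answer over the Unix domain socket.
response = OllamaChat::ServerSocket.send_to_server_socket(
  'Please summarize the current document.',
  type:   :socket_input_with_response,
  config: config
)
puts response.content
```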
@@ -355,6 +355,7 @@ class OllamaChat::Chat
  if message = server_socket_message
  type = message.type.full?(:to_sym) || :socket_input
  content = message.content
+ STDOUT.puts color(112) { "Received a server socket message. Processing now…" }
  else
  raise
  end
@@ -4,6 +4,15 @@ class OllamaChat::FollowChat
  include Term::ANSIColor
  include OllamaChat::MessageFormat

+
+ # Initializes a new instance of OllamaChat::FollowChat.
+ #
+ # @param [OllamaChat::Chat] chat The chat object, which represents the conversation context.
+ # @param [#to_a] messages A collection of message objects, representing the conversation history.
+ # @param [String] voice (optional) to speek with if any.
+ # @param [IO] output (optional) The output stream where terminal output should be printed. Defaults to STDOUT.
+ #
+ # @return [OllamaChat::FollowChat] A new instance of OllamaChat::FollowChat.
  def initialize(chat:, messages:, voice: nil, output: STDOUT)
  super(output:)
  @chat = chat
@@ -13,8 +22,28 @@ class OllamaChat::FollowChat
  @user = nil
  end

+ # Returns the conversation history (an array of message objects).
+ #
+ # @return [OllamaChat::MessageList<Ollama::Message>] The array of messages in the conversation.
  attr_reader :messages

+ # Invokes the chat flow based on the provided Ollama server response.
+ #
+ # The response is expected to be a parsed JSON object containing information
+ # about the user input and the assistant's response.
+ #
+ # If the response indicates an assistant message, this method:
+ # 1. Ensures that an assistant response exists in the message history (if not already present).
+ # 2. Updates the last message with the new content and thinking (if applicable).
+ # 3. Displays the formatted terminal output for the user.
+ # 4. Outputs the voice response (if configured).
+ #
+ # Regardless of whether an assistant message is present, this method also
+ # outputs evaluation statistics (if applicable).
+ #
+ # @param [Ollama::Response] response The parsed JSON response from the Ollama server.
+ #
+ # @return [OllamaChat::FollowChat] The current instance for method chaining.
  def call(response)
  debug_output(response)

@@ -73,10 +102,10 @@ class OllamaChat::FollowChat
  stats_text = {
  eval_duration: Tins::Duration.new(eval_duration),
  eval_count: response.eval_count.to_i,
- eval_rate: bold { "%.2f c/s" % (response.eval_count.to_i / eval_duration) } + color(111),
+ eval_rate: bold { "%.2f t/s" % (response.eval_count.to_i / eval_duration) } + color(111),
  prompt_eval_duration: Tins::Duration.new(prompt_eval_duration),
  prompt_eval_count: response.prompt_eval_count.to_i,
- prompt_eval_rate: bold { "%.2f c/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
+ prompt_eval_rate: bold { "%.2f t/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
  total_duration: Tins::Duration.new(response.total_duration / 1e9),
  load_duration: Tins::Duration.new(response.load_duration / 1e9),
  }.map { _1 * ?= } * ' '
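The only change in the last hunk is the displayed unit: `response.eval_count` is Ollama's token count, so the rate was always tokens per second and 't/s' is the accurate label. A made-up worked example of the arithmetic, assuming `eval_duration` has already been converted from nanoseconds to seconds, as the nearby `/ 1e9` conversions suggest:

```ruby
# Illustrative numbers only, not taken from a real response.
eval_count    = 256   # tokens generated, i.e. response.eval_count
eval_duration = 3.2   # seconds (Ollama reports nanoseconds; presumably divided by 1e9 earlier)

puts "%.2f t/s" % (eval_count / eval_duration)  # => 80.00 t/s
```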
@@ -61,6 +61,7 @@ redis:
  url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
  ex: 86400
  chat_history_filename: <%= ENV.fetch('OLLAMA_CHAT_HISTORY', '~/.ollama_chat_history') %>
+ server_socket_runtime_dir: .
  debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
  request_headers:
  Accept: 'text/*,application/*,image/*'
@@ -8,8 +8,8 @@ module OllamaChat::ServerSocket
  #
  # @return [ String, NilClass ] the response from the server if type is
  # :socket_input_with_response, otherwise nil.
- def send_to_server_socket(content, type: :socket_input)
- server = UnixSocks::Server.new(socket_name: 'ollama_chat.sock')
+ def send_to_server_socket(content, config:, type: :socket_input)
+ server = create_socket_server(config:)
  message = { content:, type: }
  if type.to_sym == :socket_input_with_response
  return server.transmit_with_response(message)
@@ -18,6 +18,14 @@ module OllamaChat::ServerSocket
  nil
  end
  end
+
+ def create_socket_server(config:)
+ if runtime_dir = config.server_socket_runtime_dir
+ UnixSocks::Server.new(socket_name: 'ollama_chat.sock', runtime_dir:)
+ else
+ UnixSocks::Server.new(socket_name: 'ollama_chat.sock')
+ end
+ end
  end

  attr_accessor :server_socket_message
@@ -33,7 +41,7 @@ module OllamaChat::ServerSocket
  # @return [ nil ] This method does not return any value, it only sets up the
  # server socket and kills the process when a message is received.
  def init_server_socket
- server = UnixSocks::Server.new(socket_name: 'ollama_chat.sock')
+ server = OllamaChat::ServerSocket.create_socket_server(config:)
  server.receive_in_background do |message|
  self.server_socket_message = message
  Process.kill :INT, $$
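The new `create_socket_server` helper is what enables the per-directory socket described in the changelog: when `server_socket_runtime_dir` is set (the new default is `.`), the UnixSocks server is built with that runtime directory, so each working directory can have its own `ollama_chat.sock`; without the setting it falls back to the UnixSocks default location. A hedged sketch of calling the helper directly; the OpenStruct configs are stand-ins for whatever `OllamaChatConfig` actually returns, and only the `server_socket_runtime_dir` attribute matters here.

```ruby
require 'ollama_chat' # assumed top-level require matching the gem name
require 'ostruct'

# Stand-ins for a loaded config; only server_socket_runtime_dir is consulted.
per_dir_config = OpenStruct.new(server_socket_runtime_dir: '.')
legacy_config  = OpenStruct.new(server_socket_runtime_dir: nil)

# Socket created relative to the current directory (presumably ./ollama_chat.sock).
per_dir_server = OllamaChat::ServerSocket.create_socket_server(config: per_dir_config)

# No runtime dir configured: UnixSocks picks its own default location
# (not specified in this diff).
default_server = OllamaChat::ServerSocket.create_socket_server(config: legacy_config)
```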
@@ -1,6 +1,6 @@
  module OllamaChat
  # OllamaChat version
- VERSION = '0.0.15'
+ VERSION = '0.0.16'
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
  # -*- encoding: utf-8 -*-
- # stub: ollama_chat 0.0.15 ruby lib
+ # stub: ollama_chat 0.0.16 ruby lib

  Gem::Specification.new do |s|
  s.name = "ollama_chat".freeze
- s.version = "0.0.15".freeze
+ s.version = "0.0.16".freeze

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: ollama_chat
  version: !ruby/object:Gem::Version
- version: 0.0.15
+ version: 0.0.16
  platform: ruby
  authors:
  - Florian Frank