ollama_chat 0.0.14 → 0.0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGES.md +33 -0
- data/README.md +31 -2
- data/Rakefile +1 -0
- data/VERSION +1 -1
- data/bin/ollama_chat_send +29 -2
- data/lib/ollama_chat/chat.rb +17 -4
- data/lib/ollama_chat/dialog.rb +11 -2
- data/lib/ollama_chat/follow_chat.rb +33 -2
- data/lib/ollama_chat/information.rb +9 -3
- data/lib/ollama_chat/message_list.rb +32 -12
- data/lib/ollama_chat/model_handling.rb +29 -0
- data/lib/ollama_chat/ollama_chat_config/default_config.yml +1 -0
- data/lib/ollama_chat/server_socket.rb +34 -49
- data/lib/ollama_chat/version.rb +1 -1
- data/lib/ollama_chat.rb +1 -0
- data/ollama_chat.gemspec +4 -3
- data/spec/ollama_chat/chat_spec.rb +4 -0
- data/spec/ollama_chat/information_spec.rb +10 -1
- data/spec/ollama_chat/message_list_spec.rb +16 -1
- metadata +16 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1cddb901277529210fcc39b4c086217515e98aa75c1569044a47e1cf7149acbe
+  data.tar.gz: f7a40ef2722c750bb71ff2b42d3c2cf0c04816bfd11753e22fa7a09f2d13c9f8
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7cd5baab3a073464bbf28d3cd9e6ebbcfdcd938c55b1a17ca5d6f2a6b3949d1a2d0a138d9aacf192f6d2f2de1751b8d3e10368c2c15eba5b955ee5454fbd6ff1
+  data.tar.gz: 3ed3442aa4962666de9a56ce5a10e816c9b6f21d85e1281c65962d70c41c60e0c9c8f47fc4e380703799d67e006b2aef36fbe4e2208632e7c3d02c4cc3120c1a
data/CHANGES.md
CHANGED
@@ -1,5 +1,38 @@
 # Changes
 
+## 2025-07-10 v0.0.16
+
+- **New Features**
+  - Added `-f CONFIG` option to `ollama_chat_send` for specifying configuration files.
+  - Introduced the `server_socket_runtime_dir` setting in the default config and
+    made it default to the current directory, allowing a per-directory chat to
+    receive server socket messages.
+
+- **Enhancements**
+  - Improved logging with debug output for received server socket messages.
+  - Refactored server socket handling:
+    - Created `create_socket_server` method for UnixSocks setup with configurable runtime directories.
+    - Updated `send_to_server_socket` and `init_server_socket` methods to use the new helper.
+  - Changed evaluation rate metrics from 'c/s' to 't/s' for better clarity.
+
+- **Documentation**
+  - Added additional documentation for key classes and methods in `FollowChat`.
+
+## 2025-07-02 v0.0.15
+
+- **Enhanced `ollama_chat_send` and Unix Domain Socket Support:**
+  - Added support for advanced parameters:
+    - `-t`: Sends input as terminal commands.
+    - `-r`: Enables two-way communication by waiting for and returning the server's response.
+    - `-h` or `--help`: Displays usage information and available options.
+  - Improved socket management using the `unix_socks` gem.
+  - Enhanced message processing logic to handle different types of messages
+    (`:socket_input`, `:terminal_input`, `:socket_input_with_response`).
+- **Selector Support for Model and System Prompt Selection:**
+  - Introduced `?selector` syntax to filter models and prompts.
+  - Updated documentation to reflect this new feature.
+  - Added a chooser dialog when multiple options match the selector.
+
 ## 2025-06-07 v0.0.14
 
 * **Message List Improvements**:
data/README.md
CHANGED
@@ -24,8 +24,8 @@ Usage: ollama_chat [OPTIONS]
 
   -f CONFIG        config file to read
   -u URL           the ollama base url, OLLAMA_URL
-  -m MODEL         the ollama model to chat with, OLLAMA_CHAT_MODEL
-  -s SYSTEM        the system prompt to use as a file, OLLAMA_CHAT_SYSTEM
+  -m MODEL         the ollama model to chat with, OLLAMA_CHAT_MODEL, ?selector
+  -s SYSTEM        the system prompt to use as a file, OLLAMA_CHAT_SYSTEM, ?selector
   -c CHAT          a saved chat conversation to load
   -C COLLECTION    name of the collection used in this conversation
   -D DOCUMENT      load document and add to embeddings collection (multiple)
@@ -34,6 +34,9 @@ Usage: ollama_chat [OPTIONS]
   -S               open a socket to receive input from ollama_chat_send
   -V               display the current version number and quit
   -h               this help
+
+  Use `?selector` with `-m` or `-s` to filter options. Multiple matches
+  will open a chooser dialog.
 ```
 
 The base URL can be either set by the environment variable `OLLAMA_URL` or it
@@ -167,6 +170,32 @@ function! OllamaChatSend(input)
 endfunction
 ```
 
+#### Advanced Parameters for `ollama_chat_send`
+
+The `ollama_chat_send` command now supports additional parameters to enhance functionality:
+
+- **Terminal Input (`-t`)**: Sends input as terminal commands, enabling special commands like `/import`.
+
+  ```bash
+  $ echo "/import https://example.com/some-content" | ollama_chat_send -t
+  ```
+
+- **Wait for Response (`-r`)**: Enables two-way communication by waiting for and returning the server's response.
+
+  ```bash
+  $ response=$(echo "Tell me a joke." | ollama_chat_send -r)
+  $ echo "$response"
+  ```
+
+- **Help (`-h` or `--help`)**: Displays usage information and available options.
+
+  ```bash
+  $ ollama_chat_send -h
+  ```
+
+These parameters provide greater flexibility in how you interact with
+`ollama_chat`, whether from the command line or integrated tools like `vim`.
+
 ## Download
 
 The homepage of this app is located at
data/Rakefile
CHANGED
@@ -32,6 +32,7 @@ GemHadar do
   dependency 'excon',          '~> 1.0'
   dependency 'ollama-ruby',    '~> 1.2'
   dependency 'documentrix',    '~> 0.0', '>= 0.0.2'
+  dependency 'unix_socks'
   dependency 'rss',            '~> 0.3'
   dependency 'term-ansicolor', '~> 1.11'
   dependency 'redis',          '~> 5.0'
data/VERSION
CHANGED
@@ -1 +1 @@
-0.0.14
+0.0.16
data/bin/ollama_chat_send
CHANGED
@@ -1,10 +1,37 @@
 #!/usr/bin/env ruby
 
 require 'ollama_chat'
+require 'tins/go'
+include Tins::GO
 
+
+opts = go 'f:rth', ARGV
+
+def usage(rc = 0)
+  puts <<~EOT
+    Usage: #{File.basename($0)} [OPTIONS]
+
+    Options:
+      -r         Wait for the response from Ollama Chat and output it
+      -t         Send input as terminal input including commands, e.g. /import
+      -f CONFIG  config file to read
+      -h         Show this help message
+
+    Send data to a running Ollama Chat client via standard input.
+  EOT
+  exit rc
+end
+
+config = OllamaChat::OllamaChatConfig.new(opts[?f]).config
+opts[?h] and usage
 begin
-  type =
-
+  type = if opts[?t]
+           :terminal_input
+         else
+           opts[?r] ? :socket_input_with_response : :socket_input
+         end
+  response = OllamaChat::ServerSocket.send_to_server_socket(STDIN.read, type:, config:)
+  type == :socket_input_with_response and puts response.content
 rescue => e
   warn "Caught #{e.class}: #{e}"
   exit 1
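Below is a minimal sketch of what `ollama_chat_send` now does under the hood, using only the 0.0.16 API visible in the diff above (`OllamaChat::OllamaChatConfig` and `OllamaChat::ServerSocket.send_to_server_socket`). The config filename is illustrative, and a chat started with `-S` must already be listening:

```ruby
require 'ollama_chat'

# Load a specific config, as `ollama_chat_send -f CONFIG` does (path illustrative).
config = OllamaChat::OllamaChatConfig.new('ollama_chat.yml').config

# Equivalent of `echo "Tell me a joke." | ollama_chat_send -r`:
# :socket_input_with_response blocks until the chat replies.
response = OllamaChat::ServerSocket.send_to_server_socket(
  'Tell me a joke.',
  type:   :socket_input_with_response,
  config: config
)
puts response.content
```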
data/lib/ollama_chat/chat.rb
CHANGED
@@ -1,6 +1,7 @@
 require 'tins'
 require 'tins/secure_write'
 require 'tins/xt/string_version'
+require 'tins/xt/full'
 require 'json'
 require 'term/ansicolor'
 require 'reline'
@@ -352,15 +353,15 @@ class OllamaChat::Chat
       content = Reline.readline(input_prompt, true)&.chomp
     rescue Interrupt
       if message = server_socket_message
-
-
-
+        type = message.type.full?(:to_sym) || :socket_input
+        content = message.content
+        STDOUT.puts color(112) { "Received a server socket message. Processing now…" }
       else
         raise
       end
     end
 
-
+    if type == :terminal_input
       case next_action = handle_input(content)
       when :next
         next
@@ -421,10 +422,22 @@ class OllamaChat::Chat
       }.uniq.map { |l, t| hyperlink(l, t) }.join(' ')
       config.debug and jj messages.to_ary
     end
+
+    case type
+    when :socket_input
+      server_socket_message&.disconnect
+    when :socket_input_with_response
+      if message = handler.messages.last
+        server_socket_message.respond({ role: message.role, content: message.content })
+      end
+      server_socket_message&.disconnect
+    end
   rescue Ollama::Errors::TimeoutError
     STDOUT.puts "#{bold('Error')}: Currently lost connection to ollama server and cannot send command."
   rescue Interrupt
     STDOUT.puts "Type /quit to quit."
+  ensure
+    self.server_socket_message = nil
   end
   0
 rescue ComplexConfig::AttributeMissing, ComplexConfig::ConfigurationSyntaxError => e
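The Interrupt handling above is the interesting part: the socket listener runs in the background, and delivery into the blocked `Reline.readline` happens via a signal. A self-contained sketch of that handoff pattern (plain Ruby, illustrative names, not the gem's API):

```ruby
# Background receiver stores the message, then interrupts the blocked read;
# the Interrupt handler picks the message up instead of aborting.
server_socket_message = nil

Thread.new do
  sleep 0.5 # pretend a socket message arrives after a while
  server_socket_message = { type: :socket_input, content: 'hello from ollama_chat_send' }
  Process.kill :INT, $$ # interrupt the blocking wait below
end

begin
  content = sleep # stands in for Reline.readline blocking on user input
rescue Interrupt
  if message = server_socket_message
    content = message[:content]
  else
    raise # a real Ctrl-C, not a socket message
  end
end
puts content # => "hello from ollama_chat_send"
```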
data/lib/ollama_chat/dialog.rb
CHANGED
@@ -1,6 +1,11 @@
 module OllamaChat::Dialog
   def choose_model(cli_model, current_model)
+    selector = if cli_model =~ /\A\?+(.*)\z/
+                 cli_model = ''
+                 Regexp.new($1)
+               end
     models = ollama.tags.models.map(&:name).sort
+    selector and models = models.grep(selector)
     model = if cli_model == ''
               OllamaChat::Utils::Chooser.choose(models) || current_model
             else
@@ -58,8 +63,12 @@ module OllamaChat::Dialog
   end
 
   def change_system_prompt(default, system: nil)
-    selector =
-
+    selector = if system =~ /\A\?(.+)\z/
+                 Regexp.new($1)
+               else
+                 Regexp.new(system.to_s)
+               end
+    prompts = config.system_prompts.attribute_names.compact.grep(selector)
     if prompts.size == 1
       system = config.system_prompts.send(prompts.first)
     else
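The `?selector` handling added here is small enough to demonstrate in isolation; this standalone sketch reproduces the regexp-based filtering with illustrative model names:

```ruby
# A leading `?` turns the rest of the argument into a regexp that narrows
# the candidate list, exactly as choose_model does above.
models = %w[llama3:8b llama3:70b mistral:7b qwen2:7b]

arg = '?llama'
if arg =~ /\A\?+(.*)\z/
  selector = Regexp.new($1)
  models   = models.grep(selector)
end
p models # => ["llama3:8b", "llama3:70b"]
```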
data/lib/ollama_chat/follow_chat.rb
CHANGED
@@ -4,6 +4,15 @@ class OllamaChat::FollowChat
   include Term::ANSIColor
   include OllamaChat::MessageFormat
 
+
+  # Initializes a new instance of OllamaChat::FollowChat.
+  #
+  # @param [OllamaChat::Chat] chat The chat object, which represents the conversation context.
+  # @param [#to_a] messages A collection of message objects, representing the conversation history.
+  # @param [String] voice (optional) The voice to speak with, if any.
+  # @param [IO] output (optional) The output stream where terminal output should be printed. Defaults to STDOUT.
+  #
+  # @return [OllamaChat::FollowChat] A new instance of OllamaChat::FollowChat.
   def initialize(chat:, messages:, voice: nil, output: STDOUT)
     super(output:)
     @chat = chat
@@ -13,6 +22,28 @@ class OllamaChat::FollowChat
     @user = nil
   end
 
+  # Returns the conversation history (an array of message objects).
+  #
+  # @return [OllamaChat::MessageList<Ollama::Message>] The array of messages in the conversation.
+  attr_reader :messages
+
+  # Invokes the chat flow based on the provided Ollama server response.
+  #
+  # The response is expected to be a parsed JSON object containing information
+  # about the user input and the assistant's response.
+  #
+  # If the response indicates an assistant message, this method:
+  # 1. Ensures that an assistant response exists in the message history (if not already present).
+  # 2. Updates the last message with the new content and thinking (if applicable).
+  # 3. Displays the formatted terminal output for the user.
+  # 4. Outputs the voice response (if configured).
+  #
+  # Regardless of whether an assistant message is present, this method also
+  # outputs evaluation statistics (if applicable).
+  #
+  # @param [Ollama::Response] response The parsed JSON response from the Ollama server.
+  #
+  # @return [OllamaChat::FollowChat] The current instance for method chaining.
   def call(response)
     debug_output(response)
 
@@ -71,10 +102,10 @@ class OllamaChat::FollowChat
     stats_text = {
       eval_duration:        Tins::Duration.new(eval_duration),
       eval_count:           response.eval_count.to_i,
-      eval_rate:            bold { "%.2f c/s" % (response.eval_count.to_i / eval_duration) } + color(111),
+      eval_rate:            bold { "%.2f t/s" % (response.eval_count.to_i / eval_duration) } + color(111),
       prompt_eval_duration: Tins::Duration.new(prompt_eval_duration),
       prompt_eval_count:    response.prompt_eval_count.to_i,
-      prompt_eval_rate:     bold { "%.2f c/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
+      prompt_eval_rate:     bold { "%.2f t/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
       total_duration:       Tins::Duration.new(response.total_duration / 1e9),
       load_duration:        Tins::Duration.new(response.load_duration / 1e9),
     }.map { _1 * ?= } * ' '
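For the renamed metric: `eval_rate` is tokens per second, i.e. `eval_count` divided by `eval_duration` in seconds. A quick worked example with made-up numbers:

```ruby
eval_count    = 256 # tokens generated (illustrative)
eval_duration = 3.2 # seconds
puts "%.2f t/s" % (eval_count / eval_duration) # => "80.00 t/s"
```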
data/lib/ollama_chat/information.rb
CHANGED
@@ -29,7 +29,7 @@ module OllamaChat::Information
 
   def info
     STDOUT.puts "Running ollama_chat version: #{bold(OllamaChat::VERSION)}"
-    STDOUT.puts "Connected to ollama server version: #{bold(server_version)}"
+    STDOUT.puts "Connected to ollama server version: #{bold(server_version)} on: #{bold(server_url)}"
     STDOUT.puts "Current conversation model is #{bold{@model}}."
     if @model_options.present?
       STDOUT.puts "  Options: #{JSON.pretty_generate(@model_options).gsub(/(?<!\A)^/, '  ')}"
@@ -98,8 +98,8 @@ module OllamaChat::Information
 
       -f CONFIG        config file to read
       -u URL           the ollama base url, OLLAMA_URL
-      -m MODEL         the ollama model to chat with, OLLAMA_CHAT_MODEL
-      -s SYSTEM        the system prompt to use as a file, OLLAMA_CHAT_SYSTEM
+      -m MODEL         the ollama model to chat with, OLLAMA_CHAT_MODEL, ?selector
+      -s SYSTEM        the system prompt to use as a file, OLLAMA_CHAT_SYSTEM, ?selector
       -c CHAT          a saved chat conversation to load
       -C COLLECTION    name of the collection used in this conversation
       -D DOCUMENT      load document and add to embeddings collection (multiple)
@@ -109,6 +109,8 @@ module OllamaChat::Information
       -V               display the current version number and quit
       -h               this help
 
+      Use `?selector` with `-m` or `-s` to filter options. Multiple matches
+      will open a chooser dialog.
     EOT
     0
   end
@@ -121,4 +123,8 @@ module OllamaChat::Information
   def server_version
     @server_version ||= ollama.version.version
   end
+
+  def server_url
+    @server_url ||= ollama.base_url
+  end
 end
data/lib/ollama_chat/message_list.rb
CHANGED
@@ -128,22 +128,42 @@ class OllamaChat::MessageList
     self
   end
 
-  #
+  # Removes the last `n` exchanges from the message list. An exchange consists
+  # of a user and an assistant message. If only a single user message is
+  # present at the end, it will be removed first before proceeding with
+  # complete exchanges.
   #
-  # @param n [
+  # @param n [Integer] The number of exchanges to remove.
+  # @return [Integer] The actual number of complete exchanges removed.
+  #   This may be less than `n` if there are not enough messages.
   #
-  # @
+  # @note
+  #   - System messages are preserved and not considered part of an exchange.
+  #   - If only one incomplete exchange (a single user message) exists, it will
+  #     be dropped first before removing complete exchanges.
   def drop(n)
-
-
-
-
-
-
-
-    STDOUT.puts "
-
+    n = n.to_i.clamp(1, Float::INFINITY)
+    non_system_messages = @messages.reject { _1.role == 'system' }
+    if non_system_messages&.last&.role == 'user'
+      @messages.pop
+      n -= 1
+    end
+    if n == 0
+      STDOUT.puts "Dropped the last exchange."
+      return 1
+    end
+    if non_system_messages.empty?
+      STDOUT.puts "No more exchanges can be dropped."
+      return 0
+    end
+    m = 0
+    while @messages.size > 1 && n > 0
+      @messages.pop(2)
+      m += 1
+      n -= 1
     end
+    STDOUT.puts "Dropped the last #{m} exchanges."
+    m
   end
 
   # Sets the system prompt for the chat session.
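The new `drop` semantics are easiest to see on plain data. This standalone sketch mirrors the algorithm above using hashes instead of `Ollama::Message` objects (and without the console output):

```ruby
def drop_exchanges(messages, n)
  n = n.to_i.clamp(1, Float::INFINITY)
  non_system = messages.reject { |m| m[:role] == 'system' }
  if non_system.last && non_system.last[:role] == 'user'
    messages.pop # drop the incomplete trailing exchange first
    n -= 1
  end
  return 1 if n == 0
  return 0 if non_system.empty?
  dropped = 0
  while messages.size > 1 && n > 0
    messages.pop(2) # one user/assistant pair
    dropped += 1
    n -= 1
  end
  dropped
end

log = [
  { role: 'system',    content: 'be brief' },
  { role: 'user',      content: 'hello' },
  { role: 'assistant', content: 'hi' },
  { role: 'user',      content: 'world' }, # incomplete exchange
]
p drop_exchanges(log, 1) # => 1 ("world" is popped first)
p log.size               # => 3, system prompt untouched
```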
data/lib/ollama_chat/model_handling.rb
CHANGED
@@ -1,15 +1,44 @@
 module OllamaChat::ModelHandling
+
+  # The model_present? method checks if the specified Ollama model is available.
+  #
+  # @param model [ String ] the name of the Ollama model
+  #
+  # @return [ String, FalseClass ] the system prompt if the model is present,
+  #   false otherwise
   def model_present?(model)
     ollama.show(model:) { return _1.system.to_s }
   rescue Ollama::Errors::NotFoundError
     false
   end
 
+  # The pull_model_from_remote method attempts to retrieve a model from the
+  # remote server if it is not found locally.
+  #
+  # @param model [ String ] the name of the model to be pulled
+  #
+  # @return [ nil ]
   def pull_model_from_remote(model)
     STDOUT.puts "Model #{bold{model}} not found locally, attempting to pull it from remote now…"
     ollama.pull(model:)
   end
 
+  # The pull_model_unless_present method checks if the specified model is
+  # present on the system.
+  #
+  # If the model is already present, it returns the model's system prompt, if
+  # it has one.
+  #
+  # Otherwise, it attempts to pull the model from the remote server using the
+  # pull_model_from_remote method. If the model is still not found after
+  # pulling, it exits the program with a message indicating that the model was
+  # not found remotely.
+  #
+  # @param model [ String ] The name of the model to check for presence.
+  # @param options [ Hash ] Options for the pull_model_from_remote method.
+  #
+  # @return [ String, FalseClass ] the system prompt if the model and its
+  #   system prompt are present, false otherwise.
   def pull_model_unless_present(model, options)
     if system = model_present?(model)
       return system.full?
data/lib/ollama_chat/ollama_chat_config/default_config.yml
CHANGED
@@ -61,6 +61,7 @@ redis:
     url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
     ex: 86400
 chat_history_filename: <%= ENV.fetch('OLLAMA_CHAT_HISTORY', '~/.ollama_chat_history') %>
+server_socket_runtime_dir: .
 debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
 request_headers:
   Accept: 'text/*,application/*,image/*'
data/lib/ollama_chat/server_socket.rb
CHANGED
@@ -1,65 +1,50 @@
 module OllamaChat::ServerSocket
   class << self
-    #
-    #
-    def runtime_dir
-      File.expand_path(ENV.fetch('XDG_RUNTIME_DIR', '~/.local/run'))
-    end
-
-    # Constructs the full path to the server socket file.
-    # @return [String] the full path to the Unix socket
-    def server_socket_path
-      File.join(runtime_dir, 'ollama_chat.sock')
-    end
+    # The send_to_server_socket method sends content to the server socket and returns
+    # the response if type is :socket_input_with_response, otherwise it returns nil.
 
-    #
+    # @param content [ String ] the message to be sent to the server
+    # @param type [ Symbol ] the type of message being sent (default: :socket_input)
     #
-    # @
-    #
-
-
-    def send_to_server_socket(content, type: :socket_input)
-      FileUtils.mkdir_p runtime_dir
+    # @return [ String, NilClass ] the response from the server if type is
+    #   :socket_input_with_response, otherwise nil.
+    def send_to_server_socket(content, config:, type: :socket_input)
+      server = create_socket_server(config:)
       message = { content:, type: }
-
-
-
+      if type.to_sym == :socket_input_with_response
+        return server.transmit_with_response(message)
+      else
+        server.transmit(message)
+        nil
+      end
+    end
+
+    def create_socket_server(config:)
+      if runtime_dir = config.server_socket_runtime_dir
+        UnixSocks::Server.new(socket_name: 'ollama_chat.sock', runtime_dir:)
+      else
+        UnixSocks::Server.new(socket_name: 'ollama_chat.sock')
+      end
     end
   end
 
-  # Accessor for the server socket message.
-  # Holds the last message received from the Unix socket.
-  # @return [String, nil] the message content, or nil if not set
-  # @see OllamaChat::ServerSocket#init_server_socket
-  # @see OllamaChat::ServerSocket#send_to_server_socket
   attr_accessor :server_socket_message
 
-  # Initializes
+  # Initializes the server socket to receive messages from the Ollama Chat
+  # Client.
   #
-  #
-  #
-  #
+  # This method sets up a Unix domain socket server that listens for incoming
+  # messages in the background. When a message is received, it updates the
+  # instance variable `server_socket_message` and sends an interrupt signal
+  # to the current process in order to handle the message.
   #
-  #
+  # @return [ nil ] This method does not return any value, it only sets up the
+  #   server socket and kills the process when a message is received.
   def init_server_socket
-
-
-
-
-    Thread.new do
-      Socket.unix_server_loop(OllamaChat::ServerSocket.server_socket_path) do |sock, client_addrinfo|
-        begin
-          data = sock.readline.chomp
-          self.server_socket_message = JSON.load(data)
-          Process.kill :INT, $$
-        rescue JSON::ParserError
-        ensure
-          sock.close
-        end
-      end
-    rescue Errno::ENOENT
-    ensure
-      FileUtils.rm_f OllamaChat::ServerSocket.server_socket_path
+    server = OllamaChat::ServerSocket.create_socket_server(config:)
+    server.receive_in_background do |message|
+      self.server_socket_message = message
+      Process.kill :INT, $$
     end
   end
 end
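A hedged sketch of the new round trip, assuming the `unix_socks` gem and only the calls that appear in this diff (`UnixSocks::Server.new` with `socket_name:`/`runtime_dir:`, `receive_in_background`, `transmit`); details of the gem's message objects may differ:

```ruby
require 'unix_socks'

# Both ends agree on the socket location, as create_socket_server does above.
server = UnixSocks::Server.new(socket_name: 'ollama_chat.sock', runtime_dir: '.')

# Receiving side (what init_server_socket sets up): handle messages as they arrive.
server.receive_in_background do |message|
  puts "got: #{message.content}"
end

# Sending side (what send_to_server_socket does for :socket_input):
server.transmit({ content: 'hello', type: :socket_input })

sleep 1 # give the background receiver a moment before the script exits
```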
data/lib/ollama_chat/version.rb
CHANGED
data/lib/ollama_chat.rb
CHANGED
data/ollama_chat.gemspec
CHANGED
@@ -1,9 +1,9 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama_chat 0.0.14 ruby lib
+# stub: ollama_chat 0.0.16 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "ollama_chat".freeze
-  s.version = "0.0.14".freeze
+  s.version = "0.0.16".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
@@ -18,7 +18,7 @@ Gem::Specification.new do |s|
   s.licenses = ["MIT".freeze]
   s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
   s.required_ruby_version = Gem::Requirement.new("~> 3.1".freeze)
-  s.rubygems_version = "3.6.
+  s.rubygems_version = "3.6.9".freeze
 s.summary = "A command-line interface (CLI) for interacting with an Ollama AI model.".freeze
 s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
 
@@ -34,6 +34,7 @@ Gem::Specification.new do |s|
   s.add_runtime_dependency(%q<excon>.freeze, ["~> 1.0".freeze])
   s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 1.2".freeze])
   s.add_runtime_dependency(%q<documentrix>.freeze, ["~> 0.0".freeze, ">= 0.0.2".freeze])
+  s.add_runtime_dependency(%q<unix_socks>.freeze, [">= 0".freeze])
   s.add_runtime_dependency(%q<rss>.freeze, ["~> 0.3".freeze])
   s.add_runtime_dependency(%q<term-ansicolor>.freeze, ["~> 1.11".freeze])
   s.add_runtime_dependency(%q<redis>.freeze, ["~> 5.0".freeze])
data/spec/ollama_chat/information_spec.rb
CHANGED
@@ -39,7 +39,16 @@ RSpec.describe OllamaChat::Information do
     expect(chat.usage).to eq 0
   end
 
-  it 'can show
+  it 'can show version' do
+    expect(STDOUT).to receive(:puts).with(/^ollama_chat \d+\.\d+\.\d+$/)
     expect(chat.version).to eq 0
   end
+
+  it 'can show server version' do
+    expect(chat.server_version).to eq '6.6.6'
+  end
+
+  it 'can show server URL' do
+    expect(chat.server_url).to be_a URI::HTTP
+  end
 end
data/spec/ollama_chat/message_list_spec.rb
CHANGED
@@ -128,11 +128,26 @@ RSpec.describe OllamaChat::MessageList do
     expect(list.size).to eq 1
     list << Ollama::Message.new(role: 'user', content: 'world')
     expect(list.size).to eq 2
-    expect(list.drop(1)).to eq 0
     list << Ollama::Message.new(role: 'assistant', content: 'hi')
     expect(list.size).to eq 3
     expect(list.drop(1)).to eq 1
     expect(list.size).to eq 1
+    expect(list.drop(1)).to eq 0
+    expect(list.size).to eq 1
+    expect(list.drop(1)).to eq 0
+    expect(list.size).to eq 1
+  end
+
+  it 'drops the last user message when there is no assistant response' do
+    expect(list.size).to eq 1
+    list << Ollama::Message.new(role: 'user', content: 'hello')
+    list << Ollama::Message.new(role: 'assistant', content: 'hi')
+    list << Ollama::Message.new(role: 'user', content: 'world')
+    expect(list.size).to eq 4
+    expect(list.drop(1)).to eq 1
+    expect(list.size).to eq 3
+    expect(list.drop(1)).to eq 1
+    expect(list.size).to eq 1
   end
 
   it 'can determine location for system prompt' do
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: ollama_chat
 version: !ruby/object:Gem::Version
-  version: 0.0.14
+  version: 0.0.16
 platform: ruby
 authors:
 - Florian Frank
@@ -155,6 +155,20 @@ dependencies:
   - - ">="
   - !ruby/object:Gem::Version
     version: 0.0.2
+- !ruby/object:Gem::Dependency
+  name: unix_socks
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
 - !ruby/object:Gem::Dependency
   name: rss
   requirement: !ruby/object:Gem::Requirement
@@ -479,7 +493,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.6.
+rubygems_version: 3.6.9
 specification_version: 4
 summary: A command-line interface (CLI) for interacting with an Ollama AI model.
 test_files: