ollama_chat 0.0.20 → 0.0.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGES.md +16 -0
- data/README.md +8 -0
- data/VERSION +1 -1
- data/bin/ollama_chat_send +3 -2
- data/lib/ollama_chat/chat.rb +176 -8
- data/lib/ollama_chat/dialog.rb +8 -5
- data/lib/ollama_chat/follow_chat.rb +36 -0
- data/lib/ollama_chat/information.rb +1 -0
- data/lib/ollama_chat/ollama_chat_config/default_config.yml +1 -0
- data/lib/ollama_chat/server_socket.rb +36 -15
- data/lib/ollama_chat/source_fetching.rb +91 -1
- data/lib/ollama_chat/switches.rb +79 -16
- data/lib/ollama_chat/utils/cache_fetcher.rb +30 -0
- data/lib/ollama_chat/utils/fetcher.rb +117 -0
- data/lib/ollama_chat/version.rb +1 -1
- data/lib/ollama_chat/vim.rb +53 -0
- data/lib/ollama_chat/web_searching.rb +35 -1
- data/lib/ollama_chat.rb +1 -0
- data/ollama_chat.gemspec +6 -6
- data/spec/assets/api_tags.json +17 -0
- data/spec/ollama_chat/chat_spec.rb +1 -1
- data/spec/ollama_chat/server_socket_spec.rb +133 -0
- data/spec/ollama_chat/switches_spec.rb +9 -14
- data/spec/spec_helper.rb +2 -6
- metadata +7 -3
data/lib/ollama_chat/web_searching.rb
CHANGED
@@ -1,4 +1,16 @@
 module OllamaChat::WebSearching
+  # The search_web method performs a web search using the configured search
+  # engine.
+  # It appends location information to the query if available and limits the
+  # number of results.
+  # The method delegates to engine-specific search methods based on the
+  # configured search engine.
+  #
+  # @param query [ String ] the search query string
+  # @param n [ Integer ] the maximum number of results to return
+  #
+  # @return [ Array<String>, nil ] an array of URLs from the search results or
+  #   nil if the search engine is not implemented
   def search_web(query, n = nil)
     l = @messages.at_location.full? and query += " #{l}"
     n = n.to_i.clamp(1..)
@@ -14,10 +26,22 @@ module OllamaChat::WebSearching

   private

+  # The search_engine method returns the currently configured web search engine
+  # to be used for online searches.
+  #
+  # @return [ String ] the name of the web search engine
+  # @see OllamaChat::Config::WebSearch#use
   def search_engine
     config.web_search.use
   end

+  # The search_web_with_searxng method performs a web search using the SearxNG
+  # engine and returns the URLs of the first n search results.
+  #
+  # @param query [ String ] the search query string
+  # @param n [ Integer ] the number of search results to return
+  #
+  # @return [ Array<String> ] an array of URLs from the search results
   def search_web_with_searxng(query, n)
     url = config.web_search.engines.searxng.url % { query: }
     OllamaChat::Utils::Fetcher.get(
@@ -30,6 +54,15 @@ module OllamaChat::WebSearching
     end
   end

+  # The search_web_with_duckduckgo method performs a web search using the
+  # DuckDuckGo search engine and extracts URLs from the search results.
+  #
+  # @param query [ String ] the search query string to be used
+  # @param n [ Integer ] the maximum number of URLs to extract from the search
+  #   results
+  #
+  # @return [ Array<String> ] an array of URL strings extracted from the search
+  #   results
   def search_web_with_duckduckgo(query, n)
     url = config.web_search.engines.duckduckgo.url % { query: }
     OllamaChat::Utils::Fetcher.get(
@@ -47,7 +80,8 @@ module OllamaChat::WebSearching
         url = URI.decode_uri_component(url)
         url = URI.parse(url)
         url.host =~ /duckduckgo\.com/ and next
-
+        url = url.to_s
+        links.add(url)
         result << url
         n -= 1
       else
data/lib/ollama_chat.rb
CHANGED
@@ -19,6 +19,7 @@ require 'ollama_chat/dialog'
 require 'ollama_chat/information'
 require 'ollama_chat/message_output'
 require 'ollama_chat/clipboard'
+require 'ollama_chat/vim'
 require 'ollama_chat/document_cache'
 require 'ollama_chat/history'
 require 'ollama_chat/server_socket'
data/ollama_chat.gemspec
CHANGED
@@ -1,9 +1,9 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama_chat 0.0.20 ruby lib
+# stub: ollama_chat 0.0.22 ruby lib

 Gem::Specification.new do |s|
   s.name = "ollama_chat".freeze
-  s.version = "0.0.20".freeze
+  s.version = "0.0.22".freeze

   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
@@ -12,19 +12,19 @@ Gem::Specification.new do |s|
   s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
   s.email = "flori@ping.de".freeze
   s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
-  s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
-  s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
+  s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
+  s.files = [".all_images.yml".freeze, ".envrc".freeze, ".gitignore".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "VERSION".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
   s.homepage = "https://github.com/flori/ollama_chat".freeze
   s.licenses = ["MIT".freeze]
   s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
   s.required_ruby_version = Gem::Requirement.new("~> 3.1".freeze)
   s.rubygems_version = "3.6.9".freeze
   s.summary = "A command-line interface (CLI) for interacting with an Ollama AI model.".freeze
-  s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]
+  s.test_files = ["spec/assets/example.rb".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze]

   s.specification_version = 4

-  s.add_development_dependency(%q<gem_hadar>.freeze, ["~>
+  s.add_development_dependency(%q<gem_hadar>.freeze, ["~> 2.0".freeze])
   s.add_development_dependency(%q<all_images>.freeze, ["~> 0.6".freeze])
   s.add_development_dependency(%q<rspec>.freeze, ["~> 3.2".freeze])
   s.add_development_dependency(%q<kramdown>.freeze, ["~> 2.0".freeze])
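For consumers, the gemspec change amounts to the version bump to 0.0.22 plus a gem_hadar development-dependency update (~> 2.0). A minimal Gemfile entry to pick up exactly this release might look as follows (illustrative; a pessimistic constraint works just as well):

```ruby
# Gemfile
source 'https://rubygems.org'

gem 'ollama_chat', '0.0.22'
```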
data/spec/assets/api_tags.json
CHANGED
@@ -16,6 +16,23 @@
         "parameter_size": "8.0B",
         "quantization_level": "Q4_K_M"
       }
+    },
+    {
+      "name": "qwen3-coder:latest",
+      "model": "qwen3-coder:latest",
+      "modified_at": "2025-08-08T00:10:43.7235626Z",
+      "size": 18556701140,
+      "digest": "ad67f85ca2502e92936ef793bf29a312e1912ecd1e2c09c9c2963adf1debde78",
+      "details": {
+        "parent_model": "",
+        "format": "gguf",
+        "family": "qwen3moe",
+        "families": [
+          "qwen3moe"
+        ],
+        "parameter_size": "30.5B",
+        "quantization_level": "Q4_K_M"
+      }
     }
   ]
 }
data/spec/ollama_chat/chat_spec.rb
CHANGED
@@ -79,7 +79,7 @@ describe OllamaChat::Chat do

     it 'returns :next when input is "/drop(?:\s+(\d*))?"' do
       expect(chat.messages).to receive(:drop).with(?2)
-      expect(chat.messages).to receive(:
+      expect(chat.messages).to receive(:show_last)
       expect(chat.handle_input("/drop 2")).to eq :next
     end

data/spec/ollama_chat/server_socket_spec.rb
ADDED
@@ -0,0 +1,133 @@
+require 'spec_helper'
+
+describe OllamaChat::ServerSocket do
+  let :instance do
+    Object.extend(described_class)
+  end
+
+  describe '#send_to_server_socket' do
+    let(:config) { double('Config') }
+    let(:server) { double('Server') }
+
+    before do
+      expect(OllamaChat::ServerSocket).to receive(:create_socket_server).with(config: config).and_return(server)
+    end
+
+    context 'with default parameters' do
+      it 'uses correct defaults' do
+        message = { content: 'test', type: :socket_input, parse: false }
+
+        expect(server).to receive(:transmit).with(message).and_return(nil)
+
+        result = OllamaChat::ServerSocket.send_to_server_socket('test', config: config)
+
+        expect(result).to be_nil
+      end
+    end
+
+    context 'with :socket_input type and parse: true' do
+      it 'sends message with parse flag and returns nil' do
+        message = { content: 'test', type: :socket_input, parse: true }
+
+        expect(server).to receive(:transmit).with(message).and_return(nil)
+
+        result = OllamaChat::ServerSocket.send_to_server_socket(
+          'test',
+          config: config,
+          type: :socket_input,
+          parse: true
+        )
+
+        expect(result).to be_nil
+      end
+    end
+
+    context 'with :socket_input_with_response type and parse: false' do
+      it 'sends message and returns response with parse flag' do
+        message = { content: 'test', type: :socket_input_with_response, parse: false }
+        response = double('Response')
+
+        expect(server).to receive(:transmit_with_response).with(message).and_return(response)
+
+        result = OllamaChat::ServerSocket.send_to_server_socket(
+          'test',
+          config: config,
+          type: :socket_input_with_response,
+          parse: false
+        )
+
+        expect(result).to eq(response)
+      end
+    end
+
+    context 'with :socket_input_with_response type and parse: true' do
+      it 'sends message and returns response with parse flag' do
+        message = { content: 'test', type: :socket_input_with_response, parse: true }
+        response = double('Response')
+
+        expect(server).to receive(:transmit_with_response).with(message).and_return(response)
+
+        result = OllamaChat::ServerSocket.send_to_server_socket(
+          'test',
+          config: config,
+          type: :socket_input_with_response,
+          parse: true
+        )
+
+        expect(result).to eq(response)
+      end
+    end
+
+  end
+
+  describe '#create_socket_server' do
+    context 'with configured runtime_dir' do
+      it 'can be created with configured runtime_dir' do
+        config = double('Config', server_socket_runtime_dir: '/custom/runtime')
+        expect(UnixSocks::Server).to receive(:new).with(
+          socket_name: 'ollama_chat.sock',
+          runtime_dir: '/custom/runtime'
+        ).and_return :unix_socks_server
+
+        result = OllamaChat::ServerSocket.create_socket_server(config: config)
+        expect(result).to eq :unix_socks_server
+      end
+    end
+
+    context 'with default runtime_dir' do
+      it 'can be created with default runtime_dir' do
+        config = double('Config', server_socket_runtime_dir: nil)
+        expect(UnixSocks::Server).to receive(:new).with(
+          socket_name: 'ollama_chat.sock'
+        ).and_return :unix_socks_server
+
+        result = OllamaChat::ServerSocket.create_socket_server(config: config)
+        expect(result).to eq :unix_socks_server
+      end
+    end
+  end
+
+  describe '#server_socket_message' do
+    it 'can be set' do
+      message = double('message')
+      instance.server_socket_message = message
+      expect(instance.server_socket_message).to eq(message)
+    end
+
+    it 'can be read' do
+      message = double('message')
+      instance.server_socket_message = message
+      expect(instance.server_socket_message).to eq(message)
+    end
+  end
+
+  describe '#init_server_socket' do
+    it 'can be initialized' do
+      config = double('Config')
+      expect(instance).to receive(:config).and_return config
+      server = double('Server', receive_in_background: :receive_in_background)
+      expect(described_class).to receive(:create_socket_server).and_return server
+      expect(instance.init_server_socket).to eq :receive_in_background
+    end
+  end
+end
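The new spec pins down the public shape of OllamaChat::ServerSocket: send_to_server_socket takes the message content plus config:, type:, and parse: keywords, returns nil for fire-and-forget :socket_input messages, and returns the response object for :socket_input_with_response. Below is a hedged usage sketch, assuming an ollama_chat session is already running and listening on its Unix socket; OpenStruct is only a stand-in for the gem's real loaded configuration.

```ruby
require 'ostruct'
require 'ollama_chat'

# Stand-in config responding to the attribute create_socket_server reads.
config = OpenStruct.new(server_socket_runtime_dir: nil)

# Fire-and-forget: transmits a :socket_input message and returns nil.
OllamaChat::ServerSocket.send_to_server_socket(
  'Hello from another terminal', config: config
)

# Round trip: waits for the running chat to answer and returns its response.
response = OllamaChat::ServerSocket.send_to_server_socket(
  'Please summarize that answer',
  config: config,
  type:   :socket_input_with_response,
  parse:  true
)
```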
data/spec/ollama_chat/switches_spec.rb
CHANGED
@@ -4,8 +4,7 @@ describe OllamaChat::Switches do
   describe OllamaChat::Switches::Switch do
     let :switch do
       described_class.new(
-        :test,
-        config: config,
+        value: config.test,
         msg: {
           true => "Enabled.",
           false => "Disabled.",
@@ -15,7 +14,7 @@ describe OllamaChat::Switches do

     context 'default to false' do
       let :config do
-        double(test
+        double(test: false)
       end

       it 'can be switched on' do
@@ -38,7 +37,7 @@ describe OllamaChat::Switches do

     context 'default to false' do
       let :config do
-        double(test
+        double(test: true)
       end

       it 'can be switched on' do
@@ -63,13 +62,12 @@ describe OllamaChat::Switches do
   describe OllamaChat::Switches::CombinedSwitch do
     describe 'off' do
       let :config do
-        double(test1
+        double(test1: true, test2: false)
       end

       let :switch1 do
         OllamaChat::Switches::Switch.new(
-          :test1,
-          config: config,
+          value: config.test1,
           msg: {
             true => "Enabled.",
             false => "Disabled.",
@@ -79,8 +77,7 @@ describe OllamaChat::Switches do

       let :switch2 do
         OllamaChat::Switches::Switch.new(
-          :test2,
-          config: config,
+          value: config.test2,
           msg: {
             true => "Enabled.",
             false => "Disabled.",
@@ -117,13 +114,12 @@ describe OllamaChat::Switches do

     describe 'on' do
       let :config do
-        double(test1
+        double(test1: false, test2: true)
       end

       let :switch1 do
         OllamaChat::Switches::Switch.new(
-          :test1,
-          config: config,
+          value: config.test1,
           msg: {
             true => "Enabled.",
             false => "Disabled.",
@@ -133,8 +129,7 @@ describe OllamaChat::Switches do

       let :switch2 do
         OllamaChat::Switches::Switch.new(
-          :test2,
-          config: config,
+          value: config.test2,
           msg: {
             true => "Enabled.",
             false => "Disabled.",
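The spec changes reflect an API shift in OllamaChat::Switches::Switch: instead of a switch name plus the config object, the constructor now takes the initial state directly via value:. A minimal construction sketch based only on what the updated spec exercises (toggling behavior is not shown here, and the msg hash simply mirrors the spec's example messages):

```ruby
require 'ollama_chat'

# The initial value would previously have been looked up from config by name;
# now it is passed in explicitly.
switch = OllamaChat::Switches::Switch.new(
  value: false,
  msg: {
    true  => "Enabled.",
    false => "Disabled.",
  }
)
```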
data/spec/spec_helper.rb
CHANGED
@@ -1,9 +1,5 @@
-
-
-  SimpleCov.start do
-    add_filter "#{File.basename(File.dirname(__FILE__))}/"
-  end
-end
+require 'gem_hadar/simplecov'
+GemHadar::SimpleCov.start
 require 'rspec'
 require 'tins/xt/expose'
 begin
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: ollama_chat
 version: !ruby/object:Gem::Version
-  version: 0.0.20
+  version: 0.0.22
 platform: ruby
 authors:
 - Florian Frank
@@ -15,14 +15,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: '
+        version: '2.0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
      - !ruby/object:Gem::Version
-        version: '
+        version: '2.0'
 - !ruby/object:Gem::Dependency
   name: all_images
   requirement: !ruby/object:Gem::Requirement
@@ -401,6 +401,7 @@ extra_rdoc_files:
 - lib/ollama_chat/utils/fetcher.rb
 - lib/ollama_chat/utils/file_argument.rb
 - lib/ollama_chat/version.rb
+- lib/ollama_chat/vim.rb
 - lib/ollama_chat/web_searching.rb
 files:
 - ".all_images.yml"
@@ -439,6 +440,7 @@ files:
 - lib/ollama_chat/utils/fetcher.rb
 - lib/ollama_chat/utils/file_argument.rb
 - lib/ollama_chat/version.rb
+- lib/ollama_chat/vim.rb
 - lib/ollama_chat/web_searching.rb
 - ollama_chat.gemspec
 - redis/redis.conf
@@ -466,6 +468,7 @@ files:
 - spec/ollama_chat/message_output_spec.rb
 - spec/ollama_chat/model_handling_spec.rb
 - spec/ollama_chat/parsing_spec.rb
+- spec/ollama_chat/server_socket_spec.rb
 - spec/ollama_chat/source_fetching_spec.rb
 - spec/ollama_chat/switches_spec.rb
 - spec/ollama_chat/utils/cache_fetcher_spec.rb
@@ -509,6 +512,7 @@ test_files:
 - spec/ollama_chat/message_output_spec.rb
 - spec/ollama_chat/model_handling_spec.rb
 - spec/ollama_chat/parsing_spec.rb
+- spec/ollama_chat/server_socket_spec.rb
 - spec/ollama_chat/source_fetching_spec.rb
 - spec/ollama_chat/switches_spec.rb
 - spec/ollama_chat/utils/cache_fetcher_spec.rb