ollama_chat 0.0.28 → 0.0.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 1e18a688ad95c46f3776624e926def4eca5bd3f138321b2cdeec13f32606aceb
4
- data.tar.gz: 68c632100120653785b5f2747db5e5da254550bdd1de80a70cdbf16c1db56e8a
3
+ metadata.gz: c4521bf944235fe5a3507f46e8c1ff8016faab3c178c2c8981d223c02c32ad1e
4
+ data.tar.gz: 8266fb46c588c5e456c00574a2c291b89da9aab453e8d33690a32c9b5e23762a
5
5
  SHA512:
6
- metadata.gz: 94621d91d0fcec33e3695768b52ceeea30dc69728d92e0cdf1966751d23abc059df07543efc9ab87e547277050a39b6ea572ab7aab7db639dcc65b19a540c494
7
- data.tar.gz: 4780b91d49114e137363b6281cce78ef467d5e2c2a566f311fcc87d65aaad029d80e14cf522db9ba89050700e07a2645ea8f32c2eb0b062c9f4840b1b2bf4dd0
6
+ metadata.gz: b3bf3bc435c932230a7ac671f52fb41890abf8e673dd31bf8784eac721ef859e7726c06a164f5a15f7a474351274213bbbd82fe8973c9340ef3cd5463343799d
7
+ data.tar.gz: 7a05cc98e971adaac2e1ae834ac732533bfea925bc6c7e084068bad4dc4fc16292f3eac936406ff8abca9253cafc5b61f61f1597af8945595bd62a0084c85aba
data/CHANGES.md CHANGED
@@ -1,5 +1,18 @@
1
1
  # Changes
2
2
 
3
+ ## 2025-09-08 v0.0.29
4
+
5
+ - Refactored conversation persistence into a dedicated
6
+ `OllamaChat::Conversation` module
7
+ - Added automatic backup saving in the `fix_config` method using
8
+ **backup.json**
9
+ - Simplified `/save` and `/load` commands to delegate to module methods
10
+ - Introduced document processing policies for web search results with three
11
+ modes: **embedding**, **summarizing**, and **importing**
12
+ - Added `@document_policy` configuration to control result processing mode
13
+ - Updated `/web` command help text and added new prompt templates for summarization and importing modes
14
+ - Modified conditional logic from `@embedding.on?` to `@document_policy == 'embedding' && @embedding.on?`
15
+
3
16
  ## 2025-09-08 v0.0.28
4
17
 
5
18
  - Replaced `server_socket_runtime_dir` config option with
data/README.md CHANGED
@@ -171,7 +171,7 @@ The following commands can be given inside the chat, if prefixed by a `/`:
171
171
  /summarize [n] source summarize the source's content in n words
172
172
  /embedding toggle embedding paused or not
173
173
  /embed source embed the source's content
174
- /web [n] query query web search & return n or 1 results
174
+ /web [n] query query web & return n(=1) results (policy: importing)
175
175
  /links [clear] display (or clear) links used in the chat
176
176
  /save filename store conversation messages
177
177
  /load filename load conversation messages
@@ -48,6 +48,7 @@ class OllamaChat::Chat
48
48
  include OllamaChat::History
49
49
  include OllamaChat::ServerSocket
50
50
  include OllamaChat::KramdownANSI
51
+ include OllamaChat::Conversation
51
52
 
52
53
  # Initializes a new OllamaChat::Chat instance with the given command-line
53
54
  # arguments.
@@ -312,27 +313,13 @@ class OllamaChat::Chat
312
313
  @parse_content = false
313
314
  web($1, $2)
314
315
  when %r(^/save\s+(.+)$)
315
- filename = $1
316
- if messages.save_conversation(filename)
317
- STDOUT.puts "Saved conversation to #{filename.inspect}."
318
- else
319
- STDERR.puts "Saving conversation to #{filename.inspect} failed."
320
- end
316
+ save_conversation($1)
321
317
  :next
322
318
  when %r(^/links(?:\s+(clear))?$)
323
319
  manage_links($1)
324
320
  :next
325
321
  when %r(^/load\s+(.+)$)
326
- filename = $1
327
- success = messages.load_conversation(filename)
328
- if messages.size > 1
329
- messages.list_conversation(2)
330
- end
331
- if success
332
- STDOUT.puts "Loaded conversation from #{filename.inspect}."
333
- else
334
- STDERR.puts "Loading conversation from #{filename.inspect} failed."
335
- end
322
+ load_conversation($1)
336
323
  :next
337
324
  when %r(^/pipe\s+(.+)$)
338
325
  pipe($1)
@@ -362,31 +349,60 @@ class OllamaChat::Chat
362
349
  end
363
350
  end
364
351
 
365
- # The web method performs a web search and processes the results based on
366
- # embedding configuration.
367
- #
368
- # It searches for the given query using the configured search engine and
369
- # processes up to the specified number of URLs. If embeddings are enabled, it
370
- # embeds each result and interpolates the query into the web_embed prompt.
371
- # Otherwise, it imports each result and interpolates both the query and
372
- # results into the web_import prompt.
373
- #
374
- # @param count [ String ] the maximum number of search results to process
375
- # @param query [ String ] the search query string
376
- #
377
- # @return [ String, Symbol ] the interpolated prompt content or :next if no URLs were found
352
+ # Performs a web search and processes the results based on document processing configuration.
353
+ #
354
+ # Searches for the given query using the configured search engine and processes up to
355
+ # the specified number of URLs. The processing approach varies based on the current
356
+ # document policy and embedding status:
357
+ #
358
+ # - **Embedding mode**: When `@document_policy == 'embedding'` AND `@embedding.on?` is true,
359
+ # each result is embedded and the query is interpolated into the `web_embed` prompt.
360
+ # - **Summarizing mode**: When `@document_policy == 'summarizing'`,
361
+ # each result is summarized and both query and results are interpolated into the
362
+ # `web_summarize` prompt.
363
+ # - **Importing mode**: For all other cases, each result is imported and both query and
364
+ # results are interpolated into the `web_import` prompt.
365
+ #
366
+ # @param count [String] The maximum number of search results to process (defaults to 1)
367
+ # @param query [String] The search query string
368
+ #
369
+ # @return [String, Symbol] The interpolated prompt content when successful,
370
+ # or :next if no URLs were found or processing failed
371
+ #
372
+ # @example Basic web search
373
+ # web('3', 'ruby programming tutorials')
374
+ #
375
+ # @example Web search with embedding policy
376
+ # # With @document_policy == 'embedding' and @embedding.on?
377
+ # # Processes results through embedding pipeline
378
+ #
379
+ # @example Web search with summarizing policy
380
+ # # With @document_policy == 'summarizing'
381
+ # # Processes results through summarization pipeline
382
+ #
383
+ # @see #search_web
384
+ # @see #fetch_source
385
+ # @see #embed_source
386
+ # @see #import
387
+ # @see #summarize
378
388
  def web(count, query)
379
389
  urls = search_web(query, count.to_i) or return :next
380
- if @embedding.on?
390
+ if @document_policy == 'embedding' && @embedding.on?
381
391
  prompt = config.prompts.web_embed
382
392
  urls.each do |url|
383
393
  fetch_source(url) { |url_io| embed_source(url_io, url) }
384
394
  end
385
395
  prompt.named_placeholders_interpolate({query:})
396
+ elsif @document_policy == 'summarizing'
397
+ prompt = config.prompts.web_summarize
398
+ results = urls.each_with_object('') do |url, content|
399
+ import(url).full? { |c| content << c }
400
+ end
401
+ prompt.named_placeholders_interpolate({query:, results:})
386
402
  else
387
403
  prompt = config.prompts.web_import
388
404
  results = urls.each_with_object('') do |url, content|
389
- import(url).full? { |c| content << c }
405
+ summarize(url).full? { |c| content << c }
390
406
  end
391
407
  prompt.named_placeholders_interpolate({query:, results:})
392
408
  end
@@ -727,6 +743,7 @@ class OllamaChat::Chat
727
743
  # @param exception [ Exception ] the exception that occurred while reading
728
744
  # the config file
729
745
  def fix_config(exception)
746
+ save_conversation('backup.json')
730
747
  STDOUT.puts "When reading the config file, a #{exception.class} "\
731
748
  "exception was caught: #{exception.message.inspect}"
732
749
  if ask?(prompt: 'Do you want to fix the config? (y/n) ') =~ /\Ay/i
@@ -0,0 +1,58 @@
1
+ # A module that provides conversation persistence functionality for the
2
+ # OllamaChat::Chat class.
3
+ #
4
+ # This module encapsulates the logic for saving and loading chat conversations
5
+ # to/from JSON files. It delegates the actual file operations to the `messages`
6
+ # object, which is expected to respond to `save_conversation` and
7
+ # `load_conversation` methods.
8
+ #
9
+ # @example Save a conversation
10
+ # chat.save_conversation('my_chat.json')
11
+ #
12
+ # @example Load a conversation
13
+ # chat.load_conversation('my_chat.json')
14
+ module OllamaChat::Conversation
15
+ # Saves the current conversation to a JSON file.
16
+ #
17
+ # This method delegates to the `messages` object's `save_conversation`
18
+ # method, which handles the actual serialization of messages into JSON
19
+ # format.
20
+ #
21
+ # @param filename [String] The path to the file where the conversation should
22
+ # be saved
23
+ #
24
+ # @example Save conversation with explicit filename
25
+ # chat.save_conversation('conversations/2023-10-15_my_session.json')
26
+ def save_conversation(filename)
27
+ if messages.save_conversation(filename)
28
+ STDOUT.puts "Saved conversation to #{filename.inspect}."
29
+ else
30
+ STDERR.puts "Saving conversation to #{filename.inspect} failed."
31
+ end
32
+ end
33
+
34
+ # Loads a conversation from a JSON file and replaces the current message
35
+ # history.
36
+ #
37
+ # This method delegates to the `messages` object's `load_conversation`
38
+ # method, which handles deserialization of messages from JSON format. After
39
+ # loading, if there are more than one message, it lists the last two messages
40
+ # for confirmation.
41
+ #
42
+ # @param filename [String] The path to the file containing the conversation
43
+ # to load
44
+ #
45
+ # @example Load a conversation from a specific file
46
+ # chat.load_conversation('conversations/2023-10-15_my_session.json')
47
+ def load_conversation(filename)
48
+ success = messages.load_conversation(filename)
49
+ if messages.size > 1
50
+ messages.list_conversation(2)
51
+ end
52
+ if success
53
+ STDOUT.puts "Loaded conversation from #{filename.inspect}."
54
+ else
55
+ STDERR.puts "Loading conversation from #{filename.inspect} failed."
56
+ end
57
+ end
58
+ end
@@ -141,7 +141,7 @@ module OllamaChat::Information
141
141
  /summarize [n] source summarize the source's content in n words
142
142
  /embedding toggle embedding paused or not
143
143
  /embed source embed the source's content
144
- /web [n] query query web search & return n or 1 results
144
+ /web [n] query query web & return n(=1) results (policy: #@document_policy)
145
145
  /links [clear] display (or clear) links used in the chat
146
146
  /save filename store conversation messages
147
147
  /load filename load conversation messages
@@ -24,7 +24,11 @@ prompts:
24
24
  web_embed: |
25
25
  Answer the the query %{query} using the provided chunks.
26
26
  web_import: |
27
- Answer the the query %{query} using these imported source:
27
+ Answer the query %{query} using these imported sources:
28
+
29
+ %{results}
30
+ web_summarize: |
31
+ Answer the query %{query} using these summarized sources:
28
32
 
29
33
  %{results}
30
34
  location: You are at %{location_name}, %{location_decimal_degrees}, on %{localtime}, preferring %{units}
@@ -1,6 +1,6 @@
1
1
  module OllamaChat
2
2
  # OllamaChat version
3
- VERSION = '0.0.28'
3
+ VERSION = '0.0.29'
4
4
  VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
5
5
  VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
6
6
  VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/lib/ollama_chat.rb CHANGED
@@ -33,4 +33,5 @@ require 'ollama_chat/document_cache'
33
33
  require 'ollama_chat/history'
34
34
  require 'ollama_chat/server_socket'
35
35
  require 'ollama_chat/kramdown_ansi'
36
+ require 'ollama_chat/conversation'
36
37
  require 'ollama_chat/chat'
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
1
1
  # -*- encoding: utf-8 -*-
2
- # stub: ollama_chat 0.0.28 ruby lib
2
+ # stub: ollama_chat 0.0.29 ruby lib
3
3
 
4
4
  Gem::Specification.new do |s|
5
5
  s.name = "ollama_chat".freeze
6
- s.version = "0.0.28".freeze
6
+ s.version = "0.0.29".freeze
7
7
 
8
8
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
9
9
  s.require_paths = ["lib".freeze]
@@ -12,8 +12,8 @@ Gem::Specification.new do |s|
12
12
  s.description = "The app provides a command-line interface (CLI) to an Ollama AI model,\nallowing users to engage in text-based conversations and generate\nhuman-like responses. Users can import data from local files or web pages,\nwhich are then processed through three different modes: fully importing the\ncontent into the conversation context, summarizing the information for\nconcise reference, or storing it in an embedding vector database for later\nretrieval based on the conversation.\n".freeze
13
13
  s.email = "flori@ping.de".freeze
14
14
  s.executables = ["ollama_chat".freeze, "ollama_chat_send".freeze]
15
- s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
- s.files = [".utilsrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, "spec/assets/searxng.json".freeze, 
"spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
15
+ s.extra_rdoc_files = ["README.md".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze]
16
+ s.files = [".utilsrc".freeze, "CHANGES.md".freeze, "Gemfile".freeze, "README.md".freeze, "Rakefile".freeze, "bin/ollama_chat".freeze, "bin/ollama_chat_send".freeze, "config/searxng/settings.yml".freeze, "docker-compose.yml".freeze, "lib/ollama_chat.rb".freeze, "lib/ollama_chat/chat.rb".freeze, "lib/ollama_chat/clipboard.rb".freeze, "lib/ollama_chat/conversation.rb".freeze, "lib/ollama_chat/dialog.rb".freeze, "lib/ollama_chat/document_cache.rb".freeze, "lib/ollama_chat/follow_chat.rb".freeze, "lib/ollama_chat/history.rb".freeze, "lib/ollama_chat/information.rb".freeze, "lib/ollama_chat/kramdown_ansi.rb".freeze, "lib/ollama_chat/message_format.rb".freeze, "lib/ollama_chat/message_list.rb".freeze, "lib/ollama_chat/message_output.rb".freeze, "lib/ollama_chat/model_handling.rb".freeze, "lib/ollama_chat/ollama_chat_config.rb".freeze, "lib/ollama_chat/ollama_chat_config/default_config.yml".freeze, "lib/ollama_chat/parsing.rb".freeze, "lib/ollama_chat/server_socket.rb".freeze, "lib/ollama_chat/source_fetching.rb".freeze, "lib/ollama_chat/switches.rb".freeze, "lib/ollama_chat/utils.rb".freeze, "lib/ollama_chat/utils/cache_fetcher.rb".freeze, "lib/ollama_chat/utils/chooser.rb".freeze, "lib/ollama_chat/utils/fetcher.rb".freeze, "lib/ollama_chat/utils/file_argument.rb".freeze, "lib/ollama_chat/version.rb".freeze, "lib/ollama_chat/vim.rb".freeze, "lib/ollama_chat/web_searching.rb".freeze, "ollama_chat.gemspec".freeze, "redis/redis.conf".freeze, "spec/assets/api_show.json".freeze, "spec/assets/api_tags.json".freeze, "spec/assets/api_version.json".freeze, "spec/assets/conversation.json".freeze, "spec/assets/duckduckgo.html".freeze, "spec/assets/example.atom".freeze, "spec/assets/example.csv".freeze, "spec/assets/example.html".freeze, "spec/assets/example.pdf".freeze, "spec/assets/example.ps".freeze, "spec/assets/example.rb".freeze, "spec/assets/example.rss".freeze, "spec/assets/example.xml".freeze, "spec/assets/kitten.jpg".freeze, "spec/assets/prompt.txt".freeze, 
"spec/assets/searxng.json".freeze, "spec/ollama_chat/chat_spec.rb".freeze, "spec/ollama_chat/clipboard_spec.rb".freeze, "spec/ollama_chat/follow_chat_spec.rb".freeze, "spec/ollama_chat/information_spec.rb".freeze, "spec/ollama_chat/kramdown_ansi_spec.rb".freeze, "spec/ollama_chat/message_list_spec.rb".freeze, "spec/ollama_chat/message_output_spec.rb".freeze, "spec/ollama_chat/model_handling_spec.rb".freeze, "spec/ollama_chat/parsing_spec.rb".freeze, "spec/ollama_chat/server_socket_spec.rb".freeze, "spec/ollama_chat/source_fetching_spec.rb".freeze, "spec/ollama_chat/switches_spec.rb".freeze, "spec/ollama_chat/utils/cache_fetcher_spec.rb".freeze, "spec/ollama_chat/utils/fetcher_spec.rb".freeze, "spec/ollama_chat/utils/file_argument_spec.rb".freeze, "spec/ollama_chat/web_searching_spec.rb".freeze, "spec/spec_helper.rb".freeze, "tmp/.keep".freeze]
17
17
  s.homepage = "https://github.com/flori/ollama_chat".freeze
18
18
  s.licenses = ["MIT".freeze]
19
19
  s.rdoc_options = ["--title".freeze, "OllamaChat - A command-line interface (CLI) for interacting with an Ollama AI model.".freeze, "--main".freeze, "README.md".freeze]
@@ -102,12 +102,11 @@ describe OllamaChat::ServerSocket do
102
102
  end
103
103
 
104
104
  describe '#create_socket_server' do
105
- context 'with configured runtime_dir' do
105
+ context 'with working dir dependent socket' do
106
106
  it 'can be created with configured runtime_dir' do
107
- config = double('Config', server_socket_runtime_dir: '/custom/runtime')
107
+ config = double('Config', working_dir_dependent_socket: true)
108
108
  expect(UnixSocks::Server).to receive(:new).with(
109
- socket_name: 'ollama_chat.sock',
110
- runtime_dir: '/custom/runtime'
109
+ socket_name: /\Aollama_chat-\h{32}.sock\z/,
111
110
  ).and_return :unix_socks_server
112
111
 
113
112
  result = OllamaChat::ServerSocket.create_socket_server(config: config)
@@ -115,9 +114,9 @@ describe OllamaChat::ServerSocket do
115
114
  end
116
115
  end
117
116
 
118
- context 'with default runtime_dir' do
117
+ context 'with default runtime_dir and name' do
119
118
  it 'can be created with default runtime_dir' do
120
- config = double('Config', server_socket_runtime_dir: nil)
119
+ config = double('Config', working_dir_dependent_socket: false)
121
120
  expect(UnixSocks::Server).to receive(:new).with(
122
121
  socket_name: 'ollama_chat.sock'
123
122
  ).and_return :unix_socks_server
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: ollama_chat
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.0.28
4
+ version: 0.0.29
5
5
  platform: ruby
6
6
  authors:
7
7
  - Florian Frank
@@ -389,6 +389,7 @@ extra_rdoc_files:
389
389
  - lib/ollama_chat.rb
390
390
  - lib/ollama_chat/chat.rb
391
391
  - lib/ollama_chat/clipboard.rb
392
+ - lib/ollama_chat/conversation.rb
392
393
  - lib/ollama_chat/dialog.rb
393
394
  - lib/ollama_chat/document_cache.rb
394
395
  - lib/ollama_chat/follow_chat.rb
@@ -425,6 +426,7 @@ files:
425
426
  - lib/ollama_chat.rb
426
427
  - lib/ollama_chat/chat.rb
427
428
  - lib/ollama_chat/clipboard.rb
429
+ - lib/ollama_chat/conversation.rb
428
430
  - lib/ollama_chat/dialog.rb
429
431
  - lib/ollama_chat/document_cache.rb
430
432
  - lib/ollama_chat/follow_chat.rb