ollama_chat 0.0.15 → 0.0.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: c05a3e4579e75ba1c724d7128581de7b96dac914c2bd633368920a207be24b3a
-  data.tar.gz: 6d4cf892e3a3ec3d00dc1b4cf00e686f311240893a5cd2af561afdb7ea6946de
+  metadata.gz: 1ca8b4d15da972d1600ccab9349f1852ccb75f2733cd9a85ccfb57f2890cb786
+  data.tar.gz: 3ec2450f156b98ac722b58358de3190e58d30ebf6f8e09dd435804b409c1e1e0
 SHA512:
-  metadata.gz: 9afec82d69645e4b8400b63c8f5bf51bd4b4d0471cb59c419b304e9a532fe874dce33f403625abb4fac4543892ed5c0e40b476a56da7023eda4ea99a15ac6275
-  data.tar.gz: 611da289b8ac3d9bef78d7453418ec9e90123e5f7af36d386b62a7646a578adbe90ebc5396a09248152d117085ee0598a743cf4746e4e1e9a402a6e32475bd02
+  metadata.gz: 98a1335f95549196c6a6a486465c3dad12694b8006c18a7ff2619be8a866b826869ed35dde99d810fea330101e67361142000ac705e5784450bc9ae11952f8d7
+  data.tar.gz: dce73464d416d095eee8d86723ca241cc7affdbc87b4472bf4e0cf0caf084f52d2c47810fd3fe8ec67cb883df85e58098caae1ad45c4088850d98cc37705c2b0
data/CHANGES.md CHANGED
@@ -1,5 +1,30 @@
 # Changes
 
+## 2025-07-14 v0.0.17
+
+* Implement Pager Support for List Command
+* Add simple command completion to chat
+* Improved chat link generation
+  + Changed `record.tags.first` to have prefix `?#` before the tag
+
+## 2025-07-10 v0.0.16
+
+- **New Features**
+  - Added `-f CONFIG` option to `ollama_chat_send` for specifying configuration files.
+  - Introduced `server_socket_runtime_dir` setting in the default config, and
+    make it default to the current directory, allowing for a per directory chat
+    to receive server socket messages.
+
+- **Enhancements**
+  - Improved logging with debug output for received server socket messages.
+  - Refactored server socket handling:
+    - Created `create_socket_server` method for UnixSocks setup with configurable runtime directories.
+    - Updated `send_to_server_socket` and `init_server_socket` methods to use the new helper.
+  - Changed evaluation rate metrics from 'c/s' to 't/s' for better clarity.
+
+- **Documentation**
+  - Added additional documentation for key classes and methods in `FollowChat`.
+
 ## 2025-07-02 v0.0.15
 
 - **Enhanced `ollama_chat_send` and Unix Domain Socket Support:**
data/Rakefile CHANGED
@@ -32,13 +32,13 @@ GemHadar do
   dependency 'excon', '~> 1.0'
   dependency 'ollama-ruby', '~> 1.2'
   dependency 'documentrix', '~> 0.0', '>= 0.0.2'
-  dependency 'unix_socks'
+  dependency 'unix_socks', '>= 0.0.1'
   dependency 'rss', '~> 0.3'
   dependency 'term-ansicolor', '~> 1.11'
   dependency 'redis', '~> 5.0'
   dependency 'mime-types', '~> 3.0'
   dependency 'reverse_markdown', '~> 3.0'
-  dependency 'xdg', '~> 7.0'
+  dependency 'xdg'
   dependency 'kramdown-ansi', '~> 0.0', '>= 0.0.1'
   dependency 'complex_config', '~> 0.22', '>= 0.22.2'
   dependency 'tins', '~> 1.34'
data/VERSION CHANGED
@@ -1 +1 @@
-0.0.15
+0.0.17
data/bin/ollama_chat_send CHANGED
@@ -5,22 +5,24 @@ require 'tins/go'
 include Tins::GO
 
 
-opts = go 'rth', ARGV
+opts = go 'f:rth', ARGV
 
 def usage(rc = 0)
   puts <<~EOT
     Usage: #{File.basename($0)} [OPTIONS]
 
     Options:
-      -r     Wait for the response from Ollama Chat and output it
-      -t     Send input as terminal input including commands, e. g. /import
-      -h     Show this help message
+      -r         Wait for the response from Ollama Chat and output it
+      -t         Send input as terminal input including commands, e. g. /import
+      -f CONFIG  file to read
+      -h         Show this help message
 
     Send data to a running Ollame Chat client via standard input.
   EOT
   exit rc
 end
 
+config = OllamaChat::OllamaChatConfig.new(opts[?f]).config
 opts[?h] and usage
 begin
   type = if opts[?t]
@@ -28,7 +30,7 @@ begin
   else
     opts[?r] ? :socket_input_with_response : :socket_input
   end
-  response = OllamaChat::ServerSocket.send_to_server_socket(STDIN.read, type:)
+  response = OllamaChat::ServerSocket.send_to_server_socket(STDIN.read, type:, config:)
   type == :socket_input_with_response and puts response.content
 rescue => e
   warn "Caught #{e.class}: #{e}"
@@ -348,13 +348,15 @@ class OllamaChat::Chat
     @parse_content = true
     type = :terminal_input
     input_prompt = bold { color(172) { message_type(@images) + " user" } } + bold { "> " }
-
     begin
-      content = Reline.readline(input_prompt, true)&.chomp
+      content = enable_command_completion do
+        Reline.readline(input_prompt, true)&.chomp
+      end
     rescue Interrupt
       if message = server_socket_message
         type = message.type.full?(:to_sym) || :socket_input
         content = message.content
+        STDOUT.puts color(112) { "Received a server socket message. Processing now…" }
       else
         raise
       end
@@ -417,7 +419,7 @@ class OllamaChat::Chat
              else
                'file://%s' % File.expand_path(record.source)
              end
-      [ link, record.tags.first ]
+      [ link, ?# + record.tags.first ]
     }.uniq.map { |l, t| hyperlink(l, t) }.join(' ')
     config.debug and jj messages.to_ary
   end
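
For context on the `?#` change above: `?#` is Ruby's character literal for `#`, so collection links now show their tag as `#tag`. A tiny illustration with made-up data; the `Struct` stands in for a real document record:

```ruby
# Hypothetical record; the real objects come from the documents collection.
record = Struct.new(:source, :tags).new('docs/readme.md', %w[ documentation ])

link = 'file://%s' % File.expand_path(record.source)
tag  = ?# + record.tags.first # => "#documentation"
p [ link, tag ]
```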
@@ -525,4 +527,15 @@ class OllamaChat::Chat
       exit 1
     end
   end
+
+  def enable_command_completion(&block)
+    old = Reline.completion_proc
+    commands = display_chat_help_message.scan(/^\s*(\S+)/).inject(&:concat)
+    Reline.completion_proc = -> input {
+      commands.grep Regexp.new('\A' + Regexp.quote(input))
+    }
+    block.()
+  ensure
+    Reline.completion_proc = old
+  end
 end
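
The `enable_command_completion` helper swaps in a `Reline.completion_proc` for the duration of a single prompt and restores the previous proc afterwards. A self-contained sketch of the same pattern; the command list here is made up rather than scanned from the gem's real help text:

```ruby
require 'reline'

# Hypothetical command list; ollama_chat builds its own by scanning
# display_chat_help_message with /^\s*(\S+)/.
COMMANDS = %w[ /copy /paste /markdown /list /quit /help ]

def with_command_completion
  old = Reline.completion_proc
  Reline.completion_proc = ->(input) {
    # Offer every command that starts with what has been typed so far.
    COMMANDS.grep(Regexp.new('\A' + Regexp.quote(input)))
  }
  yield
ensure
  Reline.completion_proc = old # always restore the previous completion proc
end

line = with_command_completion { Reline.readline('> ', true)&.chomp }
puts "read: #{line.inspect}"
```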
@@ -4,6 +4,15 @@ class OllamaChat::FollowChat
   include Term::ANSIColor
   include OllamaChat::MessageFormat
 
+
+  # Initializes a new instance of OllamaChat::FollowChat.
+  #
+  # @param [OllamaChat::Chat] chat The chat object, which represents the conversation context.
+  # @param [#to_a] messages A collection of message objects, representing the conversation history.
+  # @param [String] voice (optional) to speek with if any.
+  # @param [IO] output (optional) The output stream where terminal output should be printed. Defaults to STDOUT.
+  #
+  # @return [OllamaChat::FollowChat] A new instance of OllamaChat::FollowChat.
   def initialize(chat:, messages:, voice: nil, output: STDOUT)
     super(output:)
     @chat = chat
@@ -13,8 +22,28 @@
     @user = nil
   end
 
+  # Returns the conversation history (an array of message objects).
+  #
+  # @return [OllamaChat::MessageList<Ollama::Message>] The array of messages in the conversation.
   attr_reader :messages
 
+  # Invokes the chat flow based on the provided Ollama server response.
+  #
+  # The response is expected to be a parsed JSON object containing information
+  # about the user input and the assistant's response.
+  #
+  # If the response indicates an assistant message, this method:
+  # 1. Ensures that an assistant response exists in the message history (if not already present).
+  # 2. Updates the last message with the new content and thinking (if applicable).
+  # 3. Displays the formatted terminal output for the user.
+  # 4. Outputs the voice response (if configured).
+  #
+  # Regardless of whether an assistant message is present, this method also
+  # outputs evaluation statistics (if applicable).
+  #
+  # @param [Ollama::Response] response The parsed JSON response from the Ollama server.
+  #
+  # @return [OllamaChat::FollowChat] The current instance for method chaining.
   def call(response)
     debug_output(response)
 
@@ -73,10 +102,10 @@
     stats_text = {
       eval_duration: Tins::Duration.new(eval_duration),
       eval_count: response.eval_count.to_i,
-      eval_rate: bold { "%.2f c/s" % (response.eval_count.to_i / eval_duration) } + color(111),
+      eval_rate: bold { "%.2f t/s" % (response.eval_count.to_i / eval_duration) } + color(111),
       prompt_eval_duration: Tins::Duration.new(prompt_eval_duration),
       prompt_eval_count: response.prompt_eval_count.to_i,
-      prompt_eval_rate: bold { "%.2f c/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
+      prompt_eval_rate: bold { "%.2f t/s" % (response.prompt_eval_count.to_i / prompt_eval_duration) } + color(111),
       total_duration: Tins::Duration.new(response.total_duration / 1e9),
       load_duration: Tins::Duration.new(response.load_duration / 1e9),
     }.map { _1 * ?= } * ' '
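
The switch from 'c/s' to 't/s' only relabels the unit; the value is still `eval_count` divided by the evaluation time in seconds, with Ollama reporting durations in nanoseconds. A quick sketch of the arithmetic with invented numbers:

```ruby
# Invented sample values in the shape Ollama reports them.
eval_count       = 256            # tokens generated
eval_duration_ns = 3_200_000_000  # time spent generating, in nanoseconds

eval_duration = eval_duration_ns / 1e9          # => 3.2 (seconds)
puts "%.2f t/s" % (eval_count / eval_duration)  # => "80.00 t/s"
```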
@@ -58,8 +58,8 @@ module OllamaChat::Information
     nil
   end
 
-  def display_chat_help
-    STDOUT.puts <<~EOT
+  private def display_chat_help_message
+    <<~EOT
       /copy to copy last response to clipboard
       /paste to paste content
       /markdown toggle markdown output
@@ -89,6 +89,10 @@
       /quit to quit
       /help to view this help
     EOT
+  end
+
+  def display_chat_help
+    STDOUT.puts display_chat_help_message
     nil
   end
 
@@ -99,31 +99,33 @@ class OllamaChat::MessageList
   # @return [ OllamaChat::MessageList ]
   def list_conversation(last = nil)
     last = (last || @messages.size).clamp(0, @messages.size)
-    @messages[-last..-1].to_a.each do |m|
-      role_color = case m.role
-                   when 'user' then 172
-                   when 'assistant' then 111
-                   when 'system' then 213
-                   else 210
+    use_pager do |output|
+      @messages[-last..-1].to_a.each do |m|
+        role_color = case m.role
+                     when 'user' then 172
+                     when 'assistant' then 111
+                     when 'system' then 213
+                     else 210
+                     end
+        thinking = if @chat.think.on?
+                     think_annotate do
+                       m.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
+                     end
                    end
-      thinking = if @chat.think.on?
-                   think_annotate do
-                     m.thinking.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
-                   end
-                 end
-      content = m.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
-      message_text = message_type(m.images) + " "
-      message_text += bold { color(role_color) { m.role } }
-      if thinking
-        message_text += [ ?:, thinking, talk_annotate { content } ].compact.
-          map { _1.chomp } * ?\n
-      else
-        message_text += ":\n#{content}"
+        content = m.content.full? { @chat.markdown.on? ? Kramdown::ANSI.parse(_1) : _1 }
+        message_text = message_type(m.images) + " "
+        message_text += bold { color(role_color) { m.role } }
+        if thinking
+          message_text += [ ?:, thinking, talk_annotate { content } ].compact.
+            map { _1.chomp } * ?\n
+        else
+          message_text += ":\n#{content}"
+        end
+        m.images.full? { |images|
+          message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
+        }
+        output.puts message_text
       end
-      m.images.full? { |images|
-        message_text += "\nImages: " + italic { images.map(&:path) * ', ' }
-      }
-      STDOUT.puts message_text
     end
     self
   end
@@ -261,4 +263,22 @@
   def config
     @chat.config
   end
+
+  def determine_pager_command
+    default_pager = ENV['PAGER'].full?
+    if fallback_pager = `which less`.chomp.full? || `which more`.chomp.full?
+      fallback_pager << ' -r'
+    end
+    default_pager || fallback_pager
+  end
+
+  def use_pager
+    command = determine_pager_command
+    output_buffer = StringIO.new
+    yield output_buffer
+    messages = output_buffer.string
+    Kramdown::ANSI::Pager.pager(command:, lines: messages.count(?\n)) do |output|
+      output.puts messages
+    end
+  end
 end
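
`use_pager` buffers the rendered conversation in a `StringIO` and hands it to `Kramdown::ANSI::Pager.pager`, which only involves the pager when the line count calls for it. A minimal sketch of the same buffering pattern; the require path and sample text are assumptions, not taken from the gem:

```ruby
require 'stringio'
require 'kramdown/ansi/pager' # require path assumed for the kramdown-ansi gem

pager_command = ENV['PAGER'] || 'less -r' # crude stand-in for determine_pager_command

buffer = StringIO.new
100.times { |i| buffer.puts "line #{i}" } # stand-in for the rendered messages
text = buffer.string

# Pages the block's output through the command when needed, otherwise prints directly.
Kramdown::ANSI::Pager.pager(command: pager_command, lines: text.count(?\n)) do |output|
  output.puts text
end
```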
@@ -61,6 +61,7 @@ redis:
     url: <%= ENV.fetch('REDIS_EXPIRING_URL', 'null') %>
     ex: 86400
 chat_history_filename: <%= ENV.fetch('OLLAMA_CHAT_HISTORY', '~/.ollama_chat_history') %>
+server_socket_runtime_dir: .
 debug: <%= ENV['OLLAMA_CHAT_DEBUG'].to_i == 1 ? true : false %>
 request_headers:
   Accept: 'text/*,application/*,image/*'
@@ -8,8 +8,8 @@ module OllamaChat::ServerSocket
     #
     # @return [ String, NilClass ] the response from the server if type is
     #   :socket_input_with_response, otherwise nil.
-    def send_to_server_socket(content, type: :socket_input)
-      server = UnixSocks::Server.new(socket_name: 'ollama_chat.sock')
+    def send_to_server_socket(content, config:, type: :socket_input)
+      server = create_socket_server(config:)
       message = { content:, type: }
       if type.to_sym == :socket_input_with_response
         return server.transmit_with_response(message)
@@ -18,6 +18,14 @@
         nil
       end
     end
+
+    def create_socket_server(config:)
+      if runtime_dir = config.server_socket_runtime_dir
+        UnixSocks::Server.new(socket_name: 'ollama_chat.sock', runtime_dir:)
+      else
+        UnixSocks::Server.new(socket_name: 'ollama_chat.sock')
+      end
+    end
   end
 
   attr_accessor :server_socket_message
@@ -33,7 +41,7 @@
   # @return [ nil ] This method does not return any value, it only sets up the
   #   server socket and kills the process when a message is received.
   def init_server_socket
-    server = UnixSocks::Server.new(socket_name: 'ollama_chat.sock')
+    server = OllamaChat::ServerSocket.create_socket_server(config:)
     server.receive_in_background do |message|
       self.server_socket_message = message
       Process.kill :INT, $$
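
With `server_socket_runtime_dir` defaulting to `.`, every working directory can host its own `ollama_chat.sock`, so chats started in different projects receive their own socket messages. A small sketch of the helper's effect, using only the unix_socks calls that appear in the diff; the wrapper method name is made up:

```ruby
require 'unix_socks'

# Mirrors create_socket_server: honour a configured runtime directory,
# otherwise let UnixSocks fall back to its default runtime dir.
def socket_server_for(runtime_dir)
  if runtime_dir
    UnixSocks::Server.new(socket_name: 'ollama_chat.sock', runtime_dir:)
  else
    UnixSocks::Server.new(socket_name: 'ollama_chat.sock')
  end
end

per_directory = socket_server_for('.') # socket lives next to the current project
global        = socket_server_for(nil) # socket in the library's default location

# A sender would then transmit a message hash, as send_to_server_socket does:
# per_directory.transmit(content: 'hello', type: :socket_input)
```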
@@ -1,6 +1,6 @@
 module OllamaChat
   # OllamaChat version
-  VERSION = '0.0.15'
+  VERSION = '0.0.17'
   VERSION_ARRAY = VERSION.split('.').map(&:to_i) # :nodoc:
   VERSION_MAJOR = VERSION_ARRAY[0] # :nodoc:
   VERSION_MINOR = VERSION_ARRAY[1] # :nodoc:
data/ollama_chat.gemspec CHANGED
@@ -1,9 +1,9 @@
 # -*- encoding: utf-8 -*-
-# stub: ollama_chat 0.0.15 ruby lib
+# stub: ollama_chat 0.0.17 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "ollama_chat".freeze
-  s.version = "0.0.15".freeze
+  s.version = "0.0.17".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
@@ -34,13 +34,13 @@ Gem::Specification.new do |s|
   s.add_runtime_dependency(%q<excon>.freeze, ["~> 1.0".freeze])
   s.add_runtime_dependency(%q<ollama-ruby>.freeze, ["~> 1.2".freeze])
   s.add_runtime_dependency(%q<documentrix>.freeze, ["~> 0.0".freeze, ">= 0.0.2".freeze])
-  s.add_runtime_dependency(%q<unix_socks>.freeze, [">= 0".freeze])
+  s.add_runtime_dependency(%q<unix_socks>.freeze, [">= 0.0.1".freeze])
   s.add_runtime_dependency(%q<rss>.freeze, ["~> 0.3".freeze])
   s.add_runtime_dependency(%q<term-ansicolor>.freeze, ["~> 1.11".freeze])
   s.add_runtime_dependency(%q<redis>.freeze, ["~> 5.0".freeze])
   s.add_runtime_dependency(%q<mime-types>.freeze, ["~> 3.0".freeze])
   s.add_runtime_dependency(%q<reverse_markdown>.freeze, ["~> 3.0".freeze])
-  s.add_runtime_dependency(%q<xdg>.freeze, ["~> 7.0".freeze])
+  s.add_runtime_dependency(%q<xdg>.freeze, [">= 0".freeze])
   s.add_runtime_dependency(%q<kramdown-ansi>.freeze, ["~> 0.0".freeze, ">= 0.0.1".freeze])
   s.add_runtime_dependency(%q<complex_config>.freeze, ["~> 0.22".freeze, ">= 0.22.2".freeze])
   s.add_runtime_dependency(%q<tins>.freeze, ["~> 1.34".freeze])
@@ -10,7 +10,8 @@ RSpec.describe OllamaChat::MessageList do
       units: 'SI (International System of Units)'
     ),
     prompts: double(
-      location: 'You are at %{location_name} (%{location_decimal_degrees}), on %{localtime}, preferring %{units}'
+      location: 'You are at %{location_name} (%{location_decimal_degrees}),' \
+        ' on %{localtime}, preferring %{units}'
     ),
     system_prompts: double(
       assistant?: 'You are a helpful assistant.'
@@ -64,32 +65,60 @@
     FileUtils.rm_f 'tmp/test-conversation.json'
   end
 
-  it 'can list conversations without thinking' do
-    expect(chat).to receive(:markdown).
-      and_return(double(on?: true)).at_least(:once)
-    expect(chat).to receive(:think).
-      and_return(double(on?: false)).at_least(:once)
-    list << Ollama::Message.new(role: 'user', content: 'world')
-    expect(STDOUT).to receive(:puts).
-      with("📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n")
-    expect(STDOUT).to receive(:puts).
-      with("📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n")
-    list.list_conversation
-  end
-
-  it 'can list conversations with thinking' do
-    expect(chat).to receive(:markdown).
-      and_return(double(on?: true)).at_least(:once)
-    expect(chat).to receive(:think).
-      and_return(double(on?: true)).at_least(:once)
-    expect(STDOUT).to receive(:puts).
-      with("📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\n💭\nI need to say something nice…\n\n💬\nhello\n")
-    expect(STDOUT).to receive(:puts).
-      with("📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n")
-    list.set_system_prompt nil
-    list << Ollama::Message.new(role: 'system', content: 'hello', thinking: 'I need to say something nice…')
-    list << Ollama::Message.new(role: 'user', content: 'world')
-    list.list_conversation
+  context 'without pager' do
+    before do
+      expect(list).to receive(:determine_pager_command).and_return nil
+    end
+
+    it 'can list conversations without thinking' do
+      expect(chat).to receive(:markdown).
+        and_return(double(on?: true)).at_least(:once)
+      expect(chat).to receive(:think).
+        and_return(double(on?: false)).at_least(:once)
+      list << Ollama::Message.new(role: 'user', content: 'world')
+      expect(STDOUT).to receive(:puts).
+        with(
+          "📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\nhello\n" \
+          "📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n"
+        )
+      list.list_conversation
+    end
+
+    it 'can list conversations with thinking' do
+      expect(chat).to receive(:markdown).
+        and_return(double(on?: true)).at_least(:once)
+      expect(chat).to receive(:think).
+        and_return(double(on?: true)).at_least(:once)
+      expect(STDOUT).to receive(:puts).
+        with(
+          "📨 \e[1m\e[38;5;213msystem\e[0m\e[0m:\n" \
+          "💭\nI need to say something nice…\n\n💬\nhello\n" \
+          "📨 \e[1m\e[38;5;172muser\e[0m\e[0m:\nworld\n"
+        )
+      list.set_system_prompt nil
+      list << Ollama::Message.new(
+        role: 'system', content: 'hello',
+        thinking: 'I need to say something nice…'
+      )
+      list << Ollama::Message.new(role: 'user', content: 'world')
+      list.list_conversation
+    end
+  end
+
+  context 'with pager' do
+    before do
+      expect(list).to receive(:determine_pager_command).and_return 'true'
+      expect(Tins::Terminal).to receive(:lines).and_return 1
+    end
+
+    it 'can list conversations' do
+      expect(chat).to receive(:markdown).
+        and_return(double(on?: true)).at_least(:once)
+      expect(chat).to receive(:think).
+        and_return(double(on?: false)).at_least(:once)
+      list << Ollama::Message.new(role: 'user', content: 'world')
+      list.list_conversation
+    end
   end
 
   it 'can show_system_prompt' do
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: ollama_chat
 version: !ruby/object:Gem::Version
-  version: 0.0.15
+  version: 0.0.17
 platform: ruby
 authors:
 - Florian Frank
@@ -161,14 +161,14 @@ dependencies:
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '0'
+        version: 0.0.1
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: '0'
+        version: 0.0.1
 - !ruby/object:Gem::Dependency
   name: rss
   requirement: !ruby/object:Gem::Requirement
@@ -243,16 +243,16 @@ dependencies:
   name: xdg
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '7.0'
+        version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '7.0'
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: kramdown-ansi
   requirement: !ruby/object:Gem::Requirement