durable-llm 0.1.4 → 0.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. checksums.yaml +4 -4
  2. data/.envrc +7 -0
  3. data/CHANGELOG.md +5 -0
  4. data/CLI.md +0 -2
  5. data/Gemfile +7 -9
  6. data/README.md +564 -30
  7. data/Rakefile +16 -6
  8. data/devenv.lock +171 -0
  9. data/devenv.nix +12 -0
  10. data/devenv.yaml +8 -0
  11. data/durable-llm.gemspec +52 -0
  12. data/examples/openai_quick_complete.rb +4 -2
  13. data/lib/durable/llm/cli.rb +218 -22
  14. data/lib/durable/llm/client.rb +228 -8
  15. data/lib/durable/llm/configuration.rb +163 -10
  16. data/lib/durable/llm/convenience.rb +102 -0
  17. data/lib/durable/llm/errors.rb +185 -0
  18. data/lib/durable/llm/provider_utilities.rb +201 -0
  19. data/lib/durable/llm/providers/anthropic.rb +232 -24
  20. data/lib/durable/llm/providers/azure_openai.rb +347 -0
  21. data/lib/durable/llm/providers/base.rb +220 -11
  22. data/lib/durable/llm/providers/cohere.rb +157 -11
  23. data/lib/durable/llm/providers/deepseek.rb +233 -0
  24. data/lib/durable/llm/providers/fireworks.rb +304 -0
  25. data/lib/durable/llm/providers/google.rb +327 -0
  26. data/lib/durable/llm/providers/groq.rb +133 -25
  27. data/lib/durable/llm/providers/huggingface.rb +120 -17
  28. data/lib/durable/llm/providers/mistral.rb +431 -0
  29. data/lib/durable/llm/providers/openai.rb +150 -4
  30. data/lib/durable/llm/providers/opencode.rb +253 -0
  31. data/lib/durable/llm/providers/openrouter.rb +256 -0
  32. data/lib/durable/llm/providers/perplexity.rb +273 -0
  33. data/lib/durable/llm/providers/together.rb +346 -0
  34. data/lib/durable/llm/providers/xai.rb +355 -0
  35. data/lib/durable/llm/providers.rb +113 -13
  36. data/lib/durable/llm/response_helpers.rb +185 -0
  37. data/lib/durable/llm/version.rb +5 -1
  38. data/lib/durable/llm.rb +214 -1
  39. data/lib/durable.rb +29 -4
  40. data/sig/durable/llm.rbs +303 -1
  41. metadata +106 -28
  42. data/Gemfile.lock +0 -103
data/Rakefile CHANGED
@@ -1,16 +1,26 @@
  # frozen_string_literal: true

- require "bundler/gem_tasks"
- require "rake/testtask"
+ require 'bundler/gem_tasks'
+ require 'rake/testtask'

  Rake::TestTask.new(:test) do |t|
-   t.libs << "test"
-   t.libs << "lib"
-   t.test_files = FileList["test/**/test_*.rb"]
+   t.libs << 'test'
+   t.libs << 'lib'
+   t.test_files = FileList['test/**/test_*.rb']
  end

- require "rubocop/rake_task"
+ require 'rubocop/rake_task'

  RuboCop::RakeTask.new

+ begin
+   require 'yard'
+   YARD::Rake::YardocTask.new(:doc) do |t|
+     t.files = ['lib/**/*.rb']
+     t.options = ['--markup', 'markdown', '--output-dir', 'doc']
+   end
+ rescue LoadError
+   # YARD not available
+ end
+
  task default: %i[test rubocop]
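
The guarded block above registers a YARD `doc` task only when the yard gem can be loaded, so documentation builds stay optional. A sketch of driving that task from Ruby instead of the `rake doc` command line (hypothetical helper script, not part of the gem; assumes the Rakefile above is in the working directory and yard is installed):

    # build_docs.rb -- hypothetical driver, equivalent to `bundle exec rake doc`
    require 'rake'

    app = Rake.application
    app.init('rake')         # standard Rake bootstrapping
    app.load_rakefile        # evaluates ./Rakefile, defining :test, :rubocop, :doc
    Rake::Task[:doc].invoke  # runs YARD over lib/**/*.rb, writing output to doc/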
data/devenv.lock ADDED
@@ -0,0 +1,171 @@
+ {
+   "nodes": {
+     "devenv": {
+       "locked": {
+         "dir": "src/modules",
+         "lastModified": 1761922975,
+         "owner": "cachix",
+         "repo": "devenv",
+         "rev": "c9f0b47815a4895fadac87812de8a4de27e0ace1",
+         "type": "github"
+       },
+       "original": {
+         "dir": "src/modules",
+         "owner": "cachix",
+         "repo": "devenv",
+         "type": "github"
+       }
+     },
+     "flake-compat": {
+       "flake": false,
+       "locked": {
+         "lastModified": 1761588595,
+         "owner": "edolstra",
+         "repo": "flake-compat",
+         "rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
+         "type": "github"
+       },
+       "original": {
+         "owner": "edolstra",
+         "repo": "flake-compat",
+         "type": "github"
+       }
+     },
+     "flake-compat_2": {
+       "flake": false,
+       "locked": {
+         "lastModified": 1761588595,
+         "owner": "edolstra",
+         "repo": "flake-compat",
+         "rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
+         "type": "github"
+       },
+       "original": {
+         "owner": "edolstra",
+         "repo": "flake-compat",
+         "type": "github"
+       }
+     },
+     "flake-utils": {
+       "inputs": {
+         "systems": "systems"
+       },
+       "locked": {
+         "lastModified": 1731533236,
+         "owner": "numtide",
+         "repo": "flake-utils",
+         "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
+         "type": "github"
+       },
+       "original": {
+         "owner": "numtide",
+         "repo": "flake-utils",
+         "type": "github"
+       }
+     },
+     "git-hooks": {
+       "inputs": {
+         "flake-compat": "flake-compat",
+         "gitignore": "gitignore",
+         "nixpkgs": [
+           "nixpkgs"
+         ]
+       },
+       "locked": {
+         "lastModified": 1760663237,
+         "owner": "cachix",
+         "repo": "git-hooks.nix",
+         "rev": "ca5b894d3e3e151ffc1db040b6ce4dcc75d31c37",
+         "type": "github"
+       },
+       "original": {
+         "owner": "cachix",
+         "repo": "git-hooks.nix",
+         "type": "github"
+       }
+     },
+     "gitignore": {
+       "inputs": {
+         "nixpkgs": [
+           "git-hooks",
+           "nixpkgs"
+         ]
+       },
+       "locked": {
+         "lastModified": 1709087332,
+         "owner": "hercules-ci",
+         "repo": "gitignore.nix",
+         "rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
+         "type": "github"
+       },
+       "original": {
+         "owner": "hercules-ci",
+         "repo": "gitignore.nix",
+         "type": "github"
+       }
+     },
+     "nixpkgs": {
+       "locked": {
+         "lastModified": 1761313199,
+         "owner": "cachix",
+         "repo": "devenv-nixpkgs",
+         "rev": "d1c30452ebecfc55185ae6d1c983c09da0c274ff",
+         "type": "github"
+       },
+       "original": {
+         "owner": "cachix",
+         "ref": "rolling",
+         "repo": "devenv-nixpkgs",
+         "type": "github"
+       }
+     },
+     "nixpkgs-ruby": {
+       "inputs": {
+         "flake-compat": "flake-compat_2",
+         "flake-utils": "flake-utils",
+         "nixpkgs": [
+           "nixpkgs"
+         ]
+       },
+       "locked": {
+         "lastModified": 1759902829,
+         "owner": "bobvanderlinden",
+         "repo": "nixpkgs-ruby",
+         "rev": "5fba6c022a63f1e76dee4da71edddad8959f088a",
+         "type": "github"
+       },
+       "original": {
+         "owner": "bobvanderlinden",
+         "repo": "nixpkgs-ruby",
+         "type": "github"
+       }
+     },
+     "root": {
+       "inputs": {
+         "devenv": "devenv",
+         "git-hooks": "git-hooks",
+         "nixpkgs": "nixpkgs",
+         "nixpkgs-ruby": "nixpkgs-ruby",
+         "pre-commit-hooks": [
+           "git-hooks"
+         ]
+       }
+     },
+     "systems": {
+       "locked": {
+         "lastModified": 1681028828,
+         "owner": "nix-systems",
+         "repo": "default",
+         "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
+         "type": "github"
+       },
+       "original": {
+         "owner": "nix-systems",
+         "repo": "default",
+         "type": "github"
+       }
+     }
+   },
+   "root": "root",
+   "version": 7
+ }
data/devenv.nix ADDED
@@ -0,0 +1,12 @@
+ { pkgs, lib, config, inputs, ... }:
+
+ {
+
+   packages = [ pkgs.git ];
+
+   languages.ruby = {
+     enable = true;
+     version = "3.4.7";
+   };
+
+ }
data/devenv.yaml ADDED
@@ -0,0 +1,8 @@
+ inputs:
+   nixpkgs:
+     url: github:cachix/devenv-nixpkgs/rolling
+   nixpkgs-ruby:
+     url: github:bobvanderlinden/nixpkgs-ruby
+     inputs:
+       nixpkgs:
+         follows: nixpkgs
data/durable-llm.gemspec ADDED
@@ -0,0 +1,52 @@
+ # frozen_string_literal: true
+
+ require_relative 'lib/durable/llm/version'
+
+ Gem::Specification.new do |spec|
+   spec.name = 'durable-llm'
+   spec.version = Durable::Llm::VERSION
+   spec.authors = ['Durable Programming Team']
+   spec.email = ['djberube@durableprogramming.com']
+
+   spec.summary = 'A Ruby gem providing access to LLM APIs from various vendors'
+   spec.description = 'Durable-LLM is a unified interface for interacting with multiple Large Language Model APIs, simplifying integration of AI capabilities into Ruby applications.'
+   spec.homepage = 'https://github.com/durableprogramming/durable-llm'
+   spec.license = 'MIT'
+   spec.required_ruby_version = '>= 2.6.0'
+
+   spec.metadata['allowed_push_host'] = 'https://rubygems.org'
+
+   spec.metadata['homepage_uri'] = spec.homepage
+   spec.metadata['source_code_uri'] = 'https://github.com/durableprogramming/durable-llm'
+   spec.metadata['changelog_uri'] = 'https://github.com/durableprogramming/durable-llm/blob/main/CHANGELOG.md'
+
+   spec.files = Dir.chdir(__dir__) do
+     if system('git rev-parse --git-dir > /dev/null 2>&1')
+       `git ls-files -z`.split("\x0").reject do |f|
+         (File.expand_path(f) == __FILE__) || f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor])
+       end
+     else
+       Dir.glob('**/*', File::FNM_DOTMATCH).reject do |f|
+         File.directory?(f) || (File.expand_path(f) == __FILE__) || f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor])
+       end
+     end
+   end
+   spec.bindir = 'exe'
+   spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
+   spec.require_paths = ['lib']
+
+   spec.add_dependency 'event_stream_parser', '~> 1.0', '>= 1.0.0'
+   spec.add_dependency 'faraday', '>= 1.0', '< 3.0'
+   spec.add_dependency 'highline', '~> 3.1', '>= 3.1.0'
+   spec.add_dependency 'json', '~> 2.6', '>= 2.6.0'
+   spec.add_dependency 'ostruct', '~> 0.6.0'
+   spec.add_dependency 'thor', '~> 1.3', '>= 1.3.0'
+   spec.add_dependency 'zeitwerk', '~> 2.6', '>= 2.6.0'
+
+   spec.add_development_dependency 'dotenv', '~> 2.8'
+   spec.add_development_dependency 'minitest', '~> 5.0'
+   spec.add_development_dependency 'mocha', '~> 2.1'
+   spec.add_development_dependency 'rubocop', '~> 1.0'
+   spec.add_development_dependency 'vcr', '~> 6.0'
+   spec.add_development_dependency 'yard', '~> 0.9'
+ end
data/examples/openai_quick_complete.rb CHANGED
@@ -1,8 +1,10 @@
+ # frozen_string_literal: true
+
  require 'durable/llm'
  require 'durable/llm/client'

- client = Durable::Llm::Client.new(:openai, :model=> 'gpt-4')
+ client = Durable::Llm::Client.new(:openai, model: 'gpt-4')

- response = client.quick_complete("What's the capital of California?")
+ response = client.complete("What's the capital of California?")

  puts response
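
For comparison, a sketch of the streaming counterpart to this example, mirroring the `client.stream?` / `client.stream` usage visible in the CLI diff that follows (the model name, prompt, and params hash here are illustrative, not taken from the example file):

    # streaming sketch -- assumes the same Client API the CLI below relies on
    require 'durable/llm'
    require 'durable/llm/client'

    client = Durable::Llm::Client.new(:openai, model: 'gpt-4')
    params = { model: 'gpt-4',
               messages: [{ role: 'user', content: "What's the capital of California?" }] }

    if client.stream?
      client.stream(**params) do |chunk|
        print chunk        # tokens arrive incrementally
        $stdout.flush
      end
      puts
    else
      response = client.completion(**params)
      puts response.choices.first
    end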
data/lib/durable/llm/cli.rb CHANGED
@@ -1,15 +1,69 @@
+ # frozen_string_literal: true
+
+ # This file implements the command-line interface for the Durable LLM gem using Thor, providing commands for single prompts, interactive chat sessions, and listing available models. It handles provider resolution, streaming responses, model options, system prompts, and conversation management through a user-friendly CLI with support for both one-shot completions and multi-turn conversations.
+
  require 'thor'
- require 'durable/llm'
- require 'durable/llm/client'
  require 'highline'
+ require 'json'
+ require 'securerandom'
+ require 'fileutils'
+ require 'time'
+ require 'durable/llm/client'
+ require 'durable/llm/providers'

  module Durable
    module Llm
+     # Command-line interface for Durable LLM gem.
+     #
+     # Provides Thor-based CLI commands for interacting with LLM providers.
      class CLI < Thor
        def self.exit_on_failure?
          true
        end

+       CONVERSATIONS_DIR = File.expand_path('~/.durable_llm/conversations')
+       LAST_CONVERSATION_FILE = File.join(CONVERSATIONS_DIR, 'last_conversation.txt')
+
+       no_commands do
+         def conversation_file_path(id)
+           File.join(CONVERSATIONS_DIR, "#{id}.json")
+         end
+
+         def load_conversation(id)
+           path = conversation_file_path(id)
+           return nil unless File.exist?(path)
+
+           JSON.parse(File.read(path))
+         end
+
+         def save_conversation(conversation)
+           FileUtils.mkdir_p(CONVERSATIONS_DIR) unless Dir.exist?(CONVERSATIONS_DIR)
+           id = conversation['id'] || SecureRandom.uuid
+           conversation['id'] = id
+           conversation['updated_at'] = Time.now.iso8601
+           File.write(conversation_file_path(id), JSON.generate(conversation))
+           File.write(LAST_CONVERSATION_FILE, id)
+           id
+         end
+
+         def last_conversation_id
+           return nil unless File.exist?(LAST_CONVERSATION_FILE)
+
+           File.read(LAST_CONVERSATION_FILE).strip
+         end
+       end
+
+       # Run a single prompt and get a response
+       #
+       # @param prompt [Array<String>] The prompt text to send to the model
+       # @option options :model [String] The model to use (default: gpt-3.5-turbo)
+       # @option options :system [String] System prompt to set context
+       # @option options :continue [Boolean] Continue the last conversation
+       # @option options :conversation [String] Continue a specific conversation by ID
+       # @option options :no_stream [Boolean] Disable streaming responses
+       # @option options :option [Hash] Additional model-specific options
+       # @return [void] Outputs the response to stdout
+       # @raise [RuntimeError] If no provider is found for the specified model
        desc 'prompt PROMPT', 'Run a prompt'
        option :model, aliases: '-m', desc: 'Specify the model to use'
        option :system, aliases: '-s', desc: 'Set a system prompt'
@@ -27,8 +81,11 @@ module Durable
        provider_name = provider_class.name.split('::').last.downcase.to_sym
        client = Durable::Llm::Client.new(provider_name)

-       messages = []
-       messages << { role: 'system', content: options[:system] } if options[:system]
+       conversation_id = options[:conversation] || (options[:continue] ? last_conversation_id : nil)
+       conversation = conversation_id ? load_conversation(conversation_id) : nil
+
+       messages = conversation ? conversation['messages'].dup : []
+       messages << { role: 'system', content: options[:system] } if options[:system] && !conversation
        messages << { role: 'user', content: prompt.join(' ') }

        params = {
@@ -37,22 +94,55 @@ module Durable
        }
        params.merge!(options[:option]) if options[:option]

-       if options[:no_stream] || !client.stream?
-         response = client.completion(params)
-         puts response.choices.first
-       else
-         client.stream(params) do |chunk|
-           print chunk
-           $stdout.flush
+       begin
+         if options[:no_stream] || !client.stream?
+           response = client.completion(**params)
+           assistant_message = response.choices.first.to_s
+           puts assistant_message
+           messages << { role: 'assistant', content: assistant_message }
+         else
+           assistant_content = ''
+           client.stream(**params) do |chunk|
+             print chunk
+             assistant_content += chunk
+             $stdout.flush
+           end
+           messages << { role: 'assistant', content: assistant_content }
          end
+
+         # Save conversation
+         conversation_data = {
+           'id' => conversation_id,
+           'model' => model,
+           'messages' => messages,
+           'created_at' => conversation ? conversation['created_at'] : Time.now.iso8601
+         }
+         save_conversation(conversation_data)
+       rescue Durable::Llm::Error => e
+         warn "API Error: #{e.message}"
+         exit 1
+       rescue StandardError => e
+         warn "Unexpected error: #{e.message}"
+         exit 1
        end
      end

+       # Start an interactive chat session with the model
+       #
+       # @option options :model [String] The model to use (default: gpt-3.5-turbo)
+       # @option options :system [String] System prompt to set context
+       # @option options :continue [Boolean] Continue the last conversation
+       # @option options :conversation [String] Continue a specific conversation by ID
+       # @option options :no_stream [Boolean] Disable streaming responses
+       # @option options :option [Hash] Additional model-specific options
+       # @return [void] Starts interactive chat session
+       # @raise [RuntimeError] If no provider is found for the specified model
        desc 'chat', 'Start an interactive chat'
        option :model, aliases: '-m', desc: 'Specify the model to use'
        option :system, aliases: '-s', desc: 'Set a system prompt'
        option :continue, aliases: '-c', type: :boolean, desc: 'Continue the previous conversation'
        option :conversation, aliases: '--cid', desc: 'Continue a specific conversation by ID'
+       option :no_stream, type: :boolean, desc: 'Disable streaming of tokens'
        option :option, aliases: '-o', type: :hash, desc: 'Set model-specific options'
        def chat
          model = options[:model] || 'gpt-3.5-turbo'
@@ -63,8 +153,11 @@ module Durable
        provider_name = provider_class.name.split('::').last.downcase.to_sym
        client = Durable::Llm::Client.new(provider_name)

-       messages = []
-       messages << { role: 'system', content: options[:system] } if options[:system]
+       conversation_id = options[:conversation] || (options[:continue] ? last_conversation_id : nil)
+       conversation = conversation_id ? load_conversation(conversation_id) : nil
+
+       messages = conversation ? conversation['messages'].dup : []
+       messages << { role: 'system', content: options[:system] } if options[:system] && !conversation

        cli = HighLine.new

@@ -89,28 +182,131 @@ module Durable
          }
          params.merge!(options[:option]) if options[:option]

-         response = client.completion(params)
-         cli.say(response.choices.first.to_s)
-         messages << { role: 'assistant', content: response.choices.first.to_s }
+         begin
+           if options[:no_stream] || !client.stream?
+             response = client.completion(**params)
+             assistant_message = response.choices.first.to_s
+             cli.say(assistant_message)
+             messages << { role: 'assistant', content: assistant_message }
+           else
+             assistant_content = ''
+             client.stream(**params) do |chunk|
+               print chunk
+               assistant_content += chunk
+               $stdout.flush
+             end
+             puts # Add newline after streaming
+             messages << { role: 'assistant', content: assistant_content }
+           end
+
+           # Save conversation after each exchange
+           conversation_data = {
+             'id' => conversation_id,
+             'model' => model,
+             'messages' => messages,
+             'created_at' => conversation ? conversation['created_at'] : Time.now.iso8601
+           }
+           conversation_id = save_conversation(conversation_data)
+         rescue Durable::Llm::Error => e
+           cli.say("API Error: #{e.message}")
+           next
+         rescue StandardError => e
+           cli.say("Unexpected error: #{e.message}")
+           next
+         end
        end
      end

+       # List all available models from all providers
+       #
+       # @option options :options [Boolean] Show model-specific options for each model
+       # @return [void] Outputs available models to stdout
        desc 'models', 'List available models'
        option :options, type: :boolean, desc: 'Show model options'
        def models
          cli = HighLine.new
          cli.say('Available models:')

-       Durable::Llm::Providers.providers.each do |provider_name|
-         provider_class = Durable::Llm::Providers.const_get(provider_name.to_s.capitalize)
-         provider_models = provider_class.models
+       Durable::Llm::Providers.providers.each do |provider_sym|
+         provider_class = Durable::Llm::Providers.provider_class_for(provider_sym)
+         begin
+           provider_models = provider_class.models
+           cli.say("#{provider_sym.to_s.capitalize}:")
+           provider_models.each do |model|
+             cli.say("  #{model}")
+             if options[:options]
+               provider_options = provider_class.options
+               cli.say("    Options: #{provider_options.join(', ')}")
+             end
+           end
+         rescue StandardError => e
+           cli.say("#{provider_sym.to_s.capitalize}: Error loading models - #{e.message}")
+         end
+       end
+     end
+
+       # List all saved conversations
+       #
+       # @return [void] Outputs list of saved conversations to stdout
+       desc 'conversations', 'List saved conversations'
+       def conversations
+         cli = HighLine.new
+
+         unless Dir.exist?(CONVERSATIONS_DIR)
+           cli.say('No conversations found.')
+           return
+         end
+
+         conversation_files = Dir.glob("#{CONVERSATIONS_DIR}/*.json").sort_by { |f| File.mtime(f) }.reverse
+
+         if conversation_files.empty?
+           cli.say('No conversations found.')
+           return
+         end
+
+         cli.say('Saved conversations:')
+         cli.say('')
+
+         conversation_files.each do |file|
+           id = File.basename(file, '.json')
+           begin
+             conversation = JSON.parse(File.read(file))
+             model = conversation['model'] || 'unknown'
+             message_count = conversation['messages']&.length || 0
+             updated_at = conversation['updated_at'] ? Time.parse(conversation['updated_at']).strftime('%Y-%m-%d %H:%M') : 'unknown'

-         cli.say("#{provider_name.to_s.capitalize}:")
-         provider_models.each do |model|
-           cli.say("  #{model}")
+             marker = id == last_conversation_id ? ' *' : ''
+             cli.say("#{id}#{marker} - #{model} (#{message_count} messages, updated #{updated_at})")
+           rescue JSON::ParserError
+             cli.say("#{id} - [corrupted conversation file]")
          end
        end
+
+         cli.say('')
+         cli.say('* indicates the last active conversation')
+       end
+
+       # Delete a saved conversation by ID
+       #
+       # @param id [String] The conversation ID to delete
+       # @return [void] Outputs confirmation message to stdout
+       desc 'delete_conversation ID', 'Delete a saved conversation'
+       def delete_conversation(id)
+         cli = HighLine.new
+
+         path = conversation_file_path(id)
+         if File.exist?(path)
+           File.delete(path)
+           cli.say("Deleted conversation #{id}")
+
+           # Remove from last conversation if it was the last one
+           File.delete(LAST_CONVERSATION_FILE) if last_conversation_id == id && File.exist?(LAST_CONVERSATION_FILE)
+         else
+           cli.say("Conversation #{id} not found")
+         end
      end
    end
  end
+
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
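
The conversation persistence introduced in this file stores one JSON document per conversation under ~/.durable_llm/conversations/ and records the most recent conversation ID in last_conversation.txt. A sketch of the hash the CLI hands to JSON.generate in save_conversation (all values illustrative; the ID is a SecureRandom.uuid assigned on first save):

    # Illustrative contents of ~/.durable_llm/conversations/<id>.json
    conversation_data = {
      'id'         => '3f2b6c0e-9d4a-4c7e-8b1f-2a5d6e7f8a9b',
      'model'      => 'gpt-3.5-turbo',
      'messages'   => [
        { role: 'user', content: "What's the capital of California?" },
        { role: 'assistant', content: 'Sacramento.' }
      ],
      'created_at' => '2025-11-01T12:00:00-05:00',  # preserved from the first save
      'updated_at' => '2025-11-01T12:00:05-05:00'   # rewritten on every save_conversation
    }

One consequence of this design: freshly appended messages use symbol keys, while messages reloaded from disk come back with string keys, since load_conversation calls JSON.parse without symbolize_names; code consuming these message arrays has to tolerate both.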