durable-llm 0.1.4 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 41b81b7592103ed34098a473f004036c0a9ed22a6f43cd764c32d057ac840cd9
- data.tar.gz: 0caf0d378409b4d025261049c41deb769b111eb556a77843495202cc58cea430
+ metadata.gz: f8e6cc177ffcd7ac3cdd67678a2cb0bfbd97944470a88a90ca82c7e02ea40845
+ data.tar.gz: 0e595c60949cfa309e2b72aa5ab417c31966e91df76188f7c3a722ea052e16d2
  SHA512:
- metadata.gz: 395a3c6e8b542107c01122c983beb0d43918a3d5567137c9652a716ddd01d89b7d8399e3e00f7059fde2bca82e80141063bf1dc15602bb9c665304aa4a393505
- data.tar.gz: af5ea2f180fdbac3cd52a6f856f37b419b1568f93c03aad36df0906ab79c336b5091372c4b048dcc4307e0984d8ed66f72cc1b047fd295ca9174e1f2c784bdf2
+ metadata.gz: b8fed0d23edcc73a48613681f528f5a7fe8575ef7faf7be34a897d91cfcded505df1de1723becd420394269f11856ea857df09872423c529361d06995e19bcbe
+ data.tar.gz: 6e1a7db706eced060c26f1bbbd48a2dd57103195b633971a52df48d0fae6345080b4b70b91dc41662c19dd61c67c39718423a8b96589ea1fb34083278a6b6f02
data/.envrc ADDED
@@ -0,0 +1,7 @@
+ export DIRENV_WARN_TIMEOUT=20s
+
+ eval "$(devenv direnvrc)"
+
+ # The use_devenv function supports passing flags to the devenv command
+ # For example: use devenv --impure --option services.postgres.enable:bool true
+ use devenv
data/CHANGELOG.md CHANGED
@@ -1,5 +1,10 @@
  ## [Unreleased]

+ ## [0.1.4] - 2024-10-13
+
+ - Enhanced version.rb with Version class for semantic version parsing and comparison
+ - Added comprehensive tests for version functionality
+
  ## [0.1.0] - 2024-10-09

  - Initial release
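The 0.1.4 entry references a `Version` class in version.rb for semantic version parsing and comparison, but version.rb itself is not part of this diff. As a rough sketch of what such a class typically looks like in Ruby (names and structure here are assumptions, not the gem's actual code):

```ruby
# Hypothetical sketch only; the gem's real Version class is not shown in this diff.
class Version
  include Comparable

  attr_reader :major, :minor, :patch

  def initialize(version_string)
    # Parse "MAJOR.MINOR.PATCH", defaulting missing segments to 0.
    parts = version_string.to_s.split('.').map(&:to_i)
    @major = parts[0] || 0
    @minor = parts[1] || 0
    @patch = parts[2] || 0
  end

  # Comparable derives <, <=, ==, >=, > from this component-wise comparison.
  def <=>(other)
    [major, minor, patch] <=> [other.major, other.minor, other.patch]
  end

  def to_s
    "#{major}.#{minor}.#{patch}"
  end
end

Version.new('0.1.5') > Version.new('0.1.4') # => true
```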
data/Gemfile CHANGED
@@ -1,20 +1,18 @@
  # frozen_string_literal: true

- source "https://rubygems.org"
+ source 'https://rubygems.org'

  # Specify your gem's dependencies in durable-llm.gemspec
  gemspec

- gem "rake", "~> 13.0"
+ gem 'rake', '~> 13.0'

- gem "minitest", "~> 5.0"
+ gem 'minitest', '~> 5.0'

- gem "rubocop", "~> 1.21"
+ gem 'rubocop', '~> 1.21'

- gem "ruby-openai", "~> 7.1"
+ gem 'ruby-openai', '~> 7.1'

- gem "thor", "~> 1.3"
+ gem 'thor', '~> 1.3'

-
-
- gem "webmock", "~> 3.24"
+ gem 'webmock', '~> 3.24'
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
    remote: .
    specs:
-     durable-llm (0.1.3)
+     durable-llm (0.1.5)
      event_stream_parser (~> 1.0)
      faraday (> 1.0)
      highline (~> 3.1)
@@ -93,11 +93,11 @@ DEPENDENCIES
    minitest (~> 5.0)
    mocha (~> 2.1)
    rake (~> 13.0)
-   rubocop (~> 1.21)
+   rubocop (~> 1.21, ~> 1.0)
    ruby-openai (~> 7.1)
    thor (~> 1.3)
    vcr (~> 6.0)
    webmock (~> 3.24)

  BUNDLED WITH
-    2.4.10
+    2.7.1
data/README.md CHANGED
@@ -54,6 +54,7 @@ puts response.choices.first.message.content
  - Anthropic
  - Grok
  - Huggingface
+ - Cohere

  ## Configuration

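With Cohere added to the provider list, usage presumably follows the same pattern as the other providers through the unified client, per the example script later in this diff. A hedged sketch only: the `:cohere` symbol follows the provider-symbol convention visible in the CLI code below, and 'command-r' is a placeholder model name, not taken from this package:

```ruby
require 'durable/llm'
require 'durable/llm/client'

# Assumption: :cohere resolves like the other provider symbols (e.g. :openai);
# 'command-r' is an illustrative model name, not confirmed by this diff.
client = Durable::Llm::Client.new(:cohere, model: 'command-r')

response = client.quick_complete('Name the capital of California.')
puts response
```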
data/Rakefile CHANGED
@@ -1,15 +1,15 @@
  # frozen_string_literal: true

- require "bundler/gem_tasks"
- require "rake/testtask"
+ require 'bundler/gem_tasks'
+ require 'rake/testtask'

  Rake::TestTask.new(:test) do |t|
-   t.libs << "test"
-   t.libs << "lib"
-   t.test_files = FileList["test/**/test_*.rb"]
+   t.libs << 'test'
+   t.libs << 'lib'
+   t.test_files = FileList['test/**/test_*.rb']
  end

- require "rubocop/rake_task"
+ require 'rubocop/rake_task'

  RuboCop::RakeTask.new

data/devenv.lock ADDED
@@ -0,0 +1,103 @@
+ {
+   "nodes": {
+     "devenv": {
+       "locked": {
+         "dir": "src/modules",
+         "lastModified": 1760162706,
+         "owner": "cachix",
+         "repo": "devenv",
+         "rev": "0d5ad578728fe4bce66eb4398b8b1e66deceb4e4",
+         "type": "github"
+       },
+       "original": {
+         "dir": "src/modules",
+         "owner": "cachix",
+         "repo": "devenv",
+         "type": "github"
+       }
+     },
+     "flake-compat": {
+       "flake": false,
+       "locked": {
+         "lastModified": 1747046372,
+         "owner": "edolstra",
+         "repo": "flake-compat",
+         "rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
+         "type": "github"
+       },
+       "original": {
+         "owner": "edolstra",
+         "repo": "flake-compat",
+         "type": "github"
+       }
+     },
+     "git-hooks": {
+       "inputs": {
+         "flake-compat": "flake-compat",
+         "gitignore": "gitignore",
+         "nixpkgs": [
+           "nixpkgs"
+         ]
+       },
+       "locked": {
+         "lastModified": 1759523803,
+         "owner": "cachix",
+         "repo": "git-hooks.nix",
+         "rev": "cfc9f7bb163ad8542029d303e599c0f7eee09835",
+         "type": "github"
+       },
+       "original": {
+         "owner": "cachix",
+         "repo": "git-hooks.nix",
+         "type": "github"
+       }
+     },
+     "gitignore": {
+       "inputs": {
+         "nixpkgs": [
+           "git-hooks",
+           "nixpkgs"
+         ]
+       },
+       "locked": {
+         "lastModified": 1709087332,
+         "owner": "hercules-ci",
+         "repo": "gitignore.nix",
+         "rev": "637db329424fd7e46cf4185293b9cc8c88c95394",
+         "type": "github"
+       },
+       "original": {
+         "owner": "hercules-ci",
+         "repo": "gitignore.nix",
+         "type": "github"
+       }
+     },
+     "nixpkgs": {
+       "locked": {
+         "lastModified": 1758532697,
+         "owner": "cachix",
+         "repo": "devenv-nixpkgs",
+         "rev": "207a4cb0e1253c7658c6736becc6eb9cace1f25f",
+         "type": "github"
+       },
+       "original": {
+         "owner": "cachix",
+         "ref": "rolling",
+         "repo": "devenv-nixpkgs",
+         "type": "github"
+       }
+     },
+     "root": {
+       "inputs": {
+         "devenv": "devenv",
+         "git-hooks": "git-hooks",
+         "nixpkgs": "nixpkgs",
+         "pre-commit-hooks": [
+           "git-hooks"
+         ]
+       }
+     }
+   },
+   "root": "root",
+   "version": 7
+ }
data/devenv.nix ADDED
@@ -0,0 +1,9 @@
+ { pkgs, lib, config, inputs, ... }:
+
+ {
+
+   packages = [ pkgs.git ];
+
+   languages.ruby.enable = true;
+
+ }
data/devenv.yaml ADDED
@@ -0,0 +1,15 @@
+ # yaml-language-server: $schema=https://devenv.sh/devenv.schema.json
+ inputs:
+   nixpkgs:
+     url: github:cachix/devenv-nixpkgs/rolling
+
+ # If you're using non-OSS software, you can set allowUnfree to true.
+ # allowUnfree: true
+
+ # If you're willing to use a package that's vulnerable
+ # permittedInsecurePackages:
+ #   - "openssl-1.1.1w"
+
+ # If you have more than one devenv you can merge them
+ #imports:
+ #  - ./backend
data/durable-llm.gemspec ADDED
@@ -0,0 +1,44 @@
+ # frozen_string_literal: true
+
+ require_relative 'lib/durable/llm/version'
+
+ Gem::Specification.new do |spec|
+   spec.name = 'durable-llm'
+   spec.version = Durable::Llm::VERSION
+   spec.authors = ['Durable Programming Team']
+   spec.email = ['djberube@durableprogramming.com']
+
+   spec.summary = 'A Ruby gem providing access to LLM APIs from various vendors'
+   spec.description = 'Durable-LLM is a unified interface for interacting with multiple Large Language Model APIs, simplifying integration of AI capabilities into Ruby applications.'
+   spec.homepage = 'https://github.com/durableprogramming/durable-llm'
+   spec.license = 'MIT'
+   spec.required_ruby_version = '>= 2.6.0'
+
+   spec.metadata['allowed_push_host'] = 'https://rubygems.org'
+
+   spec.metadata['homepage_uri'] = spec.homepage
+   spec.metadata['source_code_uri'] = 'https://github.com/durableprogramming/durable-llm'
+   spec.metadata['changelog_uri'] = 'https://github.com/durableprogramming/durable-llm/blob/main/CHANGELOG.md'
+
+   spec.files = Dir.chdir(__dir__) do
+     `git ls-files -z`.split("\x0").reject do |f|
+       (File.expand_path(f) == __FILE__) || f.start_with?(*%w[bin/ test/ spec/ features/ .git .circleci appveyor])
+     end
+   end
+   spec.bindir = 'exe'
+   spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
+   spec.require_paths = ['lib']
+
+   spec.add_dependency 'event_stream_parser', '~> 1.0'
+   spec.add_dependency 'faraday', '> 1.0'
+   spec.add_dependency 'highline', '~> 3.1'
+   spec.add_dependency 'json', '~> 2.6'
+   spec.add_dependency 'thor', '~> 1.3'
+   spec.add_dependency 'zeitwerk', '~> 2.6'
+
+   spec.add_development_dependency 'dotenv', '~> 2.8'
+   spec.add_development_dependency 'minitest', '~> 5.0'
+   spec.add_development_dependency 'mocha', '~> 2.1'
+   spec.add_development_dependency 'rubocop', '~> 1.0'
+   spec.add_development_dependency 'vcr', '~> 6.0'
+ end
@@ -1,7 +1,9 @@
+ # frozen_string_literal: true
+
  require 'durable/llm'
  require 'durable/llm/client'

- client = Durable::Llm::Client.new(:openai, :model=> 'gpt-4')
+ client = Durable::Llm::Client.new(:openai, model: 'gpt-4')

  response = client.quick_complete("What's the capital of California?")

@@ -1,7 +1,15 @@
+ # frozen_string_literal: true
+
+ # This file implements the command-line interface for the Durable LLM gem using Thor, providing commands for single prompts, interactive chat sessions, and listing available models. It handles provider resolution, streaming responses, model options, system prompts, and conversation management through a user-friendly CLI with support for both one-shot completions and multi-turn conversations.
+
  require 'thor'
- require 'durable/llm'
- require 'durable/llm/client'
  require 'highline'
+ require 'json'
+ require 'securerandom'
+ require 'fileutils'
+ require 'time'
+ require 'durable/llm/client'
+ require 'durable/llm/providers'

  module Durable
    module Llm
@@ -10,6 +18,49 @@ module Durable
          true
        end

+       CONVERSATIONS_DIR = File.expand_path('~/.durable_llm/conversations')
+       LAST_CONVERSATION_FILE = File.join(CONVERSATIONS_DIR, 'last_conversation.txt')
+
+       def conversation_file_path(id)
+         File.join(CONVERSATIONS_DIR, "#{id}.json")
+       end
+
+       def load_conversation(id)
+         path = conversation_file_path(id)
+         return nil unless File.exist?(path)
+
+         JSON.parse(File.read(path))
+       end
+
+       def save_conversation(conversation)
+         FileUtils.mkdir_p(CONVERSATIONS_DIR) unless Dir.exist?(CONVERSATIONS_DIR)
+         id = conversation['id'] || SecureRandom.uuid
+         conversation['id'] = id
+         conversation['updated_at'] = Time.now.iso8601
+         File.write(conversation_file_path(id), JSON.generate(conversation))
+         File.write(LAST_CONVERSATION_FILE, id)
+         id
+       end
+
+       def last_conversation_id
+         return nil unless File.exist?(LAST_CONVERSATION_FILE)
+
+         File.read(LAST_CONVERSATION_FILE).strip
+       end
+
+       private :load_conversation, :save_conversation
+
+       # Run a single prompt and get a response
+       #
+       # @param prompt [Array<String>] The prompt text to send to the model
+       # @option options :model [String] The model to use (default: gpt-3.5-turbo)
+       # @option options :system [String] System prompt to set context
+       # @option options :continue [Boolean] Continue the last conversation
+       # @option options :conversation [String] Continue a specific conversation by ID
+       # @option options :no_stream [Boolean] Disable streaming responses
+       # @option options :option [Hash] Additional model-specific options
+       # @return [void] Outputs the response to stdout
+       # @raise [RuntimeError] If no provider is found for the specified model
        desc 'prompt PROMPT', 'Run a prompt'
        option :model, aliases: '-m', desc: 'Specify the model to use'
        option :system, aliases: '-s', desc: 'Set a system prompt'
@@ -27,8 +78,11 @@ module Durable
          provider_name = provider_class.name.split('::').last.downcase.to_sym
          client = Durable::Llm::Client.new(provider_name)

-         messages = []
-         messages << { role: 'system', content: options[:system] } if options[:system]
+         conversation_id = options[:conversation] || (options[:continue] ? last_conversation_id : nil)
+         conversation = conversation_id ? load_conversation(conversation_id) : nil
+
+         messages = conversation ? conversation['messages'].dup : []
+         messages << { role: 'system', content: options[:system] } if options[:system] && !conversation
          messages << { role: 'user', content: prompt.join(' ') }

          params = {
@@ -37,22 +91,55 @@
          }
          params.merge!(options[:option]) if options[:option]

-         if options[:no_stream] || !client.stream?
-           response = client.completion(params)
-           puts response.choices.first
-         else
-           client.stream(params) do |chunk|
-             print chunk
-             $stdout.flush
+         begin
+           if options[:no_stream] || !client.stream?
+             response = client.completion(**params)
+             assistant_message = response.choices.first.to_s
+             puts assistant_message
+             messages << { role: 'assistant', content: assistant_message }
+           else
+             assistant_content = ''
+             client.stream(**params) do |chunk|
+               print chunk
+               assistant_content += chunk
+               $stdout.flush
+             end
+             messages << { role: 'assistant', content: assistant_content }
            end
+
+           # Save conversation
+           conversation_data = {
+             'id' => conversation_id,
+             'model' => model,
+             'messages' => messages,
+             'created_at' => conversation ? conversation['created_at'] : Time.now.iso8601
+           }
+           save_conversation(conversation_data)
+         rescue Durable::Llm::Error => e
+           warn "API Error: #{e.message}"
+           exit 1
+         rescue StandardError => e
+           warn "Unexpected error: #{e.message}"
+           exit 1
          end
        end

+       # Start an interactive chat session with the model
+       #
+       # @option options :model [String] The model to use (default: gpt-3.5-turbo)
+       # @option options :system [String] System prompt to set context
+       # @option options :continue [Boolean] Continue the last conversation
+       # @option options :conversation [String] Continue a specific conversation by ID
+       # @option options :no_stream [Boolean] Disable streaming responses
+       # @option options :option [Hash] Additional model-specific options
+       # @return [void] Starts interactive chat session
+       # @raise [RuntimeError] If no provider is found for the specified model
        desc 'chat', 'Start an interactive chat'
        option :model, aliases: '-m', desc: 'Specify the model to use'
        option :system, aliases: '-s', desc: 'Set a system prompt'
        option :continue, aliases: '-c', type: :boolean, desc: 'Continue the previous conversation'
        option :conversation, aliases: '--cid', desc: 'Continue a specific conversation by ID'
+       option :no_stream, type: :boolean, desc: 'Disable streaming of tokens'
        option :option, aliases: '-o', type: :hash, desc: 'Set model-specific options'
        def chat
          model = options[:model] || 'gpt-3.5-turbo'
@@ -63,8 +150,11 @@ module Durable
          provider_name = provider_class.name.split('::').last.downcase.to_sym
          client = Durable::Llm::Client.new(provider_name)

-         messages = []
-         messages << { role: 'system', content: options[:system] } if options[:system]
+         conversation_id = options[:conversation] || (options[:continue] ? last_conversation_id : nil)
+         conversation = conversation_id ? load_conversation(conversation_id) : nil
+
+         messages = conversation ? conversation['messages'].dup : []
+         messages << { role: 'system', content: options[:system] } if options[:system] && !conversation

          cli = HighLine.new

@@ -89,28 +179,131 @@ module Durable
            }
            params.merge!(options[:option]) if options[:option]

-           response = client.completion(params)
-           cli.say(response.choices.first.to_s)
-           messages << { role: 'assistant', content: response.choices.first.to_s }
+           begin
+             if options[:no_stream] || !client.stream?
+               response = client.completion(**params)
+               assistant_message = response.choices.first.to_s
+               cli.say(assistant_message)
+               messages << { role: 'assistant', content: assistant_message }
+             else
+               assistant_content = ''
+               client.stream(**params) do |chunk|
+                 print chunk
+                 assistant_content += chunk
+                 $stdout.flush
+               end
+               puts # Add newline after streaming
+               messages << { role: 'assistant', content: assistant_content }
+             end
+
+             # Save conversation after each exchange
+             conversation_data = {
+               'id' => conversation_id,
+               'model' => model,
+               'messages' => messages,
+               'created_at' => conversation ? conversation['created_at'] : Time.now.iso8601
+             }
+             conversation_id = save_conversation(conversation_data)
+           rescue Durable::Llm::Error => e
+             cli.say("API Error: #{e.message}")
+             next
+           rescue StandardError => e
+             cli.say("Unexpected error: #{e.message}")
+             next
+           end
          end
        end

+       # List all available models from all providers
+       #
+       # @option options :options [Boolean] Show model-specific options for each model
+       # @return [void] Outputs available models to stdout
        desc 'models', 'List available models'
        option :options, type: :boolean, desc: 'Show model options'
        def models
          cli = HighLine.new
          cli.say('Available models:')

-         Durable::Llm::Providers.providers.each do |provider_name|
-           provider_class = Durable::Llm::Providers.const_get(provider_name.to_s.capitalize)
-           provider_models = provider_class.models
+         Durable::Llm::Providers.providers.each do |provider_sym|
+           provider_class = Durable::Llm::Providers.provider_class_for(provider_sym)
+           begin
+             provider_models = provider_class.models
+             cli.say("#{provider_sym.to_s.capitalize}:")
+             provider_models.each do |model|
+               cli.say(" #{model}")
+               if options[:options]
+                 provider_options = provider_class.options
+                 cli.say(" Options: #{provider_options.join(', ')}")
+               end
+             end
+           rescue StandardError => e
+             cli.say("#{provider_sym.to_s.capitalize}: Error loading models - #{e.message}")
+           end
+         end
+       end
+
+       # List all saved conversations
+       #
+       # @return [void] Outputs list of saved conversations to stdout
+       desc 'conversations', 'List saved conversations'
+       def conversations
+         cli = HighLine.new
+
+         unless Dir.exist?(CONVERSATIONS_DIR)
+           cli.say('No conversations found.')
+           return
+         end
+
+         conversation_files = Dir.glob("#{CONVERSATIONS_DIR}/*.json").sort_by { |f| File.mtime(f) }.reverse
+
+         if conversation_files.empty?
+           cli.say('No conversations found.')
+           return
+         end
+
+         cli.say('Saved conversations:')
+         cli.say('')
+
+         conversation_files.each do |file|
+           id = File.basename(file, '.json')
+           begin
+             conversation = JSON.parse(File.read(file))
+             model = conversation['model'] || 'unknown'
+             message_count = conversation['messages']&.length || 0
+             updated_at = conversation['updated_at'] ? Time.parse(conversation['updated_at']).strftime('%Y-%m-%d %H:%M') : 'unknown'

-           cli.say("#{provider_name.to_s.capitalize}:")
-           provider_models.each do |model|
-             cli.say(" #{model}")
+             marker = id == last_conversation_id ? ' *' : ''
+             cli.say("#{id}#{marker} - #{model} (#{message_count} messages, updated #{updated_at})")
+           rescue JSON::ParserError
+             cli.say("#{id} - [corrupted conversation file]")
            end
          end
+
+         cli.say('')
+         cli.say('* indicates the last active conversation')
+       end
+
+       # Delete a saved conversation by ID
+       #
+       # @param id [String] The conversation ID to delete
+       # @return [void] Outputs confirmation message to stdout
+       desc 'delete_conversation ID', 'Delete a saved conversation'
+       def delete_conversation(id)
+         cli = HighLine.new
+
+         path = conversation_file_path(id)
+         if File.exist?(path)
+           File.delete(path)
+           cli.say("Deleted conversation #{id}")
+
+           # Remove from last conversation if it was the last one
+           File.delete(LAST_CONVERSATION_FILE) if last_conversation_id == id && File.exist?(LAST_CONVERSATION_FILE)
+         else
+           cli.say("Conversation #{id} not found")
+         end
        end
      end
    end
  end
+
+ # Copyright (c) 2025 Durable Programming, LLC. All rights reserved.
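For reference, a conversation file written by `save_conversation` above to `~/.durable_llm/conversations/<uuid>.json` would look roughly like this. The keys are taken from the code in this diff (`id` from SecureRandom.uuid, `model`, `messages`, `created_at`, `updated_at`); the values shown are illustrative only:

```json
{
  "id": "3f2b6d0e-8c41-4a8b-9f1e-2d7c5a9b0e44",
  "model": "gpt-3.5-turbo",
  "messages": [
    { "role": "user", "content": "What's the capital of California?" },
    { "role": "assistant", "content": "Sacramento." }
  ],
  "created_at": "2025-01-15T10:30:00-05:00",
  "updated_at": "2025-01-15T10:30:04-05:00"
}
```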