llm-shell 0.4.1 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: f6aec859f4eb4fd9bfc722da0f0ccbaada65932b56bbd5d118f8131be32097e5
-   data.tar.gz: 0156acf8d3efbec33ce61aab82e5cc90f2618888b7cce6fa181c5de5b11b591c
+   metadata.gz: c767e6b583094aec1b80bc31d20de4da454d7ec70f93186118e247ef00deb8b3
+   data.tar.gz: 1a85b31965a9a33eedab86ffcfa6812f001d9b2941fc7c7f0b3c857a94172ff9
  SHA512:
-   metadata.gz: bdc7e09ec212ee04005bf636caa64abbfdbb087a6d20506402aaf81703806d4c3ddf7611f0c4dc61eedaa7711f6fe50880be0691a07181cb9d582e19fbcf2ba8
-   data.tar.gz: 7ce76e58b455c55ee94d52b2913396e514d2b808c0fa00ac57c5dff34b499c4cf1def4f3acc398ddfd602cb9f202a3261f8c51ea6012c89853c3a9d79991f9d5
+   metadata.gz: 8d3b433cf8aa6c01185baa03682171fc98a57d22e0edc211898db47e6a44190eabfa7fa16d65be37317c1ba6a9d2b95e8f61568bfe877067b2b0a01d53b88176
+   data.tar.gz: 6b7c997d7f512614dc3d84c6801d09d59b9bdec0d1fb8a97b2039c4d9ddba92ecceed95d9b745d75bdfa0c483f4c3cddc2ccf2e674e8d0d1dff4fe5d33261ef9
data/README.md CHANGED
@@ -12,7 +12,7 @@ it in action!
  #### General

  - 🌟 Unified interface for multiple Large Language Models (LLMs)
- - 🤝 Supports Gemini, OpenAI, Anthropic, LlamaCpp and Ollama
+ - 🤝 Supports Gemini, OpenAI, Anthropic, DeepSeek, LlamaCpp and Ollama

  #### Customize

@@ -35,13 +35,13 @@ it in action!
  </details>

  <details>
- <summary><b>2. Files: import at boot time</b></summary>
- <img src="share/llm-shell/examples/files-boottime.gif">
+ <summary><b>2. Files: import at runtime</b></summary>
+ <img src="share/llm-shell/examples/files-runtime.gif">
  </details>

  <details>
- <summary><b>3. Files: import at runtime</b></summary>
- <img src="share/llm-shell/examples/files-runtime.gif">
+ <summary><b>3. Files: import at boot time</b></summary>
+ <img src="share/llm-shell/examples/files-boottime.gif">
  </details>

  ## Customization
@@ -80,8 +80,10 @@ end
  llm-shell can be extended with your own console commands. This can be
  done by creating a Ruby file in the `~/.llm-shell/commands/` directory &ndash;
  with one file per command. The commands are loaded at boot time. See the
- [import-file](lib/llm/shell/commands/import_file.rb)
- command for a realistic example:
+ [file-import](lib/llm/shell/commands/file_import.rb)
+ and
+ [dir-import](lib/llm/shell/commands/dir_import.rb)
+ commands for a realistic example:

  ```ruby
  LLM.command "say-hello" do |cmd|
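
The README's custom-command example is truncated by the hunk above. For context, here is a hypothetical complete command file — a minimal sketch modeled on the file-import and dir-import commands further down in this diff; the SayHello class, its prompt text, and the description string are illustrative assumptions, not code from the gem:

```ruby
# Hypothetical example only: ~/.llm-shell/commands/say_hello.rb
class LLM::Shell::Command
  class SayHello
    # @param [LLM::Shell::Context] context
    #  The context of the command
    def initialize(context)
      @context = context
    end

    # Asks the bot to greet the given name.
    # @return [void]
    def call(name)
      bot.chat "Say hello to #{name}!"
    end

    private

    def bot = @context.bot
  end

  LLM.command "say-hello" do |cmd|
    cmd.description "Ask the LLM to greet somebody"
    cmd.register(SayHello)
  end
end
```
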
@@ -112,6 +114,9 @@ gemini:
  anthropic:
    key: YOURKEY
    model: claude-3-7-sonnet-20250219
+ deepseek:
+   key: YOURKEY
+   model: deepseek-chat
  ollama:
    host: localhost
    model: deepseek-coder:6.7b
@@ -16,7 +16,7 @@ class LLM::Shell::Command
  # @return [void]
  def command(name)
    cmd = LLM::Shell::Command.new
-   cmd.name(name) if name
+   cmd.name(name)
    yield cmd
    commands[cmd.name] = cmd
  end
@@ -0,0 +1,57 @@
+ # frozen_string_literal: true
+
+ class LLM::Shell::Command
+   class DirImport
+     ##
+     # Completes a path with a wildcard.
+     # @param path [String]
+     # The path to complete.
+     # @return [Array<String>]
+     # Returns the completed path(s)
+     def self.complete(path)
+       Dir["#{path}*"]
+     end
+
+     ##
+     # @param [LLM::Shell::Context] context
+     # The context of the command
+     # @return [LLM::Shell::Command::DirImport]
+     def initialize(context)
+       @context = context
+     end
+
+     ##
+     # Recursively imports all files in a directory.
+     # @return [void]
+     def call(dir)
+       Dir.entries(dir).each do |file|
+         if file == "." || file == ".."
+           next
+         elsif File.directory? File.join(dir, file)
+           call File.join(dir, file)
+         else
+           import File.join(dir, file)
+         end
+       end
+     end
+
+     private
+
+     def import(file)
+       return unless File.file?(file)
+       bot.chat [
+         "--- START: #{file} ---",
+         File.read(file),
+         "--- END: #{file} ---"
+       ].join("\n")
+     end
+
+     def bot = @context.bot
+     def io = @context.io
+   end
+
+   LLM.command "dir-import" do |cmd|
+     cmd.description "Share the contents of a directory with the LLM"
+     cmd.register(DirImport)
+   end
+ end
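
The new dir-import command walks the tree with Dir.entries plus recursion, skipping "." and ".." and sending each regular file to the bot between START/END markers. For illustration only (not gem code), essentially the same traversal expressed with Ruby's Find module:

```ruby
require "find"

# Illustration only: collects roughly the same set of paths DirImport#call
# imports. Find.find never yields "." or "..", and File.file? filters out
# directories and other non-regular files.
def files_to_import(dir)
  files = []
  Find.find(dir) { |path| files << path if File.file?(path) }
  files
end

p files_to_import("./lib")
```
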
@@ -1,7 +1,7 @@
  # frozen_string_literal: true

  class LLM::Shell::Command
-   class ImportFile
+   class FileImport
      ##
      # Completes a path with a wildcard.
      # @param path [String]
@@ -15,11 +15,14 @@ class LLM::Shell::Command
      ##
      # @param [LLM::Shell::Context] context
      # The context of the command
-     # @return [LLM::Shell::Command::ImportFile]
+     # @return [LLM::Shell::Command::FileImport]
      def initialize(context)
        @context = context
      end

+     ##
+     # Imports one or more globbed files.
+     # @return [void]
      def call(*files)
        Dir[*files].each { import(_1) }
      end
@@ -27,6 +30,7 @@ class LLM::Shell::Command
      private

      def import(file)
+       return unless File.file?(file)
        bot.chat [
          "--- START: #{file} ---",
          File.read(file),
@@ -38,8 +42,8 @@ class LLM::Shell::Command
      def io = @context.io
    end

-   LLM.command "import-file" do |cmd|
+   LLM.command "file-import" do |cmd|
      cmd.description "Share one or more files with the LLM"
-     cmd.register ImportFile
+     cmd.register(FileImport)
    end
  end
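
Both import commands now guard with File.file?, which matters because a glob (or a directory listing) can match directories, and File.read on a directory typically raises Errno::EISDIR. A standalone illustration (not gem code):

```ruby
# Illustration only: a glob can match directories, so non-files are skipped
# up front before File.read is attempted.
Dir["lib/**/*"].each do |path|
  next unless File.file?(path) # skip directories, sockets, etc.
  puts "#{path}: #{File.read(path).bytesize} bytes"
end
```
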
@@ -10,7 +10,7 @@ class LLM::Shell
  # @param [LLM::Chat] bot
  # @param [LLM::Shell::Options] options
  # @return [LLM::Shell::REPL]
- def initialize(bot, options:)
+ def initialize(bot:, options:)
    @bot = bot
    @console = IO.console
    @options = options
@@ -4,5 +4,5 @@ module LLM
  end unless defined?(LLM)

  class LLM::Shell
-   VERSION = "0.4.1"
+   VERSION = "0.5.0"
  end
data/lib/llm/shell.rb CHANGED
@@ -48,7 +48,7 @@ class LLM::Shell
    @config = Config.new(options[:provider])
    @options = Options.new @config.merge(options), Default.new(options[:provider])
    @bot = LLM::Chat.new(llm, {tools:}.merge(@options.chat)).lazy
-   @repl = REPL.new(@bot, options: @options)
+   @repl = REPL.new(bot: @bot, options: @options)
  end

  ##
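
The REPL constructor change earlier in this diff makes bot a required keyword argument, and the call site in lib/llm/shell.rb above is updated to match; the old positional call would now raise ArgumentError. A self-contained illustration with a hypothetical Demo class (not gem code):

```ruby
# Illustration only: with `def initialize(bot:, options:)` both arguments are
# required keywords, so a positional argument raises ArgumentError.
class Demo
  def initialize(bot:, options:)
    @bot, @options = bot, options
  end
end

Demo.new(bot: :my_bot, options: {}) # new keyword style: works
begin
  Demo.new(:my_bot, options: {})    # old positional style
rescue ArgumentError => e
  puts e.message                    # explains the missing keyword
end
```
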
@@ -4,13 +4,17 @@
  require_relative "../../lib/llm/shell"

  def main(argv)
-   options = {tools: []}
-   option_parser.parse(argv, into: options)
-   if argv.empty? || options[:provider].nil?
-     warn option_parser.help
-     throw(:exit, 1)
+   if argv.include?("-v") || argv.include?("--version")
+     puts LLM::Shell::VERSION
    else
-     LLM::Shell.new(options).start
+     options = {tools: []}
+     option_parser.parse(argv, into: options)
+     if argv.empty? || options[:provider].nil?
+       warn option_parser.help
+       throw(:exit, 1)
+     else
+       LLM::Shell.new(options).start
+     end
    end
  end

@@ -24,6 +28,7 @@ def option_parser
    o.on("-o [PORT]", "--port [PORT]", "Optional. Sometimes required by ollama.", Integer)
    o.on("-f [GLOB]", "--files [GLOB]", "Optional. Glob pattern(s) separated by a comma.", Array)
    o.on("-t [TOOLS]", "--tools [TOOLS]", "Optional. One or more tool names to load automatically.", Array)
+   o.on("-v", "--version", "Optional. Print the version and exit.")
  end
end
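
The executable checks for -v/--version before parsing and otherwise relies on OptionParser's into: option, which stores each parsed flag in a Hash keyed by its long option name. A standalone illustration of the into: mechanics (not the gem's actual executable):

```ruby
require "optparse"

# Illustration only: `parse(argv, into: hash)` stores each parsed flag in the
# hash, keyed by the long option name as a symbol.
options = {tools: []}
parser = OptionParser.new do |o|
  o.on("-p [PROVIDER]", "--provider [PROVIDER]", "The provider name.", String)
  o.on("-t [TOOLS]", "--tools [TOOLS]", "Tool names to load.", Array)
end
parser.parse(["--provider", "openai", "--tools", "a,b"], into: options)
p options # provider: "openai", tools: ["a", "b"]
```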
 
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: llm-shell
  version: !ruby/object:Gem::Version
-   version: 0.4.1
+   version: 0.5.0
  platform: ruby
  authors:
  - Antar Azri
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2025-05-11 00:00:00.000000000 Z
+ date: 2025-05-19 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: llm.rb
@@ -17,14 +17,14 @@ dependencies:
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '0.7'
+         version: '0.8'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '0.7'
+         version: '0.8'
  - !ruby/object:Gem::Dependency
    name: paint
    requirement: !ruby/object:Gem::Requirement
@@ -224,7 +224,8 @@ files:
  - lib/llm/shell.rb
  - lib/llm/shell/command.rb
  - lib/llm/shell/command/extension.rb
- - lib/llm/shell/commands/import_file.rb
+ - lib/llm/shell/commands/dir_import.rb
+ - lib/llm/shell/commands/file_import.rb
  - lib/llm/shell/completion.rb
  - lib/llm/shell/config.rb
  - lib/llm/shell/default.rb
@@ -249,7 +250,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-       version: 3.0.0
+       version: '3.2'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
    - - ">="