llm-shell 0.1.0 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: bad036ff98c154b18cabda0e2b598b40c242907e6eda28b6d2cfaa1fba66a265
4
- data.tar.gz: 85d5fbc076495609562221a2296eabb06ec03c422ce1a2ea48661a3cf23213e9
3
+ metadata.gz: 629a62d1b7f2fb7c4de9149c1af8e640c1ff08ccbef3a581d006cd99d331e4e2
4
+ data.tar.gz: 9e8808cba408a0474a938b7de8df00e1b72032635b00928bdd0a7a78f57d1521
5
5
  SHA512:
6
- metadata.gz: 182781650d0281008f8741ef389b1c7eaaf815191df2a3e8153415d6b65fdf64ce65d5d83ec67debfd05bde2f408f40c9ad79a1f21299dac699ba7c24f580f74
7
- data.tar.gz: cebfa126c00d63d9927a9cfd6684f382016bd2c805aadf0da3ae5798d4bc136ccea3682af9c3c4e89236cfcbb7a837891b092a3744e537581eb676de9ecbaa9d
6
+ metadata.gz: 4552e7ef324acf1f800c42cffc5fe7812e581389f1a544a447c6963611011d66c4a3e19ec2fb1d5f142fbf822b8dd8a0264ed8de6a40427113b597541d099170
7
+ data.tar.gz: 8c453dfd38ff0c2f94707ab460a47afeba52efeccc808ff68c5a9c1007040855104e1954548222367459eca4a6245a174b78cd9f034baa8f301d2451a78b0288
data/README.md CHANGED
@@ -10,21 +10,27 @@ it in action!
10
10
  ## Features
11
11
 
12
12
  - 🌟 Unified interface for multiple Large Language Models (LLMs)
13
- - 🤝 Supports Gemini, OpenAI, Anthropic, and Ollama
13
 + - 🤝 Supports Gemini, OpenAI, Anthropic, LlamaCpp, and Ollama
14
14
  - 📤 Attach local files as conversation context
15
15
  - 🔧 Extend with your own functions and tool calls
16
16
  - 📝 Advanced Markdown formatting and output
17
 + - 📄 Uses the less pager for long outputs
17
18
 
18
19
  ## Demos
19
20
 
20
21
  <details>
21
- <summary><b>1. Tool calls</b></summary>
22
- <img src="share/llm-shell/examples/example2.gif/">
22
+ <summary><b>1. Tools: "system" function</b></summary>
23
+ <img src="share/llm-shell/examples/toolcalls.gif/">
23
24
  </details>
24
25
 
25
26
  <details>
26
- <summary><b>2. File discussion</b></summary>
27
- <img src="share/llm-shell/examples/example1.gif">
27
+ <summary><b>2. Files: import at boot time</b></summary>
28
+ <img src="share/llm-shell/examples/files-boottime.gif">
29
+ </details>
30
+
31
+ <details>
32
+ <summary><b>3. Files: import at runtime</b></summary>
33
+ <img src="share/llm-shell/examples/files-runtime.gif">
28
34
  </details>
29
35
 
30
36
  ## Customization
@@ -84,6 +90,9 @@ anthropic:
84
90
  ollama:
85
91
  host: localhost
86
92
  model: deepseek-coder:6.7b
93
+ llamacpp:
94
+ host: localhost
95
+ model: qwen3
87
96
  tools:
88
97
  - system
89
98
  ```
@@ -0,0 +1,29 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Shell::Command
4
+ module Extension
5
+ ##
6
+ # @example
7
+ # LLM.command do |cmd|
8
+ # cmd.name "hello"
9
+ # cmd.define do |name|
10
+ # io.rewind.print("Hello #{name}")
11
+ # end
12
+ # end
13
+ # @yieldparam [LLM::Shell::Command] cmd
14
+ # Yields an instance of LLM::Shell::Command
15
+ # @return [void]
16
+ def command
17
+ cmd = LLM::Shell::Command.new
18
+ yield cmd
19
+ commands[cmd.name] = cmd
20
+ end
21
+
22
+ ##
23
+ # @return [Hash<String, LLM::Shell::Command>]
24
+ def commands
25
+ @commands ||= {}
26
+ end
27
+ end
28
+ LLM.extend(Extension)
29
+ end
@@ -0,0 +1,47 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Shell
4
+ class Command
5
+ Context = Struct.new(:bot, :io)
6
+
7
+ ##
8
+ # Set or get the command name
9
+ # @param [String, nil] name
10
+ # The name of the command
11
+ def name(name = nil)
12
+ if name
13
+ @name = name
14
+ else
15
+ @name
16
+ end
17
+ end
18
+
19
+ ##
20
+ # Setup the command context
21
+ # @return [void]
22
+ def setup(bot, io)
23
+ @context = Context.new(bot, io)
24
+ end
25
+
26
+ ##
27
+ # Define the command
28
+ # @return [void]
29
+ def define(klass = nil, &b)
30
+ @runner = klass || b
31
+ end
32
+ alias_method :register, :define
33
+
34
+ ##
35
+ # Call the command
36
 + # @return [void]
37
+ def call(*argv)
38
+ if @context.nil?
39
+ raise "context has not been setup"
40
+ elsif Class === @runner
41
+ @runner.new(@context).call(*argv)
42
+ else
43
+ @context.instance_exec(*argv, &@runner)
44
+ end
45
+ end
46
+ end
47
+ end
@@ -0,0 +1,31 @@
1
+ # frozen_string_literal: true
2
+
3
+ class LLM::Shell::Command
4
+ class ImportFile
5
+ def initialize(context)
6
+ @context = context
7
+ end
8
+
9
+ def call(*files)
10
+ Dir[*files].each { import(_1) }
11
+ end
12
+
13
+ private
14
+
15
+ def import(file)
16
+ bot.chat [
17
+ "--- START: #{file} ---",
18
+ File.read(file),
19
+ "--- END: #{file} ---"
20
+ ].join("\n")
21
+ end
22
+
23
+ def bot = @context.bot
24
+ def io = @context.io
25
+ end
26
+
27
+ LLM.command do |command|
28
+ command.name "import-file"
29
+ command.register ImportFile
30
+ end
31
+ end
@@ -7,18 +7,7 @@ class LLM::Shell
7
7
  end
8
8
 
9
9
  def prompt
10
- "You are a helpful assistant." \
11
- "Answer the user's questions as best as you can." \
12
- "The user's environment is a terminal." \
13
- "Provide short and concise answers that are suitable for a terminal." \
14
- "Do not provide long answers." \
15
- "One or more files might be provided at the start of the conversation. " \
16
- "The user might ask you about them, you should try to understand them and what they are. " \
17
- "If you don't understand something, say so. " \
18
- "Respond in markdown format." \
19
- "Each file will be surrounded by the following markers: " \
20
- "'# START: /path/to/file'" \
21
- "'# END: /path/to/file'"
10
+ File.read File.join(SHAREDIR, "prompts", "default.txt")
22
11
  end
23
12
 
24
13
  def role
@@ -27,5 +16,8 @@ class LLM::Shell
27
16
  else :user
28
17
  end
29
18
  end
19
+
20
+ SHAREDIR = File.join(__dir__, "..", "..", "..", "share", "llm-shell")
21
+ private_constant :SHAREDIR
30
22
  end
31
23
  end
@@ -3,6 +3,7 @@
3
3
  class LLM::Shell
4
4
  class Formatter
5
5
  FormatError = Class.new(RuntimeError)
6
+ FILE_REGEXP = /\A--- START: (.+?) ---/
6
7
 
7
8
  def initialize(messages)
8
9
  @messages = messages.reject(&:tool_call?)
@@ -21,25 +22,26 @@ class LLM::Shell
21
22
  attr_reader :messages
22
23
 
23
24
  def format_user(messages)
24
- messages.flat_map do |message|
25
+ messages.filter_map do |message|
25
26
  next unless message.user?
26
27
  next unless String === message.content
28
+ next unless message.content !~ FILE_REGEXP
27
29
  role = Paint[message.role, :bold, :yellow]
28
30
  title = "#{role} says: "
29
31
  body = wrap(message.tap(&:read!).content)
30
- [title, render(body), ""].join("\n")
31
- end.join
32
+ [title, "\n", render(body), "\n"].join
33
+ end.join("\n")
32
34
  end
33
35
 
34
36
  def format_assistant(messages)
35
- messages.flat_map do |message|
37
+ messages.filter_map do |message|
36
38
  next unless message.assistant?
37
39
  next unless String === message.content
38
40
  role = Paint[message.role, :bold, :green]
39
41
  title = "#{role} says: "
40
42
  body = wrap(message.tap(&:read!).content)
41
- [title, render(body)].join("\n")
42
- end.join
43
+ [title, "\n", render(body)].join
44
+ end.join("\n")
43
45
  end
44
46
 
45
47
  def render(text)
@@ -55,6 +55,8 @@ class LLM::Shell
55
55
  text
56
56
  .gsub(/([^\n])\n(#+ )/, "\\1\n\n\\2")
57
57
  .gsub(/(#+ .+?)\n(?!\n)/, "\\1\n\n")
58
+ .gsub(/\A<think>[\n]*<\/think>(?:\n)/, "")
59
+ .gsub(/\A\n{2,}/, "")
58
60
  end
59
61
  end
60
62
  end
@@ -26,5 +26,6 @@ class LLM::Shell
26
26
  def llm = @options
27
27
  def chat = @chat_options
28
28
  def default = @default
29
+ def prompt = default.prompt
29
30
  end
30
31
  end
@@ -14,15 +14,15 @@ class LLM::Shell
14
14
  @bot = bot
15
15
  @console = IO.console
16
16
  @options = options
17
- @line = IO::Line.new($stdout)
17
+ @io = IO::Line.new($stdout)
18
18
  end
19
19
 
20
20
  ##
21
21
  # Performs initial setup
22
22
  # @return [void]
23
23
  def setup
24
- chat options.default.prompt, role: options.default.role
25
- files.each { bot.chat ["# START: #{_1}", File.read(_1), "# END: #{_1}"].join("\n") }
24
+ chat options.prompt, role: options.default.role
25
+ files.each { bot.chat ["--- START: #{_1} ---", File.read(_1), "--- END: #{_1} ---"].join("\n") }
26
26
  bot.messages.each(&:read!)
27
27
  clear_screen
28
28
  end
@@ -50,7 +50,7 @@ class LLM::Shell
50
50
  private
51
51
 
52
52
  attr_reader :bot, :console,
53
- :line, :default,
53
+ :io, :default,
54
54
  :options
55
55
 
56
56
  def formatter(messages) = Formatter.new(messages)
@@ -61,9 +61,17 @@ class LLM::Shell
61
61
 
62
62
  def read
63
63
  input = Readline.readline("llm> ", true) || throw(:exit, 0)
64
- chat input.tap { clear_screen }
65
- line.rewind.print(Paint["Thinking", :bold])
66
- unread.tap { line.rewind }
64
+ words = input.split(" ")
65
+ if LLM.commands[words[0]]
66
+ cmd = LLM.commands[words[0]]
67
+ argv = words[1..]
68
+ cmd.setup(bot, io)
69
+ cmd.call(*argv)
70
+ else
71
+ chat input.tap { clear_screen }
72
+ io.rewind.print(Paint["Thinking", :bold])
73
+ unread.tap { io.rewind }
74
+ end
67
75
  end
68
76
 
69
77
  def eval
@@ -74,18 +82,21 @@ class LLM::Shell
74
82
  print "Do you want to call it? "
75
83
  input = $stdin.gets.chomp.downcase
76
84
  puts
77
- if %w(y yes yeah ok).include?(input)
85
+ if %w(y yes yep yeah ok).include?(input)
78
86
  bot.chat function.call
79
- unread.tap { line.rewind }
87
+ unread.tap { io.rewind }
80
88
  else
81
- print "Skipping function call", "\n"
89
+ bot.chat function.cancel
90
+ bot.chat "I decided to not run the function this time. Maybe next time."
82
91
  end
83
92
  end
84
93
  end
85
94
 
86
95
  def emit
87
- print formatter(unread).format!(:user), "\n"
88
- print formatter(unread).format!(:assistant), "\n"
96
+ IO.popen("less -FRX", "w") do
97
+ _1.write formatter(unread).format!(:user), "\n"
98
+ _1.write formatter(unread).format!(:assistant), "\n"
99
+ end
89
100
  end
90
101
 
91
102
  def chat(...)
@@ -4,5 +4,5 @@ module LLM
4
4
  end unless defined?(LLM)
5
5
 
6
6
  class LLM::Shell
7
- VERSION = "0.1.0"
7
+ VERSION = "0.2.0"
8
8
  end
data/lib/llm/shell.rb CHANGED
@@ -8,6 +8,8 @@ require "paint"
8
8
 
9
9
  class LLM::Shell
10
10
  require_relative "../io/line"
11
+ require_relative "shell/command"
12
+ require_relative "shell/command/extension"
11
13
  require_relative "shell/markdown"
12
14
  require_relative "shell/formatter"
13
15
  require_relative "shell/default"
@@ -16,6 +18,10 @@ class LLM::Shell
16
18
  require_relative "shell/config"
17
19
  require_relative "shell/version"
18
20
 
21
+ ##
22
+ # Load all commands
23
+ Dir[File.join(__dir__, "shell", "commands", "*.rb")].each { require(_1) }
24
+
19
25
  ##
20
26
  # @return [String]
21
27
  def self.home
@@ -55,7 +61,7 @@ class LLM::Shell
55
61
  print Paint["llm-shell: ", :green], "load #{name} tool", "\n"
56
62
  eval File.read(path), TOPLEVEL_BINDING, path, 1
57
63
  else
58
- print Paint["llm-shell:: ", :yellow], "skip #{name} tool", "\n"
64
+ print Paint["llm-shell: ", :yellow], "skip #{name} tool", "\n"
59
65
  end
60
66
  end.grep(LLM::Function)
61
67
  end
data/lib/llm-shell.rb CHANGED
@@ -1 +1,3 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require_relative "llm/shell"
@@ -0,0 +1,27 @@
1
+ /no_think
2
+
3
+ ## General
4
+
5
+ You are a helpful assistant.
6
+ Answer the user's questions as best as you can.
7
+
8
+ The user's environment is a terminal.
9
+ Provide short and concise answers that are suitable for a terminal.
10
+ Do not provide long answers.
11
+
12
+ ## Files
13
+
14
+ One or more files *MIGHT* be provided at the start of the conversation.
15
+ One file will be provided per message, *IF* any files are provided at all.
16
+ *IF* a file is provided, it will be in this format:
17
+
18
+ --- START: /path/to/file ---
19
+ <contents>
20
+ --- END: /path/to/file ---
21
+
22
+ Otherwise, no files will be provided and you shouldn't mention them.
23
+ On receipt of one or more files, you will respond with: Got it. And with nothing else.
24
+
25
+ ## Format
26
+
27
+ Respond in markdown.
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: llm-shell
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.0
4
+ version: 0.2.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Antar Azri
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2025-05-06 00:00:00.000000000 Z
12
+ date: 2025-05-09 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: llm.rb
@@ -17,14 +17,14 @@ dependencies:
17
17
  requirements:
18
18
  - - "~>"
19
19
  - !ruby/object:Gem::Version
20
- version: '0.6'
20
+ version: '0.7'
21
21
  type: :runtime
22
22
  prerelease: false
23
23
  version_requirements: !ruby/object:Gem::Requirement
24
24
  requirements:
25
25
  - - "~>"
26
26
  - !ruby/object:Gem::Version
27
- version: '0.6'
27
+ version: '0.7'
28
28
  - !ruby/object:Gem::Dependency
29
29
  name: paint
30
30
  requirement: !ruby/object:Gem::Requirement
@@ -208,6 +208,9 @@ files:
208
208
  - lib/io/line.rb
209
209
  - lib/llm-shell.rb
210
210
  - lib/llm/shell.rb
211
+ - lib/llm/shell/command.rb
212
+ - lib/llm/shell/command/extension.rb
213
+ - lib/llm/shell/commands/import_file.rb
211
214
  - lib/llm/shell/config.rb
212
215
  - lib/llm/shell/default.rb
213
216
  - lib/llm/shell/formatter.rb
@@ -216,6 +219,7 @@ files:
216
219
  - lib/llm/shell/repl.rb
217
220
  - lib/llm/shell/version.rb
218
221
  - libexec/llm-shell/shell
222
+ - share/llm-shell/prompts/default.txt
219
223
  homepage: https://github.com/llmrb/llm-shell
220
224
  licenses:
221
225
  - 0BSD