llm-shell 0.6.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +24 -4
- data/bin/llm-shell +1 -0
- data/lib/llm/shell/command/extension.rb +11 -0
- data/lib/llm/shell/command.rb +1 -0
- data/lib/llm/shell/commands/clear_screen.rb +31 -0
- data/lib/llm/shell/commands/dir_import.rb +6 -15
- data/lib/llm/shell/commands/file_import.rb +6 -15
- data/lib/llm/shell/commands/show_history.rb +45 -0
- data/lib/llm/shell/commands/system_prompt.rb +30 -0
- data/lib/llm/shell/commands/utils.rb +20 -0
- data/lib/llm/shell/default.rb +1 -1
- data/lib/llm/shell/formatter.rb +6 -19
- data/lib/llm/shell/options.rb +9 -1
- data/lib/llm/shell/renderer.rb +47 -0
- data/lib/llm/shell/repl.rb +14 -22
- data/lib/llm/shell/version.rb +1 -1
- data/lib/llm/shell.rb +1 -0
- data/libexec/llm-shell/shell +9 -2
- data/share/llm-shell/prompts/default.txt +13 -19
- metadata +6 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 89ffe0d7aacb89de310acc7012fb8b383d9c27e0059536ff9ce7ea855f604c37
+  data.tar.gz: eb1cd003da504a27f6859898c21339406536a4979cf0fea6f1d6740bb2ea9328
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: aa30fcd97acd567fa2c53d7c5936063e0442a3b9e8a2a4a2d721557dd704af57710f4e3d6389c0da9d9e2bf9babacb2e74f51f800f239c65a18d8da4d429f9c2
+  data.tar.gz: '0900fcbd3c39c95c3941494f93863b8a11f78a69305c4d8dce31c3a87a2391f4940b97743c3f6812e3acb185be40d85f2a3003a5d75de19cc7f94153773f380c'
data/README.md
CHANGED
@@ -80,10 +80,12 @@ end
 llm-shell can be extended with your own console commands. This can be
 done by creating a Ruby file in the `~/.llm-shell/commands/` directory –
 with one file per command. The commands are loaded at boot time. See the
-[file-import](lib/llm/shell/commands/file_import.rb)
-
-[
-commands
+[file-import](lib/llm/shell/commands/file_import.rb),
+[dir-import](lib/llm/shell/commands/dir_import.rb),
+[show-history](lib/llm/shell/commands/show_history.rb),
+[clear-screen](lib/llm/shell/commands/clear_screen.rb)
+and [system-prompt](lib/llm/shell/commands/system_prompt.rb)
+commands for more realistic examples:
 
 ```ruby
 LLM.command "say-hello" do |cmd|
@@ -93,6 +95,24 @@ LLM.command "say-hello" do |cmd|
   end
 end
 ```
+
+#### Prompts
+
+> It is recommended that custom prompts instruct the LLM to emit markdown,
+> otherwise you might see unexpected results because llm-shell assumes the LLM
+> will emit markdown.
+
+The first message in a conversation is sometimes known as a "system prompt",
+and it defines the expectations and rules to be followed by an LLM throughout
+a conversation. The default prompt used by llm-shell can be found at
+[default.txt](share/llm-shell/prompts/default.txt).
+
+The prompt can be changed by adding a file to the `~/.llm-shell/prompts/` directory,
+and then choosing it at boot time with the `-r PROMPT`, `--prompt PROMPT` options.
+Generally you probably want to fork [default.txt](share/llm-shell/prompts/default.txt)
+to conserve the original prompt rules around markdown and files, then modify it to
+suit your own needs and preferences.
+
 ## Settings
 
 #### YAML
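The README section above documents the two extension points that changed in this release: custom commands live in `~/.llm-shell/commands/` (one file per command, loaded at boot), and custom prompts live in `~/.llm-shell/prompts/` and are selected with `-r PROMPT` / `--prompt PROMPT`. As a sketch only, a custom command file modeled on the say-hello example could look like the following; `cmd.define` and `io.rewind.print` follow the @example documented in command/extension.rb below, while using `cmd.description` alongside `cmd.define` is an assumption.

```ruby
# ~/.llm-shell/commands/say_hello.rb: a sketch modeled on the README example.
LLM.command "say-hello" do |cmd|
  cmd.description "Greets the given name"   # assumed here; shown with cmd.register in the bundled commands
  cmd.define do |name|
    io.rewind.print("Hello #{name}")        # io is provided by llm-shell, per the @example in extension.rb
  end
end
```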
data/bin/llm-shell
CHANGED

data/lib/llm/shell/command/extension.rb
CHANGED
@@ -1,6 +1,17 @@
 # frozen_string_literal: true
 
 class LLM::Shell::Command
+  ##
+  # The {LLM::Shell::Command::Extension LLM::Shell::Command::Extension}
+  # module extends the `LLM` constant with methods that can provide shell
+  # commands for an active llm-shell session.
+  #
+  # @example hello command
+  #   LLM.command(:hello) do |cmd|
+  #     cmd.define do |name|
+  #       io.rewind.print("Hello #{name}")
+  #     end
+  #   end
   module Extension
     ##
     # @example
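Two command registration styles appear in this release: a block style where the body is supplied with `cmd.define` (as in the @example above), and a class style where a command class hands itself to `cmd.register` and llm-shell instantiates it with a context. A compact sketch of both, using names taken from the diffs in this release:

```ruby
# Block style, from the @example above.
LLM.command(:hello) do |cmd|
  cmd.define do |name|
    io.rewind.print("Hello #{name}")
  end
end

# Class style, as used by the bundled commands below; there the call is
# cmd.register(self) from inside the class body, which is the same thing.
LLM.command "clear-screen" do |cmd|
  cmd.description "Clears the screen"
  cmd.register(LLM::Shell::Command::ClearScreen)
end
```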
data/lib/llm/shell/command.rb
CHANGED

data/lib/llm/shell/commands/clear_screen.rb
ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+class LLM::Shell::Command
+  class ClearScreen
+    require_relative "utils"
+    include Utils
+
+    ##
+    # @param [LLM::Shell::Context] context
+    #  The context of the command
+    # @return [LLM::Shell::Command::ClearScreen]
+    def initialize(context)
+      @context = context
+    end
+
+    ##
+    # Clears the screen
+    # @return [void]
+    def call = clear_screen
+
+    private
+
+    def console = IO.console
+    def clear_screen = console.clear_screen
+
+    LLM.command "clear-screen" do |cmd|
+      cmd.description "Clears the screen"
+      cmd.register(self)
+    end
+  end
+end
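The bundled commands added in 0.7.0 all share this shape: an `initialize(context)` that stores the context (which exposes `bot` and `io` via `Utils`), a `call` method that does the work, and a trailing `LLM.command` registration inside the class body. A minimal sketch of a third-party command in the same style; the `print-time` name and `PrintTime` class are hypothetical:

```ruby
# Hypothetical ~/.llm-shell/commands/print_time.rb, following the ClearScreen pattern above.
class LLM::Shell::Command
  class PrintTime
    # context is supplied by llm-shell when the command is invoked
    def initialize(context)
      @context = context
    end

    # Runs when the user types "print-time" at the llm> prompt
    def call = puts Time.now

    LLM.command "print-time" do |cmd|
      cmd.description "Print the current time"
      cmd.register(self)
    end
  end
end
```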
data/lib/llm/shell/commands/dir_import.rb
CHANGED
@@ -2,6 +2,9 @@
 
 class LLM::Shell::Command
   class DirImport
+    require_relative "utils"
+    include Utils
+
     ##
     # Completes a path with a wildcard.
     # @param path [String]
@@ -37,21 +40,9 @@ class LLM::Shell::Command
 
     private
 
-
-
-
-        "--- START: #{file} ---",
-        File.read(file),
-        "--- END: #{file} ---"
-      ].join("\n")
+    LLM.command "dir-import" do |cmd|
+      cmd.description "Share the contents of a directory with the LLM"
+      cmd.register(self)
     end
-
-    def bot = @context.bot
-    def io = @context.io
-  end
-
-  LLM.command "dir-import" do |cmd|
-    cmd.description "Share the contents of a directory with the LLM"
-    cmd.register(DirImport)
   end
 end
data/lib/llm/shell/commands/file_import.rb
CHANGED
@@ -2,6 +2,9 @@
 
 class LLM::Shell::Command
   class FileImport
+    require_relative "utils"
+    include Utils
+
     ##
     # Completes a path with a wildcard.
     # @param path [String]
@@ -29,21 +32,9 @@ class LLM::Shell::Command
 
     private
 
-
-
-
-        "--- START: #{file} ---",
-        File.read(file),
-        "--- END: #{file} ---"
-      ].join("\n")
+    LLM.command "file-import" do |cmd|
+      cmd.description "Share one or more files with the LLM"
+      cmd.register(self)
     end
-
-    def bot = @context.bot
-    def io = @context.io
-  end
-
-  LLM.command "file-import" do |cmd|
-    cmd.description "Share one or more files with the LLM"
-    cmd.register(FileImport)
   end
 end
data/lib/llm/shell/commands/show_history.rb
ADDED
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+class LLM::Shell::Command
+  class ShowHistory
+    require_relative "utils"
+    include Utils
+
+    ##
+    # @param [LLM::Shell::Context] context
+    #  The context of the command
+    # @return [LLM::Shell::Command::ShowHistory]
+    def initialize(context)
+      @context = context
+    end
+
+    ##
+    # Emits the full chat history to standard output
+    # @return [void]
+    def call
+      clear_screen
+      emit
+    end
+
+    private
+
+    def emit
+      IO.popen("less -FRX", "w") do |io|
+        messages.each.with_index do |message, index|
+          next if index <= 1
+          io << render(message) << "\n"
+        end
+      end
+    end
+
+    def console = IO.console
+    def clear_screen = console.clear_screen
+    def messages = bot.messages
+    def render(message) = LLM::Shell::Renderer.new(message).render
+
+    LLM.command "show-history" do |cmd|
+      cmd.description "Show the full chat history"
+      cmd.register(self)
+    end
+  end
+end
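ShowHistory pages the rendered history through `less` rather than printing it directly, skipping the first two messages (`index <= 1`, i.e. the system prompt and the message that follows it) so only the visible conversation is shown. A standalone illustration of the paging idiom it uses:

```ruby
# Paging output through less, as ShowHistory does above. The flags: -F quits
# immediately if the output fits on one screen, -R passes ANSI color codes
# through, and -X skips termcap init/deinit so the screen isn't cleared on exit.
IO.popen("less -FRX", "w") do |io|
  1.upto(100) { |i| io << "line #{i}\n" }
end
```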
data/lib/llm/shell/commands/system_prompt.rb
ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+class LLM::Shell::Command
+  class SystemPrompt
+    require_relative "utils"
+    include Utils
+
+    ##
+    # @param [LLM::Shell::Context] context
+    #  The context of the command
+    # @return [LLM::Shell::Command::SystemPrompt]
+    def initialize(context)
+      @context = context
+    end
+
+    ##
+    # Emits the system prompt to standard output
+    # @return [void]
+    def call = puts render(bot.messages.to_a[0])
+
+    private
+
+    def render(message) = LLM::Shell::Renderer.new(message).render
+
+    LLM.command "system-prompt" do |cmd|
+      cmd.description "Show the system prompt"
+      cmd.register(self)
+    end
+  end
+end
data/lib/llm/shell/commands/utils.rb
ADDED
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+class LLM::Shell::Command
+  module Utils
+    private
+
+    def import(file)
+      return unless File.file?(file)
+      bot.chat [
+        "<file path=\"#{file}\">",
+        File.read(file),
+        "</file>"
+      ].join("\n")
+    end
+
+    def file_pattern = /\A<file path=(.+?)>/
+    def bot = @context.bot
+    def io = @context.io
+  end
+end
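Utils centralizes what dir-import and file-import previously duplicated: `import` now wraps a shared file in `<file path="...">` tags (the format the new default prompt describes) instead of the old `--- START/END ---` markers, and `file_pattern` lets Formatter and Renderer recognize such messages later. A small sketch of the payload and the match, with an illustrative path:

```ruby
# Illustrative only: what Utils#import sends for a given file, and how
# file_pattern recognizes it afterwards.
file = "/tmp/example.txt"
payload = [
  "<file path=\"#{file}\">",
  "line one\nline two",   # stands in for File.read(file)
  "</file>"
].join("\n")

file_pattern = /\A<file path=(.+?)>/
payload =~ file_pattern   #=> 0, so the message is treated as a file import
Regexp.last_match[1]      #=> "\"/tmp/example.txt\"" (the quotes are part of the capture)
```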
data/lib/llm/shell/default.rb
CHANGED
data/lib/llm/shell/formatter.rb
CHANGED
@@ -3,7 +3,7 @@
 class LLM::Shell
   class Formatter
     FormatError = Class.new(RuntimeError)
-
+    include Command::Utils
 
     def initialize(messages)
       @messages = messages.reject(&:tool_call?)
@@ -19,17 +19,12 @@ class LLM::Shell
 
     private
 
-    attr_reader :messages
-
     def format_user(messages)
       messages.filter_map do |message|
         next unless message.user?
         next unless String === message.content
-        next unless message.content !~
-
-        title = "#{role} says: "
-        body = wrap(message.tap(&:read!).content)
-        [title, "\n", render(body), "\n"].join
+        next unless message.content !~ file_pattern
+        render(message.tap(&:read!))
       end.join("\n")
     end
 
@@ -37,19 +32,11 @@ class LLM::Shell
       messages.filter_map do |message|
         next unless message.assistant?
        next unless String === message.content
-
-        title = "#{role} says: "
-        body = wrap(message.tap(&:read!).content)
-        [title, "\n", render(body)].join
+        render(message.tap(&:read!))
      end.join("\n")
     end
 
-
-
-    end
-
-    def wrap(text, width = 80)
-      text.gsub(/(.{1,#{width}})(\s+|\Z)/, "\\1\n")
-    end
+    attr_reader :messages
+    def render(message) = Renderer.new(message).render
   end
 end
data/lib/llm/shell/options.rb
CHANGED
@@ -15,6 +15,7 @@ class LLM::Shell
       @options = options.transform_keys(&:to_sym)
       @provider = @options.delete(:provider)
       @tools = @options.delete(:tools)
+      @prompt = @options[:prompt] ? custom_prompt : default.prompt
       @files = Dir[*@options.delete(:files) || []].reject { File.directory?(_1) }
       @bot_options = {model: @options.delete(:model)}.compact
       @default = default
@@ -26,6 +27,13 @@ class LLM::Shell
     def llm = @options
     def bot = @bot_options
     def default = @default
-    def prompt =
+    def prompt = File.read(@prompt)
+
+    private
+
+    def custom_prompt
+      prompt = @options.delete(:prompt)
+      File.join(LLM::Shell.home, "prompts", prompt)
+    end
   end
 end
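With the new `--prompt` option, Options resolves a prompt name against `~/.llm-shell/prompts/` (via `LLM::Shell.home`) and falls back to the bundled default otherwise; `prompt` then returns the file's contents. A rough restatement of that resolution, with a hypothetical helper name:

```ruby
# Hypothetical helper that mirrors the logic added above; `home` would be
# LLM::Shell.home and `default_path` the bundled default.txt.
def resolve_prompt(options, default_path, home)
  name = options[:prompt]
  path = name ? File.join(home, "prompts", name) : default_path
  File.read(path)
end

# llm-shell -r my_prompt.txt  => reads ~/.llm-shell/prompts/my_prompt.txt
# llm-shell                   => reads the bundled default prompt
```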
data/lib/llm/shell/renderer.rb
ADDED
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+class LLM::Shell
+  class Renderer
+    RenderError = Class.new(RuntimeError)
+    include Command::Utils
+
+    ##
+    # @param [LLM::Message] message
+    #  The message to render
+    # @return [LLM::Shell::MessageRenderer]
+    #  Returns an instance of the renderer
+    def initialize(message)
+      @message = message
+    end
+
+    def render
+      if message.user?
+        render_message(message, :yellow)
+      elsif message.assistant?
+        render_message(message, :green)
+      elsif message.system?
+        render_message(message, :red)
+      else
+        raise RenderError.new("no handler for message role '#{message.role}'")
+      end
+    end
+
+    private
+
+    def render_message(message, color)
+      role = Paint[message.role, :bold, color]
+      title = "#{role} says: "
+      if message.content =~ file_pattern
+        path = message.content.match(file_pattern) ? Regexp.last_match[1] : nil
+        body = "<file path=#{path} />"
+      else
+        body = markdown(wrap(message.content))
+      end
+      [title, "\n", body, "\n"].join
+    end
+
+    attr_reader :message
+    def markdown(text) = Markdown.new(text).to_ansi
+    def wrap(text, width = 80) = text.gsub(/(.{1,#{width}})(\s+|\Z)/, "\\1\n")
+  end
+end
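Renderer now owns the per-message presentation that Formatter previously did inline: the role is colored with Paint (yellow for user, green for assistant, red for system), imported files are collapsed to a short `<file path=... />` marker, and everything else is wrapped at 80 columns and converted from markdown to ANSI. A usage sketch; the message object is assumed to come from the bot's history, so this is illustrative rather than runnable on its own:

```ruby
# Illustrative usage; `bot` is the llm.rb chat object used elsewhere in llm-shell.
message = bot.messages.to_a.last                  # e.g. the latest assistant reply
puts LLM::Shell::Renderer.new(message).render
# Prints "assistant says: " in bold green, then the content wrapped at 80
# columns and converted from markdown to ANSI escape codes.
```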
data/lib/llm/shell/repl.rb
CHANGED
@@ -24,7 +24,7 @@ class LLM::Shell
       LLM::Shell.commands.each { |file| require file }
       Readline.completion_proc = Completion.to_proc
       chat options.prompt, role: options.default.role
-      files.each {
+      files.each { chat ["--- START: #{_1} ---", File.read(_1), "--- END: #{_1} ---"].join("\n") }
       bot.messages.each(&:read!)
       clear_screen
     end
@@ -37,7 +37,7 @@ class LLM::Shell
       read
       eval
       emit
-    rescue LLM::
+    rescue LLM::ResponseError => ex
       print Paint[ex.response.class, :red], "\n"
       print ex.response.body, "\n"
     rescue => ex
@@ -51,16 +51,6 @@ class LLM::Shell
 
     private
 
-    attr_reader :bot, :console,
-                :io, :default,
-                :options
-
-    def formatter(messages) = Formatter.new(messages)
-    def unread = bot.messages.unread
-    def functions = bot.functions
-    def files = @options.files
-    def clear_screen = console.clear_screen
-
     def read
       input = Readline.readline("llm> ", true) || throw(:exit, 0)
       words = input.split(" ")
@@ -85,11 +75,11 @@ class LLM::Shell
         input = $stdin.gets.chomp.downcase
         puts
         if %w(y yes yep yeah ok).include?(input)
-
+          chat function.call
           unread.tap { io.rewind }
         else
-
-
+          chat function.cancel
+          chat "I decided to not run the function this time. Maybe next time."
         end
       end
     end
@@ -98,14 +88,16 @@ class LLM::Shell
       IO.popen("less -FRX", "w") do
         _1.write formatter(unread).format!(:user), "\n"
         _1.write formatter(unread).format!(:assistant), "\n"
-      end
+      end unless unread.empty?
     end
 
-
-
-
-
-
-
+    attr_reader :bot, :console, :io, :default, :options
+
+    def formatter(messages) = Formatter.new(messages)
+    def unread = bot.messages.unread
+    def functions = bot.functions
+    def files = @options.files
+    def clear_screen = console.clear_screen
+    def chat(...) = bot.chat(...)
   end
 end
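The repl's private helpers were consolidated at the bottom of the class, and a new `def chat(...) = bot.chat(...)` delegator was added so the setup code and the tool-call confirmation path can call `chat` directly. A standalone illustration of that `...` argument-forwarding pattern:

```ruby
# Ruby 3 "..." forwarding in an endless method, as used by the repl above.
class Delegator
  def initialize(target)
    @target = target
  end

  # Forwards positional arguments, keyword arguments, and a block unchanged.
  def chat(...) = @target.chat(...)
end
```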
data/lib/llm/shell/version.rb
CHANGED
data/lib/llm/shell.rb
CHANGED
@@ -11,6 +11,7 @@ class LLM::Shell
   require_relative "shell/command"
   require_relative "shell/command/extension"
   require_relative "shell/markdown"
+  require_relative "shell/renderer"
   require_relative "shell/formatter"
   require_relative "shell/default"
   require_relative "shell/options"
data/libexec/llm-shell/shell
CHANGED
@@ -28,12 +28,19 @@ def option_parser
     o.on("-o [PORT]", "--port [PORT]", "Optional. Sometimes required by ollama.", Integer)
     o.on("-f [GLOB]", "--files [GLOB]", "Optional. Glob pattern(s) separated by a comma.", Array)
     o.on("-t [TOOLS]", "--tools [TOOLS]", "Optional. One or more tool names to load automatically.", Array)
+    o.on("-r [PROMPT]", "--prompt [PROMPT]", "Optional. The prompt to use.", String)
     o.on("-v", "--version", "Optional. Print the version and exit.")
   end
 end
 
-excode = catch(:exit)
+excode = catch(:exit) do
   main(ARGV)
   0
-
+rescue => ex
+  print Paint[ex.class, :red, :bold], "\n"
+  print ex.message, "\n\n"
+  print Paint["Backtrace", :bold], "\n"
+  print ex.backtrace[0..5].join("\n"), "\n"
+  1
+end
 exit excode
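The boot script now wraps `main(ARGV)` in a `catch(:exit) do ... end` block with an inline `rescue`, so the repl's `throw(:exit, 0)` (used on EOF at the `llm>` prompt) still produces a clean exit code while unexpected exceptions print a short backtrace and exit with 1. A stripped-down illustration of the catch/throw/rescue shape:

```ruby
# Minimal sketch of the exit protocol used above; the --quit flag is only an
# illustrative trigger standing in for throw(:exit, 0) inside the repl.
excode = catch(:exit) do
  throw(:exit, 0) if ARGV.include?("--quit")
  0
rescue => ex
  warn "#{ex.class}: #{ex.message}"
  1
end
exit excode
```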
data/share/llm-shell/prompts/default.txt
CHANGED
@@ -1,27 +1,21 @@
-
+## Task
 
-
+Your task is to provide helpful assistance to the user.
 
-
-Answer the user's questions as best as you can.
+## Context
 
-The user
-Provide short and concise answers that are suitable for a terminal.
-Do not provide long answers.
+The user is talking to you from a terminal-based application.
 
-##
+## Inputs
 
-
-
-*IF* a file is provided, it will be in this format:
+The user might send you the content of file(s) from their filesystem.
+When the user sends you a file, it will be in this format:
 
-
-
-
+<file path="/path/to/file">
+CONTENT
+</file>
 
-
-On receipt of one or more files, you will respond with: Got it. And with nothing else.
+## Outputs
 
-
-
-Respond in markdown.
+Always respond in markdown.
+When the user sends a file, reply with "Got it" and nothing else.
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: llm-shell
 version: !ruby/object:Gem::Version
-  version: 0.6.0
+  version: 0.7.0
 platform: ruby
 authors:
 - Antar Azri
@@ -223,14 +223,19 @@ files:
 - lib/llm/shell.rb
 - lib/llm/shell/command.rb
 - lib/llm/shell/command/extension.rb
+- lib/llm/shell/commands/clear_screen.rb
 - lib/llm/shell/commands/dir_import.rb
 - lib/llm/shell/commands/file_import.rb
+- lib/llm/shell/commands/show_history.rb
+- lib/llm/shell/commands/system_prompt.rb
+- lib/llm/shell/commands/utils.rb
 - lib/llm/shell/completion.rb
 - lib/llm/shell/config.rb
 - lib/llm/shell/default.rb
 - lib/llm/shell/formatter.rb
 - lib/llm/shell/markdown.rb
 - lib/llm/shell/options.rb
+- lib/llm/shell/renderer.rb
 - lib/llm/shell/repl.rb
 - lib/llm/shell/version.rb
 - libexec/llm-shell/shell