llm-shell 0.7.2 → 0.9.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/README.md +22 -13
- data/lib/llm/function.rb +17 -0
- data/lib/llm/shell/command.rb +14 -0
- data/lib/llm/shell/commands/clear_screen.rb +2 -1
- data/lib/llm/shell/commands/dir_import.rb +2 -1
- data/lib/llm/shell/commands/file_import.rb +2 -1
- data/lib/llm/shell/commands/help.rb +20 -12
- data/lib/llm/shell/commands/{show_history.rb → show_chat.rb} +5 -4
- data/lib/llm/shell/commands/system_prompt.rb +1 -0
- data/lib/llm/shell/functions/read_file.rb +22 -0
- data/lib/llm/shell/functions/write_file.rb +22 -0
- data/lib/llm/shell/markdown.rb +54 -61
- data/lib/llm/shell/renderer.rb +14 -9
- data/lib/llm/shell/repl.rb +9 -6
- data/lib/llm/shell/version.rb +1 -1
- data/lib/llm/shell.rb +10 -9
- data/libexec/llm-shell/shell +0 -1
- data/llm-shell.gemspec +3 -2
- metadata +25 -8
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 5a36478ef62c7e7db413c26a66d77da788a7135846d27e200dc236ee2480b2af
|
4
|
+
data.tar.gz: 940ecf35df53edd106f0750c3ca6467482ff59c18e45ae05702b9e60a8adff03
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: c36710535f7e59346fbc85dba3e90e1ad6ef4ea72ac2d799d743c8c9f482ac85172962c817e3817898f1c8e51f64e9cf5c6172f8e48377f647b4a5ba033f8531
|
7
|
+
data.tar.gz: 568944aaf6e69b314bc39714dd2d33896943e9716556f75e1bdb9ac38dc415fdb933943b24c7fc346a9d2ef4b904875ee74d3187e0f5c34616841215d477242f
|
data/README.md
CHANGED
@@ -12,7 +12,7 @@ it in action.
|
|
12
12
|
#### General
|
13
13
|
|
14
14
|
- 🌟 Unified interface for multiple Large Language Models (LLMs)
|
15
|
-
- 🤝 Supports Gemini, OpenAI, Anthropic, DeepSeek, LlamaCpp and Ollama
|
15
|
+
- 🤝 Supports Gemini, OpenAI, Anthropic, xAI (grok), DeepSeek, LlamaCpp and Ollama
|
16
16
|
|
17
17
|
#### Customize
|
18
18
|
|
@@ -36,7 +36,12 @@ it in action.
|
|
36
36
|
|
37
37
|
<details>
|
38
38
|
<summary><b>2. Add files as conversation context</b></summary>
|
39
|
-
<img src="share/llm-shell/examples/files
|
39
|
+
<img src="share/llm-shell/examples/files.gif">
|
40
|
+
</details>
|
41
|
+
|
42
|
+
<details>
|
43
|
+
<summary><b>3. Advanced features: markdown, syntax highlighting</b></summary>
|
44
|
+
<img src="share/llm-shell/examples/codegen.gif">
|
40
45
|
</details>
|
41
46
|
|
42
47
|
## Customization
|
@@ -44,15 +49,18 @@ it in action.
|
|
44
49
|
#### Functions
|
45
50
|
|
46
51
|
> For security and safety reasons, a user must confirm the execution of
|
47
|
-
> all function calls before they happen
|
48
|
-
|
49
|
-
|
52
|
+
> all function calls before they happen
|
53
|
+
|
54
|
+
llm-shell can be extended with your own functions (also known as tool calls).
|
55
|
+
This can be done by creating a Ruby file in the `~/.llm-shell/functions/`
|
56
|
+
directory – with one file per function. The functions are
|
57
|
+
loaded at boot time. The functions are shared with the LLM and the LLM
|
58
|
+
can request their execution. The LLM is also made aware of a function's
|
59
|
+
return value after it has been called.
|
60
|
+
See the
|
61
|
+
[functions/](lib/llm/shell/functions/)
|
62
|
+
directory for more examples:
|
50
63
|
|
51
|
-
The `~/.llm-shell/tools/` directory can contain one or more
|
52
|
-
[llmrb/llm](https://github.com/llmrb/llm) functions that the
|
53
|
-
LLM can call once you confirm you are okay with executing the
|
54
|
-
code locally (along with any arguments it provides). See the
|
55
|
-
earlier demo for an example:
|
56
64
|
|
57
65
|
```ruby
|
58
66
|
LLM.function(:system) do |fn|
|
@@ -60,10 +68,10 @@ LLM.function(:system) do |fn|
|
|
60
68
|
fn.params do |schema|
|
61
69
|
schema.object(command: schema.string.required)
|
62
70
|
end
|
63
|
-
fn.define do |
|
71
|
+
fn.define do |command:|
|
64
72
|
ro, wo = IO.pipe
|
65
73
|
re, we = IO.pipe
|
66
|
-
Process.wait Process.spawn(
|
74
|
+
Process.wait Process.spawn(command, out: wo, err: we)
|
67
75
|
[wo,we].each(&:close)
|
68
76
|
{stderr: re.read, stdout: ro.read}
|
69
77
|
end
|
@@ -123,6 +131,8 @@ gemini:
|
|
123
131
|
key: YOURKEY
|
124
132
|
anthropic:
|
125
133
|
key: YOURKEY
|
134
|
+
xai:
|
135
|
+
key: YOURKEY
|
126
136
|
deepseek:
|
127
137
|
key: YOURKEY
|
128
138
|
ollama:
|
@@ -147,7 +157,6 @@ Usage: llm-shell [OPTIONS]
|
|
147
157
|
-h, --host [HOST] Optional. Sometimes required by ollama.
|
148
158
|
-o, --port [PORT] Optional. Sometimes required by ollama.
|
149
159
|
-f, --files [GLOB] Optional. Glob pattern(s) separated by a comma.
|
150
|
-
-t, --tools [TOOLS] Optional. One or more tool names to load automatically.
|
151
160
|
-r, --prompt [PROMPT] Optional. The prompt to use.
|
152
161
|
-v, --version Optional. Print the version and exit
|
153
162
|
```
|
data/lib/llm/function.rb
ADDED
@@ -0,0 +1,17 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
class LLM::Function
|
4
|
+
##
|
5
|
+
# Returns true when a function is a built-in function
|
6
|
+
# @return [Boolean]
|
7
|
+
def builtin?
|
8
|
+
@builtin
|
9
|
+
end
|
10
|
+
|
11
|
+
##
|
12
|
+
# Mark a function as a built-in function
|
13
|
+
# @return [void]
|
14
|
+
def builtin!
|
15
|
+
@builtin = true
|
16
|
+
end
|
17
|
+
end
|
data/lib/llm/shell/command.rb
CHANGED
@@ -70,5 +70,19 @@ class LLM::Shell
|
|
70
70
|
@context.instance_exec(*argv, &@object)
|
71
71
|
end
|
72
72
|
end
|
73
|
+
|
74
|
+
##
|
75
|
+
# @return [Boolean]
|
76
|
+
# Returns true if this is a builtin command
|
77
|
+
def builtin?
|
78
|
+
@builtin
|
79
|
+
end
|
80
|
+
|
81
|
+
##
|
82
|
+
# Mark this command as builtin command
|
83
|
+
# @return [void]
|
84
|
+
def builtin!
|
85
|
+
@builtin = true
|
86
|
+
end
|
73
87
|
end
|
74
88
|
end
|
@@ -41,8 +41,9 @@ class LLM::Shell::Command
|
|
41
41
|
private
|
42
42
|
|
43
43
|
LLM.command "dir-import" do |cmd|
|
44
|
-
cmd.description "Share
|
44
|
+
cmd.description "Share a directory with the LLM"
|
45
45
|
cmd.register(self)
|
46
|
+
cmd.builtin!
|
46
47
|
end
|
47
48
|
end
|
48
49
|
end
|
@@ -27,32 +27,40 @@ class LLM::Shell::Command
|
|
27
27
|
|
28
28
|
def render_commands(io)
|
29
29
|
io.print(Paint["Commands", :bold, :underline], "\n\n")
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
30
|
+
io.print(Paint["Builtin", :bold], "\n\n")
|
31
|
+
render_group commands.select(&:builtin?), io, :cyan
|
32
|
+
io.print(Paint["User", :bold], "\n\n")
|
33
|
+
render_group commands.reject(&:builtin?), io, :cyan
|
34
34
|
end
|
35
35
|
|
36
36
|
def render_functions(io)
|
37
37
|
io.print(Paint["Functions", :bold, :underline], "\n\n")
|
38
|
-
|
39
|
-
|
38
|
+
io.print(Paint["Builtin", :bold], "\n\n")
|
39
|
+
render_group functions.select(&:builtin?), io, :blue
|
40
|
+
io.print(Paint["User", :bold], "\n\n")
|
41
|
+
render_group functions.reject(&:builtin?), io, :blue
|
42
|
+
end
|
43
|
+
|
44
|
+
def render_group(commands, io, bgcolor)
|
45
|
+
if commands.empty?
|
46
|
+
io.print(Paint["None available", :yellow], "\n\n")
|
40
47
|
else
|
41
|
-
|
42
|
-
io.print(
|
43
|
-
io.print(
|
48
|
+
commands.each.with_index(1) do |command, index|
|
49
|
+
io.print(name(command, index, bgcolor), "\n")
|
50
|
+
io.print(desc(command), "\n\n")
|
44
51
|
end
|
45
52
|
end
|
46
53
|
end
|
47
54
|
|
48
55
|
def commands = LLM.commands.values.sort_by(&:name)
|
49
56
|
def functions = LLM.functions.values.sort_by(&:name)
|
50
|
-
def
|
51
|
-
def
|
57
|
+
def name(command, index, bgcolor) = [Paint[" #{index} ", :white, bgcolor, :bold], " ", Paint[command.name, :bold]].join
|
58
|
+
def desc(command) = command.description || "No description"
|
52
59
|
|
53
60
|
LLM.command "help" do |cmd|
|
54
|
-
cmd.description "
|
61
|
+
cmd.description "Show the help menu"
|
55
62
|
cmd.register(self)
|
63
|
+
cmd.builtin!
|
56
64
|
end
|
57
65
|
end
|
58
66
|
end
|
@@ -1,14 +1,14 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
3
|
class LLM::Shell::Command
|
4
|
-
class
|
4
|
+
class ShowChat
|
5
5
|
require_relative "utils"
|
6
6
|
include Utils
|
7
7
|
|
8
8
|
##
|
9
9
|
# @param [LLM::Shell::Context] context
|
10
10
|
# The context of the command
|
11
|
-
# @return [LLM::Shell::Command::
|
11
|
+
# @return [LLM::Shell::Command::ShowChat]
|
12
12
|
def initialize(context)
|
13
13
|
@context = context
|
14
14
|
end
|
@@ -37,9 +37,10 @@ class LLM::Shell::Command
|
|
37
37
|
def messages = bot.messages
|
38
38
|
def render(message) = LLM::Shell::Renderer.new(message).render
|
39
39
|
|
40
|
-
LLM.command "show-
|
41
|
-
cmd.description "Show the
|
40
|
+
LLM.command "show-chat" do |cmd|
|
41
|
+
cmd.description "Show the chat"
|
42
42
|
cmd.register(self)
|
43
|
+
cmd.builtin!
|
43
44
|
end
|
44
45
|
end
|
45
46
|
end
|
@@ -0,0 +1,22 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module LLM::Shell::Functions
|
4
|
+
class ReadFile
|
5
|
+
def call(path:)
|
6
|
+
{ok: true, content: File.read(path)}
|
7
|
+
rescue => ex
|
8
|
+
{ok: false, error: {class: ex.class.to_s, message: ex.message}}
|
9
|
+
end
|
10
|
+
|
11
|
+
private
|
12
|
+
|
13
|
+
LLM.function(:read_file) do |fn|
|
14
|
+
fn.description "Read the contents of a file"
|
15
|
+
fn.params do |schema|
|
16
|
+
schema.object(path: schema.string.required)
|
17
|
+
end
|
18
|
+
fn.register(self)
|
19
|
+
fn.builtin!
|
20
|
+
end
|
21
|
+
end
|
22
|
+
end
|
@@ -0,0 +1,22 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
module LLM::Shell::Functions
|
4
|
+
class WriteFile
|
5
|
+
def call(path:, content:)
|
6
|
+
{ok: true, content: File.binwrite(path, content)}
|
7
|
+
rescue => ex
|
8
|
+
{ok: false, error: {class: ex.class.to_s, message: ex.message}}
|
9
|
+
end
|
10
|
+
|
11
|
+
private
|
12
|
+
|
13
|
+
LLM.function(:write_file) do |fn|
|
14
|
+
fn.description "Write the contents of a file"
|
15
|
+
fn.params do |schema|
|
16
|
+
schema.object(path: schema.string.required, content: schema.string.required)
|
17
|
+
end
|
18
|
+
fn.register(self)
|
19
|
+
fn.builtin!
|
20
|
+
end
|
21
|
+
end
|
22
|
+
end
|
data/lib/llm/shell/markdown.rb
CHANGED
@@ -1,71 +1,71 @@
|
|
1
1
|
# frozen_string_literal: true
|
2
2
|
|
3
|
-
|
4
|
-
|
5
|
-
require "kramdown"
|
6
|
-
require "coderay"
|
3
|
+
require "redcarpet"
|
4
|
+
require "coderay"
|
7
5
|
|
6
|
+
class LLM::Shell
|
7
|
+
##
|
8
|
+
# @api private
|
9
|
+
# @see redcarpet https://github.com/vmg/redcarpet/blob/master/ext/redcarpet/markdown.h#L69-L110
|
10
|
+
class Markdown < Redcarpet::Render::Base
|
8
11
|
##
|
9
|
-
#
|
10
|
-
# @return [
|
11
|
-
def
|
12
|
-
|
12
|
+
# Renders markdown text to a terminal-friendly format.
|
13
|
+
# @return [String]
|
14
|
+
def self.render(text)
|
15
|
+
renderer = Redcarpet::Markdown.new(self, options)
|
16
|
+
renderer.render(wrap(p: text)).strip
|
13
17
|
end
|
14
18
|
|
15
19
|
##
|
16
|
-
# @
|
17
|
-
def
|
18
|
-
|
20
|
+
# @api private
|
21
|
+
def self.wrap(p:, width: 80)
|
22
|
+
in_code = false
|
23
|
+
p.lines.map do |line|
|
24
|
+
if line =~ /^(\s*)(```|~~~)/
|
25
|
+
in_code = !in_code
|
26
|
+
line
|
27
|
+
elsif in_code || line =~ /^\s{4}/
|
28
|
+
line
|
29
|
+
else
|
30
|
+
line.gsub(/(.{1,#{width}})(\s+|\Z)/, "\\1\n")
|
31
|
+
end
|
32
|
+
end.join.strip + "\n"
|
19
33
|
end
|
20
34
|
|
21
|
-
|
35
|
+
##
|
36
|
+
# @api private
|
37
|
+
def self.options
|
38
|
+
{
|
39
|
+
autolink: false, no_intra_emphasis: true,
|
40
|
+
fenced_code_blocks: true, lax_spacing: true,
|
41
|
+
strikethrough: true, superscript: true,
|
42
|
+
tables: true, with_toc_data: true
|
43
|
+
}
|
44
|
+
end
|
22
45
|
|
23
|
-
def
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
color = levels[level]
|
28
|
-
Paint[("#" * level) + " " + node.children.map { visit(_1) }.join, color]
|
29
|
-
when :p
|
30
|
-
node.children.map { visit(_1) }.join
|
31
|
-
when :ul
|
32
|
-
node.children.map { visit(_1) }.join("\n")
|
33
|
-
when :li
|
34
|
-
"• " + node.children.map { visit(_1) }.join
|
35
|
-
when :em
|
36
|
-
Paint[node.children.map { visit(_1) }.join, :italic]
|
37
|
-
when :strong
|
38
|
-
Paint[node.children.map { visit(_1) }.join, :bold]
|
39
|
-
when :br
|
40
|
-
"\n"
|
41
|
-
when :codespan, :codeblock
|
42
|
-
lines = node.value.each_line.to_a
|
43
|
-
lang = lines[0].strip
|
44
|
-
code = lines[1..].join
|
45
|
-
if lines.size == 1
|
46
|
-
Paint[node.value, :italic]
|
47
|
-
else
|
48
|
-
["\n", Paint[">>> #{lang}", :blue, :bold],
|
49
|
-
"\n\n", coderay(code, lang),
|
50
|
-
"\n", Paint["<<< #{lang}", :blue, :bold]].join
|
51
|
-
end
|
52
|
-
when :smart_quote
|
53
|
-
smart_quotes[node.value]
|
54
|
-
when :text
|
55
|
-
node.value
|
56
|
-
else
|
57
|
-
node.children.map { visit(_1) }.join
|
58
|
-
end
|
46
|
+
def block_code(code, lang)
|
47
|
+
["\n", Paint["#{lang}:", :blue, :bold],
|
48
|
+
"\n", coderay(code, lang),
|
49
|
+
"\n"].join
|
59
50
|
end
|
60
51
|
|
61
|
-
def
|
62
|
-
|
63
|
-
|
64
|
-
.gsub(/(#+ .+?)\n(?!\n)/, "\\1\n\n")
|
65
|
-
.gsub(/\A<think>[\n]*<\/think>(?:\n)/, "")
|
66
|
-
.gsub(/\A\n{2,}/, "")
|
52
|
+
def header(text, level)
|
53
|
+
color = levels.fetch(level, :white)
|
54
|
+
"\n" + Paint[("#" * level) + " " + text, color] + "\n"
|
67
55
|
end
|
68
56
|
|
57
|
+
def paragraph(p) = "#{p.strip}\n\n"
|
58
|
+
def list(items, _type) = items
|
59
|
+
def list_item(item, _type) = "\n• #{item.strip}\n"
|
60
|
+
def emphasis(text) = Paint[text, :italic]
|
61
|
+
def double_emphasis(text) = Paint[text, :bold]
|
62
|
+
def codespan(code) = Paint[code, :yellow, :underline]
|
63
|
+
def block_quote(quote) = Paint[quote, :italic]
|
64
|
+
def normal_text(text) = text
|
65
|
+
def linebreak = "\n"
|
66
|
+
|
67
|
+
private
|
68
|
+
|
69
69
|
def coderay(code, lang)
|
70
70
|
CodeRay.scan(code, lang).terminal
|
71
71
|
rescue ArgumentError
|
@@ -79,12 +79,5 @@ class LLM::Shell
|
|
79
79
|
4 => :yellow, 5 => :red, 6 => :purple
|
80
80
|
}
|
81
81
|
end
|
82
|
-
|
83
|
-
def smart_quotes
|
84
|
-
{
|
85
|
-
lsquo: "'", rsquo: "'",
|
86
|
-
ldquo: '"', rdquo: '"'
|
87
|
-
}
|
88
|
-
end
|
89
82
|
end
|
90
83
|
end
|
data/lib/llm/shell/renderer.rb
CHANGED
@@ -28,20 +28,25 @@ class LLM::Shell
|
|
28
28
|
|
29
29
|
private
|
30
30
|
|
31
|
+
attr_reader :message
|
32
|
+
|
31
33
|
def render_message(message, color)
|
32
|
-
role
|
33
|
-
title
|
34
|
-
|
35
|
-
|
34
|
+
role = Paint[message.role, :bold, color]
|
35
|
+
title = "#{role} says: "
|
36
|
+
content = message.content
|
37
|
+
if message.tool_call?
|
38
|
+
body = "Tool call(s) request"
|
39
|
+
elsif message.tool_return?
|
40
|
+
body = "Tool call(s) return"
|
41
|
+
elsif Array === content
|
42
|
+
body = content.map { |c| render_message(c, color) }.join("\n")
|
43
|
+
elsif content =~ file_pattern
|
44
|
+
path = content.match(file_pattern) ? Regexp.last_match[1] : nil
|
36
45
|
body = "<file path=#{path} />"
|
37
46
|
else
|
38
|
-
body
|
47
|
+
body = Markdown.render(content)
|
39
48
|
end
|
40
49
|
[title, "\n", body, "\n"].join
|
41
50
|
end
|
42
|
-
|
43
|
-
attr_reader :message
|
44
|
-
def markdown(text) = Markdown.new(text).to_ansi
|
45
|
-
def wrap(text, width = 80) = text.gsub(/(.{1,#{width}})(\s+|\Z)/, "\\1\n")
|
46
51
|
end
|
47
52
|
end
|
data/lib/llm/shell/repl.rb
CHANGED
@@ -67,21 +67,24 @@ class LLM::Shell
|
|
67
67
|
end
|
68
68
|
|
69
69
|
def eval
|
70
|
+
callables = []
|
71
|
+
cancels = []
|
72
|
+
results = []
|
70
73
|
functions.each do |function|
|
71
74
|
print Paint["system", :bold, :red], " says: ", "\n"
|
72
75
|
print "function: ", function.name, "\n"
|
73
76
|
print "arguments: ", function.arguments, "\n"
|
74
|
-
|
75
|
-
input = $stdin.gets.chomp.downcase
|
77
|
+
input = Readline.readline("Do you want to call it ? ", true)
|
76
78
|
puts
|
77
79
|
if %w(y yes yep yeah ok).include?(input)
|
78
|
-
|
79
|
-
unread.tap { io.rewind }
|
80
|
+
callables << function
|
80
81
|
else
|
81
|
-
|
82
|
-
chat "I decided to not run the function this time. Maybe next time."
|
82
|
+
cancels << function
|
83
83
|
end
|
84
84
|
end
|
85
|
+
results.concat callables.map(&:call)
|
86
|
+
results.concat cancels.map(&:cancel)
|
87
|
+
bot.chat(results)
|
85
88
|
end
|
86
89
|
|
87
90
|
def emit
|
data/lib/llm/shell/version.rb
CHANGED
data/lib/llm/shell.rb
CHANGED
@@ -7,6 +7,7 @@ require "llm"
|
|
7
7
|
require "paint"
|
8
8
|
|
9
9
|
class LLM::Shell
|
10
|
+
require_relative "function"
|
10
11
|
require_relative "../io/line"
|
11
12
|
require_relative "shell/command"
|
12
13
|
require_relative "shell/command/extension"
|
@@ -40,7 +41,7 @@ class LLM::Shell
|
|
40
41
|
##
|
41
42
|
# @return [Array<String>]
|
42
43
|
def self.tools
|
43
|
-
Dir[
|
44
|
+
Dir[*TOOLGLOBS]
|
44
45
|
end
|
45
46
|
|
46
47
|
##
|
@@ -49,6 +50,12 @@ class LLM::Shell
|
|
49
50
|
Dir[File.join(home, "commands", "*.rb")]
|
50
51
|
end
|
51
52
|
|
53
|
+
TOOLGLOBS = [
|
54
|
+
File.join(home, "tools", "*.rb"),
|
55
|
+
File.join(__dir__, "shell", "functions", "*.rb")
|
56
|
+
].freeze
|
57
|
+
private_constant :TOOLGLOBS
|
58
|
+
|
52
59
|
##
|
53
60
|
# @param [Hash] options
|
54
61
|
# @return [LLM::Shell]
|
@@ -70,14 +77,8 @@ class LLM::Shell
|
|
70
77
|
private
|
71
78
|
|
72
79
|
def tools
|
73
|
-
LLM::Shell.tools.
|
74
|
-
|
75
|
-
if options.tools.include?(name)
|
76
|
-
print Paint["llm-shell: ", :green], "load #{name} tool", "\n"
|
77
|
-
eval File.read(path), TOPLEVEL_BINDING, path, 1
|
78
|
-
else
|
79
|
-
print Paint["llm-shell: ", :yellow], "skip #{name} tool", "\n"
|
80
|
-
end
|
80
|
+
LLM::Shell.tools.map do |path|
|
81
|
+
eval File.read(path), TOPLEVEL_BINDING, path, 1
|
81
82
|
end.grep(LLM::Function)
|
82
83
|
end
|
83
84
|
|
data/libexec/llm-shell/shell
CHANGED
@@ -27,7 +27,6 @@ def option_parser
|
|
27
27
|
o.on("-h [HOST]", "--host [HOST]", "Optional. Sometimes required by ollama.", String)
|
28
28
|
o.on("-o [PORT]", "--port [PORT]", "Optional. Sometimes required by ollama.", Integer)
|
29
29
|
o.on("-f [GLOB]", "--files [GLOB]", "Optional. Glob pattern(s) separated by a comma.", Array)
|
30
|
-
o.on("-t [TOOLS]", "--tools [TOOLS]", "Optional. One or more tool names to load automatically.", Array)
|
31
30
|
o.on("-r [PROMPT]", "--prompt [PROMPT]", "Optional. The prompt to use.", String)
|
32
31
|
o.on("-v", "--version", "Optional. Print the version and exit.")
|
33
32
|
end
|
data/llm-shell.gemspec
CHANGED
@@ -28,10 +28,11 @@ Gem::Specification.new do |spec|
|
|
28
28
|
]
|
29
29
|
spec.require_paths = ["lib"]
|
30
30
|
spec.executables = ["llm-shell"]
|
31
|
-
spec.add_dependency "llm.rb", "~> 0.
|
31
|
+
spec.add_dependency "llm.rb", "~> 0.13"
|
32
32
|
spec.add_dependency "paint", "~> 2.1"
|
33
|
-
spec.add_dependency "
|
33
|
+
spec.add_dependency "redcarpet", "~> 3.6"
|
34
34
|
spec.add_dependency "coderay", "~> 1.1"
|
35
|
+
spec.add_dependency "reline", "~> 0.6"
|
35
36
|
spec.add_development_dependency "webmock", "~> 3.24.0"
|
36
37
|
spec.add_development_dependency "yard", "~> 0.9.37"
|
37
38
|
spec.add_development_dependency "kramdown", "~> 2.4"
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: llm-shell
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.
|
4
|
+
version: 0.9.0
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Antar Azri
|
@@ -16,14 +16,14 @@ dependencies:
|
|
16
16
|
requirements:
|
17
17
|
- - "~>"
|
18
18
|
- !ruby/object:Gem::Version
|
19
|
-
version: 0.
|
19
|
+
version: '0.13'
|
20
20
|
type: :runtime
|
21
21
|
prerelease: false
|
22
22
|
version_requirements: !ruby/object:Gem::Requirement
|
23
23
|
requirements:
|
24
24
|
- - "~>"
|
25
25
|
- !ruby/object:Gem::Version
|
26
|
-
version: 0.
|
26
|
+
version: '0.13'
|
27
27
|
- !ruby/object:Gem::Dependency
|
28
28
|
name: paint
|
29
29
|
requirement: !ruby/object:Gem::Requirement
|
@@ -39,19 +39,19 @@ dependencies:
|
|
39
39
|
- !ruby/object:Gem::Version
|
40
40
|
version: '2.1'
|
41
41
|
- !ruby/object:Gem::Dependency
|
42
|
-
name:
|
42
|
+
name: redcarpet
|
43
43
|
requirement: !ruby/object:Gem::Requirement
|
44
44
|
requirements:
|
45
45
|
- - "~>"
|
46
46
|
- !ruby/object:Gem::Version
|
47
|
-
version: '
|
47
|
+
version: '3.6'
|
48
48
|
type: :runtime
|
49
49
|
prerelease: false
|
50
50
|
version_requirements: !ruby/object:Gem::Requirement
|
51
51
|
requirements:
|
52
52
|
- - "~>"
|
53
53
|
- !ruby/object:Gem::Version
|
54
|
-
version: '
|
54
|
+
version: '3.6'
|
55
55
|
- !ruby/object:Gem::Dependency
|
56
56
|
name: coderay
|
57
57
|
requirement: !ruby/object:Gem::Requirement
|
@@ -66,6 +66,20 @@ dependencies:
|
|
66
66
|
- - "~>"
|
67
67
|
- !ruby/object:Gem::Version
|
68
68
|
version: '1.1'
|
69
|
+
- !ruby/object:Gem::Dependency
|
70
|
+
name: reline
|
71
|
+
requirement: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - "~>"
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: '0.6'
|
76
|
+
type: :runtime
|
77
|
+
prerelease: false
|
78
|
+
version_requirements: !ruby/object:Gem::Requirement
|
79
|
+
requirements:
|
80
|
+
- - "~>"
|
81
|
+
- !ruby/object:Gem::Version
|
82
|
+
version: '0.6'
|
69
83
|
- !ruby/object:Gem::Dependency
|
70
84
|
name: webmock
|
71
85
|
requirement: !ruby/object:Gem::Requirement
|
@@ -220,6 +234,7 @@ files:
|
|
220
234
|
- bin/llm-shell
|
221
235
|
- lib/io/line.rb
|
222
236
|
- lib/llm-shell.rb
|
237
|
+
- lib/llm/function.rb
|
223
238
|
- lib/llm/shell.rb
|
224
239
|
- lib/llm/shell/command.rb
|
225
240
|
- lib/llm/shell/command/extension.rb
|
@@ -227,13 +242,15 @@ files:
|
|
227
242
|
- lib/llm/shell/commands/dir_import.rb
|
228
243
|
- lib/llm/shell/commands/file_import.rb
|
229
244
|
- lib/llm/shell/commands/help.rb
|
230
|
-
- lib/llm/shell/commands/
|
245
|
+
- lib/llm/shell/commands/show_chat.rb
|
231
246
|
- lib/llm/shell/commands/system_prompt.rb
|
232
247
|
- lib/llm/shell/commands/utils.rb
|
233
248
|
- lib/llm/shell/completion.rb
|
234
249
|
- lib/llm/shell/config.rb
|
235
250
|
- lib/llm/shell/default.rb
|
236
251
|
- lib/llm/shell/formatter.rb
|
252
|
+
- lib/llm/shell/functions/read_file.rb
|
253
|
+
- lib/llm/shell/functions/write_file.rb
|
237
254
|
- lib/llm/shell/markdown.rb
|
238
255
|
- lib/llm/shell/options.rb
|
239
256
|
- lib/llm/shell/renderer.rb
|
@@ -262,7 +279,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
262
279
|
- !ruby/object:Gem::Version
|
263
280
|
version: '0'
|
264
281
|
requirements: []
|
265
|
-
rubygems_version: 3.
|
282
|
+
rubygems_version: 3.6.9
|
266
283
|
specification_version: 4
|
267
284
|
summary: llm-shell is an extensible, developer-oriented command-line console that
|
268
285
|
can interact with multiple Large Language Models (LLMs).
|