llm-shell 0.3.0 → 0.4.1
This diff shows the content of the publicly released package versions as published to their respective registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/README.md +9 -7
- data/lib/llm/shell/command.rb +14 -0
- data/lib/llm/shell/commands/import_file.rb +5 -0
- data/lib/llm/shell/markdown.rb +20 -1
- data/lib/llm/shell/version.rb +1 -1
- data/libexec/llm-shell/shell +1 -1
- metadata +16 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f6aec859f4eb4fd9bfc722da0f0ccbaada65932b56bbd5d118f8131be32097e5
+  data.tar.gz: 0156acf8d3efbec33ce61aab82e5cc90f2618888b7cce6fa181c5de5b11b591c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: bdc7e09ec212ee04005bf636caa64abbfdbb087a6d20506402aaf81703806d4c3ddf7611f0c4dc61eedaa7711f6fe50880be0691a07181cb9d582e19fbcf2ba8
+  data.tar.gz: 7ce76e58b455c55ee94d52b2913396e514d2b808c0fa00ac57c5dff34b499c4cf1def4f3acc398ddfd602cb9f202a3261f8c51ea6012c89853c3a9d79991f9d5
data/README.md
CHANGED
@@ -23,6 +23,7 @@ it in action!
 #### Shell
 
 - 🤖 Builtin auto-complete powered by Readline
+- 🎨 Builtin syntax highlighting powered by Coderay
 - 📄 Deploys the less pager for long outputs
 - 📝 Advanced Markdown formatting and output
 
@@ -84,6 +85,7 @@ command for a realistic example:
 
 ```ruby
 LLM.command "say-hello" do |cmd|
+  cmd.description "Say hello to somebody"
   cmd.define do |name|
     io.rewind.print "Hello #{name}!"
   end
@@ -126,13 +128,13 @@ tools:
 
 ```bash
 Usage: llm-shell [OPTIONS]
-    -p, --provider NAME
-    -k, --key [KEY]
-    -m, --model [MODEL]
-    -h, --host [HOST]
-    -o, --port [PORT]
-    -f, --files [GLOB]
-    -t, --tools [TOOLS]
+    -p, --provider NAME    Required. Options: gemini, openai, anthropic, ollama or llamacpp.
+    -k, --key [KEY]        Optional. Required by gemini, openai, and anthropic.
+    -m, --model [MODEL]    Optional. The name of a model.
+    -h, --host [HOST]      Optional. Sometimes required by ollama.
+    -o, --port [PORT]      Optional. Sometimes required by ollama.
+    -f, --files [GLOB]     Optional. Glob pattern(s) separated by a comma.
+    -t, --tools [TOOLS]    Optional. One or more tool names to load automatically.
 ```
 
 ## Install
data/lib/llm/shell/command.rb
CHANGED
@@ -13,6 +13,7 @@ class LLM::Shell
     # Set or get the command name
     # @param [String, nil] name
     #  The name of the command
+    # @return [String]
     def name(name = nil)
       if name
         @name = name
@@ -21,6 +22,19 @@ class LLM::Shell
       end
     end
 
+    ##
+    # Set or get the command description
+    # @param [String, nil] desc
+    #  The description of the command
+    # @return [String]
+    def description(desc = nil)
+      if desc
+        @description = desc
+      else
+        @description
+      end
+    end
+
     ##
     # Setup the command context
     # @return [void]
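For context, a hedged sketch of how the `description` reader/writer added above is meant to be used from the `LLM.command` DSL. It mirrors the `say-hello` example in the README; the `greet` command name and its body are illustrative assumptions, not part of the gem.

```ruby
# Illustrative only: a command that sets its description, assuming the gem's
# LLM.command DSL and the io helper shown in the README example.
LLM.command "greet" do |cmd|
  cmd.description "Greet somebody by name" # with an argument: stores the text
  cmd.define do |name|
    io.rewind.print "Hello #{name}!"
  end
end
# Called with no argument, cmd.description returns the stored string.
```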
data/lib/llm/shell/commands/import_file.rb
CHANGED
@@ -12,6 +12,10 @@ class LLM::Shell::Command
       Dir["#{path}*"]
     end
 
+    ##
+    # @param [LLM::Shell::Context] context
+    #  The context of the command
+    # @return [LLM::Shell::Command::ImportFile]
     def initialize(context)
       @context = context
     end
@@ -35,6 +39,7 @@ class LLM::Shell::Command
   end
 
   LLM.command "import-file" do |cmd|
+    cmd.description "Share one or more files with the LLM"
     cmd.register ImportFile
   end
 end
data/lib/llm/shell/markdown.rb
CHANGED
@@ -3,6 +3,7 @@
 class LLM::Shell
   class Markdown
     require "kramdown"
+    require "coderay"
 
     ##
     # @param [String] text
@@ -37,7 +38,18 @@ class LLM::Shell
         Paint[node.children.map { visit(_1) }.join, :bold]
       when :br
         "\n"
-      when :
+      when :codespan, :codeblock
+        lines = node.value.each_line.to_a
+        lang = lines[0].strip
+        code = lines[1..].join
+        if lines.size == 1
+          Paint[node.value, :italic]
+        else
+          ["\n", Paint[">>> #{lang}", :blue, :bold],
+           "\n\n", coderay(code, lang),
+           "\n", Paint["<<< #{lang}", :blue, :bold]].join
+        end
+      when :text
         node.value
       else
         node.children.map { visit(_1) }.join
@@ -58,5 +70,12 @@ class LLM::Shell
         .gsub(/\A<think>[\n]*<\/think>(?:\n)/, "")
         .gsub(/\A\n{2,}/, "")
     end
+
+    def coderay(code, lang)
+      CodeRay.scan(code, lang).terminal
+    rescue ArgumentError
+      lang = "text"
+      retry
+    end
   end
 end
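As a standalone illustration of the fallback added above (a hedged sketch: the `highlight` helper name and the sample snippet are assumptions, not llm-shell code), the same pattern can be exercised directly against the coderay gem:

```ruby
require "coderay"

# Highlight a snippet for the terminal, retrying with the plain-text scanner
# when CodeRay rejects the language tag with an ArgumentError.
def highlight(code, lang)
  CodeRay.scan(code, lang).terminal
rescue ArgumentError
  lang = "text"
  retry
end

print highlight("puts 1 + 1\n", "ruby") # ANSI-colored Ruby source
```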
data/lib/llm/shell/version.rb
CHANGED
data/libexec/llm-shell/shell
CHANGED
@@ -17,7 +17,7 @@ end
 def option_parser
   OptionParser.new do |o|
     o.banner = "Usage: llm-shell [OPTIONS]"
-    o.on("-p PROVIDER", "--provider NAME", "Required. Options: gemini, openai, anthropic, or
+    o.on("-p PROVIDER", "--provider NAME", "Required. Options: gemini, openai, anthropic, ollama or llamacpp.", String)
     o.on("-k [KEY]", "--key [KEY]", "Optional. Required by gemini, openai, and anthropic.", String)
     o.on("-m [MODEL]", "--model [MODEL]", "Optional. The name of a model.", Array)
     o.on("-h [HOST]", "--host [HOST]", "Optional. Sometimes required by ollama.", String)
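For reference, a hedged sketch of how `o.on` declarations like the ones above behave when parsed with Ruby's stdlib OptionParser; the `options` hash and the sample arguments are illustrative assumptions, not code from the gem.

```ruby
require "optparse"

# Trimmed-down parser mirroring two of the o.on calls above.
options = {}
parser = OptionParser.new do |o|
  o.banner = "Usage: llm-shell [OPTIONS]"
  o.on("-p PROVIDER", "--provider NAME", String) { |v| options[:provider] = v }
  o.on("-h [HOST]", "--host [HOST]", String) { |v| options[:host] = v }
end

parser.parse!(["--provider", "llamacpp", "--host", "localhost"])
p options # prints the parsed provider and host
```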
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: llm-shell
 version: !ruby/object:Gem::Version
-  version: 0.3.0
+  version: 0.4.1
 platform: ruby
 authors:
 - Antar Azri
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2025-05-
+date: 2025-05-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: llm.rb
@@ -53,6 +53,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '2.5'
+- !ruby/object:Gem::Dependency
+  name: coderay
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.1'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.1'
 - !ruby/object:Gem::Dependency
   name: webmock
   requirement: !ruby/object:Gem::Requirement