@koush/chatsh 1.0.10 → 1.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +103 -0
  2. package/dist/main.js +10 -6
  3. package/package.json +1 -1
package/README.md ADDED
@@ -0,0 +1,103 @@
1
+ # @koush/chatsh
2
+
3
+ A terminal-based shell assistant that connects your shell session to an LLM for context-aware help.
4
+
5
+ ## What it does
6
+
7
+ Chatsh wraps your shell session and maintains a transcript of all terminal activity. Use the `help` command to query an LLM with context about what you're doing.
8
+
9
+ ```
10
+ $ help how do I find large files?
11
+ ```
12
+
13
+ The LLM receives your entire shell transcript and can provide context-aware assistance.
14
+
15
+ ## Features
16
+
17
+ - **Shell wrapper** - Wraps zsh/bash/fish in a PTY session
18
+ - **LLM integration** - Multiple provider support (OpenAI, Anthropic, Google, OpenAI-compatible)
19
+ - **Transcript tracking** - Maintains full terminal history
20
+ - **Smart clear detection** - Resets transcript on terminal clear
21
+
22
+ ## Installation
23
+
24
+ ```bash
25
+ npm install -g @koush/chatsh
26
+ ```
27
+
28
+ Or run directly with:
29
+
30
+ ```bash
31
+ npx @koush/chatsh
32
+ ```
33
+
34
+ ## Configuration
35
+
36
+ Create `~/.chatsh/chatsh.jsonc`:
37
+
38
+ ```jsonc
39
+ // OpenAI
40
+ {
41
+ "provider": "openai",
42
+ "model": "gpt-4-turbo",
43
+ "options": {
44
+ "apiKey": "sk-..." // or set OPENAI_API_KEY env var
45
+ }
46
+ }
47
+
48
+ // Anthropic (Claude)
49
+ {
50
+ "provider": "anthropic",
51
+ "model": "claude-sonnet-4-5",
52
+ "options": {
53
+ "apiKey": "sk-ant-..." // or set ANTHROPIC_API_KEY env var
54
+ }
55
+ }
56
+
57
+ // Google (Gemini)
58
+ {
59
+ "provider": "google",
60
+ "model": "gemini-2.5-flash",
61
+ "options": {
62
+ "apiKey": "..." // or set GOOGLE_GENERATIVE_AI_API_KEY env var
63
+ }
64
+ }
65
+
66
+ // OpenAI-Compatible (Custom Endpoint)
67
+ {
68
+ "provider": "openai-compatible",
69
+ "model": "your-model-name",
70
+ "options": {
71
+ "name": "custom",
72
+ "baseURL": "http://localhost:8000/v1",
73
+ "apiKey": "your-api-key"
74
+ }
75
+ }
76
+ ```
77
+
78
+ ## Usage
79
+
80
+ Start chatsh:
81
+
82
+ ```bash
83
+ chatsh
84
+ ```
85
+
86
+ Use your shell normally. When you need help:
87
+
88
+ ```bash
89
+ $ help what does this error mean?
90
+ $ help how do I grep recursively?
91
+ ```
92
+
93
+ ## How it works
94
+
95
+ 1. Spawns an interactive shell in a pseudo-terminal (PTY)
96
+ 2. Starts a local HTTP server on a random port
97
+ 3. Tracks all terminal output in a transcript
98
+ 4. The `help` command sends the transcript + your question to an LLM
99
+ 5. LLM response streams back to your terminal
100
+
101
+ ## License
102
+
103
+ ISC
package/dist/main.js CHANGED
@@ -10,7 +10,7 @@ import { createAnthropic } from '@ai-sdk/anthropic';
10
10
  import { createGoogleGenerativeAI } from '@ai-sdk/google';
11
11
  import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
12
12
  import { streamText } from 'ai';
13
- const CONFIG_PATH = join(homedir(), '.llmsh', 'llmsh.jsonc');
13
+ const CONFIG_PATH = join(homedir(), '.chatsh', 'chatsh.jsonc');
14
14
  function loadConfig() {
15
15
  if (!existsSync(CONFIG_PATH)) {
16
16
  return null;
@@ -34,7 +34,7 @@ function loadConfig() {
34
34
  function showConfigHelp() {
35
35
  console.error(`Error: No valid config found at ${CONFIG_PATH}
36
36
 
37
- Create a config file at ~/.llmsh/llmsh.jsonc with one of the following formats:
37
+ Create a config file at ~/.chatsh/chatsh.jsonc with one of the following formats:
38
38
 
39
39
  // OpenAI
40
40
  {
@@ -174,17 +174,18 @@ async function main() {
174
174
  await once(server, 'listening');
175
175
  const address = server.address();
176
176
  const port = address && typeof address === 'object' ? address.port : 0;
177
- const ptyProcess = spawn('zsh', [], {
177
+ const shell = process.env.SHELL || '/bin/zsh';
178
+ const ptyProcess = spawn(shell, ['-i'], {
178
179
  name: process.env.TERM || 'xterm',
179
180
  cols: process.stdout.columns || 80,
180
181
  rows: process.stdout.rows || 24,
181
182
  cwd: process.cwd(),
182
- env: { ...process.env, LLMSH_PORT: String(port) }
183
+ env: { ...process.env, CHATSH_PORT: String(port) }
183
184
  });
184
185
  process.stdin.setRawMode(true);
185
186
  process.stdin.resume();
186
187
  process.stdin.on('data', (data) => {
187
- ptyProcess.write(data.toString());
188
+ ptyProcess.write(data);
188
189
  });
189
190
  ptyProcess.onData((data) => {
190
191
  const clearSeq = '\x1b[2J';
@@ -199,7 +200,10 @@ async function main() {
199
200
  }
200
201
  process.stdout.write(data);
201
202
  });
202
- ptyProcess.write('help() { curl -s -X POST -d "$*" http://localhost:$LLMSH_PORT }\n');
203
+ ptyProcess.write('help() { curl -s -X POST -d "$*" http://localhost:$CHATSH_PORT }\n');
204
+ if (shell.includes('zsh')) {
205
+ ptyProcess.write('bindkey "^R" history-incremental-search-backward\n');
206
+ }
203
207
  ptyProcess.onExit(({ exitCode }) => {
204
208
  process.exit(exitCode);
205
209
  });
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@koush/chatsh",
3
- "version": "1.0.10",
3
+ "version": "1.0.12",
4
4
  "type": "module",
5
5
  "main": "dist/main.js",
6
6
  "bin": {