@koush/chatsh 1.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/config.js +72 -0
- package/dist/main.js +213 -0
- package/dist/provider.js +61 -0
- package/package.json +27 -0
- package/src/@scrypted__node-pty.d.ts +49 -0
- package/src/main.ts +252 -0
- package/tsconfig.json +15 -0
package/dist/config.js
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
import { readFileSync, existsSync } from 'node:fs';
|
|
2
|
+
import { homedir } from 'node:os';
|
|
3
|
+
import { join } from 'node:path';
|
|
4
|
+
import * as jsonc from 'jsonc-parser';
|
|
5
|
+
const CONFIG_PATH = join(homedir(), '.llmsh', 'llmsh.jsonc');
|
|
6
|
+
// Loads and validates the llmsh config file (~/.llmsh/llmsh.jsonc).
// Returns the parsed config object, or null when the file is missing,
// unreadable, syntactically invalid, or fails validation.
export function loadConfig() {
    if (!existsSync(CONFIG_PATH)) {
        return null;
    }
    try {
        const content = readFileSync(CONFIG_PATH, 'utf-8');
        // Fix: jsonc.parse never throws on bad input; it is fault-tolerant and
        // returns a best-effort partial value, reporting problems through the
        // optional errors array. Collect and reject them so a malformed file
        // is not silently half-accepted.
        const errors = [];
        const config = jsonc.parse(content, errors);
        if (errors.length || !config) {
            return null;
        }
        if (!config.provider || !config.model) {
            return null;
        }
        const validProviders = ['openai', 'anthropic', 'google', 'openai-compatible'];
        if (!validProviders.includes(config.provider)) {
            return null;
        }
        return config;
    }
    catch {
        // readFileSync can still fail (permissions, race with deletion).
        return null;
    }
}
|
|
26
|
+
// Returns the absolute path of the llmsh config file so callers can
// reference it in messages.
export function getConfigPath() {
    return CONFIG_PATH;
}
|
|
29
|
+
// Prints setup instructions to stderr, with one example config per
// supported provider.
export function showConfigHelp() {
    const message = `Error: No valid config found at ${CONFIG_PATH}

Create a config file at ~/.llmsh/llmsh.jsonc with one of the following formats:

// OpenAI
{
  "provider": "openai",
  "model": "gpt-4-turbo",
  "options": {
    "apiKey": "sk-..." // or set OPENAI_API_KEY env var
  }
}

// Anthropic (Claude)
{
  "provider": "anthropic",
  "model": "claude-sonnet-4-5",
  "options": {
    "apiKey": "sk-ant-..." // or set ANTHROPIC_API_KEY env var
  }
}

// Google (Gemini)
{
  "provider": "google",
  "model": "gemini-2.5-flash",
  "options": {
    "apiKey": "..." // or set GOOGLE_GENERATIVE_AI_API_KEY env var
  }
}

// OpenAI-Compatible (Custom Endpoint)
{
  "provider": "openai-compatible",
  "model": "glm-4.7",
  "options": {
    "name": "custom",
    "baseURL": "http://localhost:8000/v1",
    "apiKey": "your-api-key"
  }
}
`;
    console.error(message);
}
|
package/dist/main.js
ADDED
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
import { spawn } from '@scrypted/node-pty';
|
|
2
|
+
import { createServer } from 'node:http';
|
|
3
|
+
import { once } from 'node:events';
|
|
4
|
+
import { readFileSync, existsSync } from 'node:fs';
|
|
5
|
+
import { homedir } from 'node:os';
|
|
6
|
+
import { join } from 'node:path';
|
|
7
|
+
import * as jsonc from 'jsonc-parser';
|
|
8
|
+
import { createOpenAI } from '@ai-sdk/openai';
|
|
9
|
+
import { createAnthropic } from '@ai-sdk/anthropic';
|
|
10
|
+
import { createGoogleGenerativeAI } from '@ai-sdk/google';
|
|
11
|
+
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
|
|
12
|
+
import { streamText } from 'ai';
|
|
13
|
+
const CONFIG_PATH = join(homedir(), '.llmsh', 'llmsh.jsonc');
|
|
14
|
+
// Loads and validates the llmsh config file (~/.llmsh/llmsh.jsonc).
// Returns the parsed config object, or null when the file is missing,
// unreadable, syntactically invalid, or fails validation.
function loadConfig() {
    if (!existsSync(CONFIG_PATH)) {
        return null;
    }
    try {
        const content = readFileSync(CONFIG_PATH, 'utf-8');
        // Fix: jsonc.parse never throws on bad input; it is fault-tolerant and
        // returns a best-effort partial value, reporting problems through the
        // optional errors array. Collect and reject them so a malformed file
        // is not silently half-accepted.
        const errors = [];
        const config = jsonc.parse(content, errors);
        if (errors.length || !config) {
            return null;
        }
        if (!config.provider || !config.model) {
            return null;
        }
        const validProviders = ['openai', 'anthropic', 'google', 'openai-compatible'];
        if (!validProviders.includes(config.provider)) {
            return null;
        }
        return config;
    }
    catch {
        // readFileSync can still fail (permissions, race with deletion).
        return null;
    }
}
|
|
34
|
+
// Prints setup instructions to stderr, with one example config per
// supported provider.
function showConfigHelp() {
    const message = `Error: No valid config found at ${CONFIG_PATH}

Create a config file at ~/.llmsh/llmsh.jsonc with one of the following formats:

// OpenAI
{
  "provider": "openai",
  "model": "gpt-4-turbo",
  "options": {
    "apiKey": "sk-..." // or set OPENAI_API_KEY env var
  }
}

// Anthropic (Claude)
{
  "provider": "anthropic",
  "model": "claude-sonnet-4-5",
  "options": {
    "apiKey": "sk-ant-..." // or set ANTHROPIC_API_KEY env var
  }
}

// Google (Gemini)
{
  "provider": "google",
  "model": "gemini-2.5-flash",
  "options": {
    "apiKey": "..." // or set GOOGLE_GENERATIVE_AI_API_KEY env var
  }
}

// OpenAI-Compatible (Custom Endpoint)
{
  "provider": "openai-compatible",
  "model": "glm-4.7",
  "options": {
    "name": "custom",
    "baseURL": "http://localhost:8000/v1",
    "apiKey": "your-api-key"
  }
}
`;
    console.error(message);
}
|
|
78
|
+
// Resolves the conventional environment variable for each first-party
// provider. 'openai-compatible' has no env fallback, so unknown providers
// yield undefined.
function getEnvApiKey(provider) {
    const envVarByProvider = {
        openai: 'OPENAI_API_KEY',
        anthropic: 'ANTHROPIC_API_KEY',
        google: 'GOOGLE_GENERATIVE_AI_API_KEY',
    };
    const envVar = envVarByProvider[provider];
    return envVar ? process.env[envVar] : undefined;
}
|
|
90
|
+
// Builds the language-model instance described by the config.
// The explicit apiKey in config.options wins over the provider's env var.
// Throws for an unrecognized provider name.
function createProvider(config) {
    const { provider, model, options = {} } = config;
    const apiKey = options.apiKey || getEnvApiKey(provider);
    const baseURL = options.baseURL;
    // One factory per supported provider; each configures the SDK client and
    // immediately resolves the named model.
    const factories = {
        'openai': () => createOpenAI({
            apiKey,
            baseURL,
            organization: options.organization,
            project: options.project,
        })(model),
        'anthropic': () => createAnthropic({
            apiKey,
            baseURL,
        })(model),
        'google': () => createGoogleGenerativeAI({
            apiKey,
            baseURL,
        })(model),
        'openai-compatible': () => createOpenAICompatible({
            name: options.name || 'custom',
            apiKey,
            baseURL: baseURL || 'http://localhost:8000/v1',
        })(model),
    };
    const factory = factories[provider];
    if (!factory) {
        throw new Error(`Unknown provider: ${provider}`);
    }
    return { model: factory() };
}
|
|
135
|
+
// Entry point: starts a loopback HTTP bridge for the injected shell `help`
// function, spawns zsh in a pty, mirrors its output to stdout while keeping
// a transcript, and proxies help queries (transcript + query) to the LLM.
async function main() {
    const config = loadConfig();
    if (!config) {
        showConfigHelp();
        process.exit(1);
    }
    const { model } = createProvider(config);
    let transcript = '';
    const server = createServer((req, res) => {
        if (req.method === 'POST') {
            let body = '';
            req.on('data', (chunk) => body += chunk);
            req.on('end', async () => {
                try {
                    const result = streamText({
                        model,
                        system: 'You are a helpful assistant running in a terminal session. You receive a shell transcript from the user and an associated user query, if any.\n\nCRITICAL: Do NOT use any markdown formatting whatsoever. This means:\n- No bullet points (- or *)\n- No backticks (`) for code\n- No headers (#)\n- No bold/italic markers (** or _)\n- No markdown links\n\nUse plain text only. If you need to emphasize something or format code examples, use ANSI escape codes instead (e.g., \\x1b[1m for bold, \\x1b[32m for green, \\x1b[0m to reset).\n\nHelp the user based on the transcript context.',
                        prompt: `${transcript}\n\n${body}`
                    });
                    res.writeHead(200, { 'Content-Type': 'text/plain' });
                    res.write('\n\n');
                    for await (const chunk of result.textStream) {
                        res.write(chunk);
                    }
                    res.write('\n\n');
                    res.end();
                }
                catch (error) {
                    // Fix: the stream can fail after the 200 head was sent;
                    // writing a second head would throw ERR_HTTP_HEADERS_SENT
                    // and crash this async handler with an unhandled rejection.
                    if (!res.headersSent) {
                        res.writeHead(500, { 'Content-Type': 'text/plain' });
                    }
                    res.end('Error: ' + error.message);
                }
            });
        }
        else {
            // GET returns the raw transcript (useful for debugging).
            res.writeHead(200, { 'Content-Type': 'text/plain' });
            res.end(transcript);
        }
    });
    // Fix: bind to loopback only. listen(0) without a host binds all
    // interfaces, exposing the terminal transcript and an unauthenticated
    // LLM proxy to the network; the injected help() only targets localhost.
    server.listen(0, '127.0.0.1');
    await once(server, 'listening');
    const address = server.address();
    const port = address && typeof address === 'object' ? address.port : 0;
    const ptyProcess = spawn('zsh', [], {
        name: process.env.TERM || 'xterm',
        cols: process.stdout.columns || 80,
        rows: process.stdout.rows || 24,
        cwd: process.cwd(),
        env: { ...process.env, LLMSH_PORT: String(port) }
    });
    process.stdin.setRawMode(true);
    process.stdin.resume();
    process.stdin.on('data', (data) => {
        ptyProcess.write(data.toString());
    });
    ptyProcess.onData((data) => {
        // Reset the transcript when the terminal is cleared (ESC[2J), keeping
        // only output after the clear; strip a leading cursor-home (ESC[H).
        const clearSeq = '\x1b[2J';
        const lastClearIndex = data.lastIndexOf(clearSeq);
        if (lastClearIndex !== -1) {
            transcript = '';
            const afterClear = data.slice(lastClearIndex + clearSeq.length);
            transcript += afterClear.replace(/^\x1b\[H/, '');
        }
        else {
            // NOTE(review): transcript grows without bound for long sessions;
            // consider capping it — TODO confirm acceptable memory profile.
            transcript += data;
        }
        process.stdout.write(data);
    });
    // Fix: terminate the function body with ';' before '}' so the definition
    // is valid POSIX/bash syntax as well, not only accepted by zsh.
    ptyProcess.write('help() { curl -s -X POST -d "$*" http://localhost:$LLMSH_PORT; }\n');
    ptyProcess.onExit(({ exitCode }) => {
        process.exit(exitCode);
    });
    process.on('exit', () => {
        ptyProcess.kill();
    });
    process.stdout.on('resize', () => {
        ptyProcess.resize(process.stdout.columns || 80, process.stdout.rows || 24);
    });
}
// Fix: don't leave the startup promise floating; surface failures instead of
// an unhandled rejection.
main().catch((error) => {
    console.error(error);
    process.exit(1);
});
|
package/dist/provider.js
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
import { createOpenAI } from '@ai-sdk/openai';
|
|
2
|
+
import { createAnthropic } from '@ai-sdk/anthropic';
|
|
3
|
+
import { createGoogleGenerativeAI } from '@ai-sdk/google';
|
|
4
|
+
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
|
|
5
|
+
// Builds the language-model instance described by the config.
// The explicit apiKey in config.options wins over the provider's env var.
// Throws for an unrecognized provider name.
export function createProvider(config) {
    const { provider, model, options = {} } = config;
    const apiKey = options.apiKey || getEnvApiKey(provider);
    const baseURL = options.baseURL;
    // One factory per supported provider; each configures the SDK client and
    // immediately resolves the named model.
    const factories = {
        'openai': () => createOpenAI({
            apiKey,
            baseURL,
            organization: options.organization,
            project: options.project,
        })(model),
        'anthropic': () => createAnthropic({
            apiKey,
            baseURL,
        })(model),
        'google': () => createGoogleGenerativeAI({
            apiKey,
            baseURL,
        })(model),
        'openai-compatible': () => createOpenAICompatible({
            name: options.name || 'custom',
            apiKey,
            baseURL: baseURL || 'http://localhost:8000/v1',
        })(model),
    };
    const factory = factories[provider];
    if (!factory) {
        throw new Error(`Unknown provider: ${provider}`);
    }
    return { model: factory() };
}
|
|
50
|
+
// Resolves the conventional environment variable for each first-party
// provider. 'openai-compatible' has no env fallback, so unknown providers
// yield undefined.
function getEnvApiKey(provider) {
    const envVarByProvider = {
        openai: 'OPENAI_API_KEY',
        anthropic: 'ANTHROPIC_API_KEY',
        google: 'GOOGLE_GENERATIVE_AI_API_KEY',
    };
    const envVar = envVarByProvider[provider];
    return envVar ? process.env[envVar] : undefined;
}
|
package/package.json
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@koush/chatsh",
|
|
3
|
+
"version": "1.0.4",
|
|
4
|
+
"type": "module",
|
|
5
|
+
"main": "dist/main.js",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"build": "tsc",
|
|
8
|
+
"start": "node --experimental-strip-types src/main.ts",
|
|
9
|
+
"prepublishOnly": "npm version patch && npm run build"
|
|
10
|
+
},
|
|
11
|
+
"author": "",
|
|
12
|
+
"license": "ISC",
|
|
13
|
+
"description": "",
|
|
14
|
+
"devDependencies": {
|
|
15
|
+
"@types/node": "^20.10.0",
|
|
16
|
+
"typescript": "^5.3.0"
|
|
17
|
+
},
|
|
18
|
+
"dependencies": {
|
|
19
|
+
"@ai-sdk/anthropic": "^3.0.58",
|
|
20
|
+
"@ai-sdk/google": "^3.0.43",
|
|
21
|
+
"@ai-sdk/openai": "^3.0.41",
|
|
22
|
+
"@ai-sdk/openai-compatible": "^2.0.35",
|
|
23
|
+
"@scrypted/node-pty": "^1.0.25",
|
|
24
|
+
"ai": "^6.0.116",
|
|
25
|
+
"jsonc-parser": "^3.3.1"
|
|
26
|
+
}
|
|
27
|
+
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
/**
 * Minimal ambient typings for @scrypted/node-pty, mirroring the node-pty API
 * surface this package uses (spawn, IPty, fork options).
 */
declare module '@scrypted/node-pty' {
  /** Forks a process attached to a pseudoterminal and returns its handle. */
  export function spawn(file: string, args: string[] | string, options: IPtyForkOptions | IWindowsPtyForkOptions): IPty;

  /** Options common to POSIX and Windows pty spawns. */
  export interface IBasePtyForkOptions {
    name?: string;            // terminal type, e.g. 'xterm'
    cols?: number;            // initial column count
    rows?: number;            // initial row count
    cwd?: string;             // working directory for the child
    env?: { [key: string]: string | undefined };
    encoding?: string | null; // null yields raw Buffer data
    handleFlowControl?: boolean;
    flowControlPause?: string;
    flowControlResume?: string;
  }

  /** POSIX-only spawn options. */
  export interface IPtyForkOptions extends IBasePtyForkOptions {
    uid?: number;
    gid?: number;
  }

  /** Windows-only spawn options. */
  export interface IWindowsPtyForkOptions extends IBasePtyForkOptions {
    useConpty?: boolean;
    conptyInheritCursor?: boolean;
  }

  /** Handle to a spawned pty process. */
  export interface IPty {
    readonly pid: number;
    readonly cols: number;
    readonly rows: number;
    readonly process: string;
    handleFlowControl: boolean;
    /** Fires with terminal output data. */
    readonly onData: IEvent<string>;
    /** Fires once when the child exits. */
    readonly onExit: IEvent<{ exitCode: number, signal?: number }>;
    resize(columns: number, rows: number): void;
    clear(): void;
    /** Writes input to the child as if typed. */
    write(data: string): void;
    kill(signal?: string): void;
    pause(): void;
    resume(): void;
  }

  /** Disposer returned when subscribing to an event. */
  export interface IDisposable {
    dispose(): void;
  }

  /** node-pty style event: call with a listener, get a disposer back. */
  export interface IEvent<T> {
    (listener: (e: T) => any): IDisposable;
  }
}
|
package/src/main.ts
ADDED
|
@@ -0,0 +1,252 @@
|
|
|
1
|
+
import { spawn } from '@scrypted/node-pty';
|
|
2
|
+
import { createServer } from 'node:http';
|
|
3
|
+
import { once } from 'node:events';
|
|
4
|
+
import { readFileSync, existsSync } from 'node:fs';
|
|
5
|
+
import { homedir } from 'node:os';
|
|
6
|
+
import { join } from 'node:path';
|
|
7
|
+
import * as jsonc from 'jsonc-parser';
|
|
8
|
+
import { createOpenAI } from '@ai-sdk/openai';
|
|
9
|
+
import { createAnthropic } from '@ai-sdk/anthropic';
|
|
10
|
+
import { createGoogleGenerativeAI } from '@ai-sdk/google';
|
|
11
|
+
import { createOpenAICompatible } from '@ai-sdk/openai-compatible';
|
|
12
|
+
import { streamText } from 'ai';
|
|
13
|
+
|
|
14
|
+
// Values accepted in the config file's "provider" field.
type ProviderType = 'openai' | 'anthropic' | 'google' | 'openai-compatible';

/** Parsed shape of ~/.llmsh/llmsh.jsonc. */
interface LlmshConfig {
    provider: ProviderType;
    model: string;
    options?: {
        apiKey?: string;       // falls back to the provider's env var when omitted
        baseURL?: string;
        name?: string;         // used only by 'openai-compatible'
        organization?: string; // used only by 'openai'
        project?: string;      // used only by 'openai'
    };
}
|
|
27
|
+
|
|
28
|
+
const CONFIG_PATH = join(homedir(), '.llmsh', 'llmsh.jsonc');
|
|
29
|
+
|
|
30
|
+
function loadConfig(): LlmshConfig | null {
|
|
31
|
+
if (!existsSync(CONFIG_PATH)) {
|
|
32
|
+
return null;
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
try {
|
|
36
|
+
const content = readFileSync(CONFIG_PATH, 'utf-8');
|
|
37
|
+
const config = jsonc.parse(content) as LlmshConfig;
|
|
38
|
+
|
|
39
|
+
if (!config.provider || !config.model) {
|
|
40
|
+
return null;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
const validProviders: ProviderType[] = ['openai', 'anthropic', 'google', 'openai-compatible'];
|
|
44
|
+
if (!validProviders.includes(config.provider)) {
|
|
45
|
+
return null;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
return config;
|
|
49
|
+
} catch {
|
|
50
|
+
return null;
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
function showConfigHelp(): void {
|
|
55
|
+
console.error(`Error: No valid config found at ${CONFIG_PATH}
|
|
56
|
+
|
|
57
|
+
Create a config file at ~/.llmsh/llmsh.jsonc with one of the following formats:
|
|
58
|
+
|
|
59
|
+
// OpenAI
|
|
60
|
+
{
|
|
61
|
+
"provider": "openai",
|
|
62
|
+
"model": "gpt-4-turbo",
|
|
63
|
+
"options": {
|
|
64
|
+
"apiKey": "sk-..." // or set OPENAI_API_KEY env var
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
// Anthropic (Claude)
|
|
69
|
+
{
|
|
70
|
+
"provider": "anthropic",
|
|
71
|
+
"model": "claude-sonnet-4-5",
|
|
72
|
+
"options": {
|
|
73
|
+
"apiKey": "sk-ant-..." // or set ANTHROPIC_API_KEY env var
|
|
74
|
+
}
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
// Google (Gemini)
|
|
78
|
+
{
|
|
79
|
+
"provider": "google",
|
|
80
|
+
"model": "gemini-2.5-flash",
|
|
81
|
+
"options": {
|
|
82
|
+
"apiKey": "..." // or set GOOGLE_GENERATIVE_AI_API_KEY env var
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
// OpenAI-Compatible (Custom Endpoint)
|
|
87
|
+
{
|
|
88
|
+
"provider": "openai-compatible",
|
|
89
|
+
"model": "glm-4.7",
|
|
90
|
+
"options": {
|
|
91
|
+
"name": "custom",
|
|
92
|
+
"baseURL": "http://localhost:8000/v1",
|
|
93
|
+
"apiKey": "your-api-key"
|
|
94
|
+
}
|
|
95
|
+
}
|
|
96
|
+
`);
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
function getEnvApiKey(provider: string): string | undefined {
|
|
100
|
+
switch (provider) {
|
|
101
|
+
case 'openai':
|
|
102
|
+
return process.env.OPENAI_API_KEY;
|
|
103
|
+
case 'anthropic':
|
|
104
|
+
return process.env.ANTHROPIC_API_KEY;
|
|
105
|
+
case 'google':
|
|
106
|
+
return process.env.GOOGLE_GENERATIVE_AI_API_KEY;
|
|
107
|
+
default:
|
|
108
|
+
return undefined;
|
|
109
|
+
}
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
function createProvider(config: LlmshConfig) {
|
|
113
|
+
const { provider, model, options = {} } = config;
|
|
114
|
+
const apiKey = options.apiKey || getEnvApiKey(provider);
|
|
115
|
+
const baseURL = options.baseURL;
|
|
116
|
+
|
|
117
|
+
switch (provider) {
|
|
118
|
+
case 'openai': {
|
|
119
|
+
return {
|
|
120
|
+
model: createOpenAI({
|
|
121
|
+
apiKey,
|
|
122
|
+
baseURL,
|
|
123
|
+
organization: options.organization,
|
|
124
|
+
project: options.project,
|
|
125
|
+
})(model),
|
|
126
|
+
};
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
case 'anthropic': {
|
|
130
|
+
return {
|
|
131
|
+
model: createAnthropic({
|
|
132
|
+
apiKey,
|
|
133
|
+
baseURL,
|
|
134
|
+
})(model),
|
|
135
|
+
};
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
case 'google': {
|
|
139
|
+
return {
|
|
140
|
+
model: createGoogleGenerativeAI({
|
|
141
|
+
apiKey,
|
|
142
|
+
baseURL,
|
|
143
|
+
})(model),
|
|
144
|
+
};
|
|
145
|
+
}
|
|
146
|
+
|
|
147
|
+
case 'openai-compatible': {
|
|
148
|
+
return {
|
|
149
|
+
model: createOpenAICompatible({
|
|
150
|
+
name: options.name || 'custom',
|
|
151
|
+
apiKey,
|
|
152
|
+
baseURL: baseURL || 'http://localhost:8000/v1',
|
|
153
|
+
})(model),
|
|
154
|
+
};
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
default: {
|
|
158
|
+
throw new Error(`Unknown provider: ${provider}`);
|
|
159
|
+
}
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
async function main() {
|
|
164
|
+
const config = loadConfig();
|
|
165
|
+
|
|
166
|
+
if (!config) {
|
|
167
|
+
showConfigHelp();
|
|
168
|
+
process.exit(1);
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
const { model } = createProvider(config);
|
|
172
|
+
let transcript = '';
|
|
173
|
+
|
|
174
|
+
const server = createServer((req, res) => {
|
|
175
|
+
if (req.method === 'POST') {
|
|
176
|
+
let body = '';
|
|
177
|
+
req.on('data', (chunk) => body += chunk);
|
|
178
|
+
req.on('end', async () => {
|
|
179
|
+
try {
|
|
180
|
+
const result = streamText({
|
|
181
|
+
model,
|
|
182
|
+
system: 'You are a helpful assistant running in a terminal session. You receive a shell transcript from the user and an associated user query, if any.\n\nCRITICAL: Do NOT use any markdown formatting whatsoever. This means:\n- No bullet points (- or *)\n- No backticks (`) for code\n- No headers (#)\n- No bold/italic markers (** or _)\n- No markdown links\n\nUse plain text only. If you need to emphasize something or format code examples, use ANSI escape codes instead (e.g., \\x1b[1m for bold, \\x1b[32m for green, \\x1b[0m to reset).\n\nHelp the user based on the transcript context.',
|
|
183
|
+
prompt: `${transcript}\n\n${body}`
|
|
184
|
+
});
|
|
185
|
+
|
|
186
|
+
res.writeHead(200, { 'Content-Type': 'text/plain' });
|
|
187
|
+
res.write('\n\n');
|
|
188
|
+
for await (const chunk of result.textStream) {
|
|
189
|
+
res.write(chunk);
|
|
190
|
+
}
|
|
191
|
+
res.write('\n\n');
|
|
192
|
+
res.end();
|
|
193
|
+
} catch (error) {
|
|
194
|
+
res.writeHead(500, { 'Content-Type': 'text/plain' });
|
|
195
|
+
res.end('Error: ' + (error as Error).message);
|
|
196
|
+
}
|
|
197
|
+
});
|
|
198
|
+
} else {
|
|
199
|
+
res.writeHead(200, { 'Content-Type': 'text/plain' });
|
|
200
|
+
res.end(transcript);
|
|
201
|
+
}
|
|
202
|
+
});
|
|
203
|
+
server.listen(0);
|
|
204
|
+
await once(server, 'listening');
|
|
205
|
+
const address = server.address();
|
|
206
|
+
const port = address && typeof address === 'object' ? address.port : 0;
|
|
207
|
+
|
|
208
|
+
const ptyProcess = spawn('zsh', [], {
|
|
209
|
+
name: process.env.TERM || 'xterm',
|
|
210
|
+
cols: process.stdout.columns || 80,
|
|
211
|
+
rows: process.stdout.rows || 24,
|
|
212
|
+
cwd: process.cwd(),
|
|
213
|
+
env: { ...process.env, LLMSH_PORT: String(port) } as { [key: string]: string }
|
|
214
|
+
});
|
|
215
|
+
|
|
216
|
+
process.stdin.setRawMode(true);
|
|
217
|
+
process.stdin.resume();
|
|
218
|
+
|
|
219
|
+
process.stdin.on('data', (data: Buffer) => {
|
|
220
|
+
ptyProcess.write(data.toString());
|
|
221
|
+
});
|
|
222
|
+
|
|
223
|
+
ptyProcess.onData((data: string) => {
|
|
224
|
+
const clearSeq = '\x1b[2J';
|
|
225
|
+
const lastClearIndex = data.lastIndexOf(clearSeq);
|
|
226
|
+
|
|
227
|
+
if (lastClearIndex !== -1) {
|
|
228
|
+
transcript = '';
|
|
229
|
+
const afterClear = data.slice(lastClearIndex + clearSeq.length);
|
|
230
|
+
transcript += afterClear.replace(/^\x1b\[H/, '');
|
|
231
|
+
} else {
|
|
232
|
+
transcript += data;
|
|
233
|
+
}
|
|
234
|
+
process.stdout.write(data);
|
|
235
|
+
});
|
|
236
|
+
|
|
237
|
+
ptyProcess.write('help() { curl -s -X POST -d "$*" http://localhost:$LLMSH_PORT }\n');
|
|
238
|
+
|
|
239
|
+
ptyProcess.onExit(({ exitCode }: { exitCode: number }) => {
|
|
240
|
+
process.exit(exitCode);
|
|
241
|
+
});
|
|
242
|
+
|
|
243
|
+
process.on('exit', () => {
|
|
244
|
+
ptyProcess.kill();
|
|
245
|
+
});
|
|
246
|
+
|
|
247
|
+
process.stdout.on('resize', () => {
|
|
248
|
+
ptyProcess.resize(process.stdout.columns || 80, process.stdout.rows || 24);
|
|
249
|
+
});
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
main();
|
package/tsconfig.json
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2022",
|
|
4
|
+
"module": "NodeNext",
|
|
5
|
+
"moduleResolution": "NodeNext",
|
|
6
|
+
"outDir": "./dist",
|
|
7
|
+
"rootDir": "./src",
|
|
8
|
+
"strict": true,
|
|
9
|
+
"esModuleInterop": true,
|
|
10
|
+
"skipLibCheck": true,
|
|
11
|
+
"forceConsistentCasingInFileNames": true,
|
|
12
|
+
"verbatimModuleSyntax": true
|
|
13
|
+
},
|
|
14
|
+
"include": ["src/**/*"]
|
|
15
|
+
}
|