@heysalad/cheri-cli 0.3.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,66 @@
1
+ import { BaseProvider, SYSTEM_PROMPT } from "./base.js";
2
+
3
/**
 * Provider for Anthropic's Messages API with streaming support.
 * Normalizes the SDK's stream events into the event shapes documented
 * on BaseProvider.chat().
 */
export class AnthropicProvider extends BaseProvider {
  constructor(apiKey, model = "claude-sonnet-4-20250514") {
    super(apiKey, model);
  }

  /**
   * Stream one chat turn.
   * @param {Array<object>} messages - Conversation in Anthropic message format.
   * @param {Array<object>} tools - Tool definitions ({ name, description, parameters }).
   */
  async *chat(messages, tools) {
    // Imported lazily so other providers don't pay this SDK's load cost.
    const { default: Anthropic } = await import("@anthropic-ai/sdk");
    const client = new Anthropic({ apiKey: this.apiKey });

    // Anthropic expects the JSON-schema parameters under "input_schema".
    const toolDefs = tools.map(({ name, description, parameters }) => ({
      name,
      description,
      input_schema: parameters,
    }));

    const stream = await client.messages.stream({
      model: this.model,
      max_tokens: 8192,
      system: SYSTEM_PROMPT,
      messages,
      tools: toolDefs.length > 0 ? toolDefs : undefined,
    });

    // Tracks the tool_use content block currently being streamed, if any.
    let pendingTool = null;

    for await (const event of stream) {
      switch (event.type) {
        case "content_block_start": {
          const block = event.content_block;
          if (block.type === "tool_use") {
            pendingTool = { id: block.id, name: block.name, json: "" };
            yield { type: "tool_use_start", id: pendingTool.id, name: pendingTool.name };
          }
          // Text blocks need no bookkeeping; their deltas are yielded below.
          break;
        }
        case "content_block_delta": {
          const delta = event.delta;
          if (delta.type === "text_delta") {
            yield { type: "text", content: delta.text };
          } else if (delta.type === "input_json_delta") {
            // input_json_delta only occurs inside a tool_use block.
            if (pendingTool) pendingTool.json += delta.partial_json;
            yield { type: "tool_input_delta", content: delta.partial_json };
          }
          break;
        }
        case "content_block_stop": {
          if (pendingTool) {
            let input = {};
            try {
              input = JSON.parse(pendingTool.json);
            } catch {
              // Empty or malformed argument JSON: fall back to {}.
            }
            yield { type: "tool_use_end", id: pendingTool.id, name: pendingTool.name, input };
            pendingTool = null;
          }
          break;
        }
        default:
          // message_start / message_delta / message_stop need no handling here.
          break;
      }
    }

    const finalMessage = await stream.finalMessage();
    yield { type: "done", stopReason: finalMessage.stop_reason };
  }
}
@@ -0,0 +1,34 @@
1
// Shared system prompt injected by every provider (Anthropic `system` field,
// OpenAI system message, Gemini `systemInstruction`).
export const SYSTEM_PROMPT = `You are Cheri, an AI coding assistant by HeySalad. You help developers write, debug, and understand code.

You have access to tools that let you read files, write files, edit files, run shell commands, search files, and list directories. Use them proactively to help the user.

Guidelines:
- Read files before modifying them to understand the existing code.
- Use edit_file for targeted changes instead of rewriting entire files.
- When running commands, explain what you're about to run and why.
- Be concise but thorough. Show relevant code snippets in your responses.
- If you're unsure about something, say so rather than guessing.
- Format responses with markdown for readability.`;
12
+
13
/**
 * Abstract base class for AI chat providers.
 * Concrete subclasses (Anthropic, OpenAI, Gemini, ...) implement chat()
 * as an async generator that yields a provider-agnostic event stream.
 */
export class BaseProvider {
  /**
   * @param {string} apiKey - API key for the backing service.
   * @param {string} model - Model identifier to request.
   */
  constructor(apiKey, model) {
    this.apiKey = apiKey;
    this.model = model;
  }

  /**
   * Async generator that yields streaming events:
   *   { type: "text", content: string }
   *   { type: "tool_use_start", id: string, name: string }
   *   { type: "tool_input_delta", content: string }
   *   { type: "tool_use_end", id: string, name: string, input: object }
   *   { type: "done", stopReason: string }
   * @abstract
   */
  async *chat(messages, tools) {
    throw new Error("chat() must be implemented by subclass");
  }

  /** @returns {string} the configured model identifier. */
  getModel() {
    return this.model;
  }
}
@@ -0,0 +1,89 @@
1
+ import { BaseProvider, SYSTEM_PROMPT } from "./base.js";
2
+
3
/**
 * Provider for Google's Gemini models via the @google/generative-ai SDK.
 * Converts the internal (Anthropic-style) history into Gemini's role/parts
 * format and normalizes the streamed response into the event shapes
 * documented on BaseProvider.
 */
export class GeminiProvider extends BaseProvider {
  constructor(apiKey, model = "gemini-2.0-flash") {
    super(apiKey, model);
  }

  /**
   * Stream one chat turn.
   * @param {Array<object>} messages - Anthropic-style conversation history.
   * @param {Array<object>} tools - Tool definitions ({ name, description, parameters }).
   */
  async *chat(messages, tools) {
    // Imported lazily so other providers don't pay this SDK's load cost.
    const { GoogleGenerativeAI } = await import("@google/generative-ai");
    const genAI = new GoogleGenerativeAI(this.apiKey);

    const geminiTools = tools.length > 0 ? [{
      functionDeclarations: tools.map((t) => ({
        name: t.name,
        description: t.description,
        parameters: t.parameters,
      })),
    }] : undefined;

    const genModel = genAI.getGenerativeModel({
      model: this.model,
      systemInstruction: SYSTEM_PROMPT,
      tools: geminiTools,
    });

    // tool_result blocks only carry tool_use_id, but Gemini pairs a
    // functionResponse with its functionCall by NAME. Build an id -> name map
    // from the tool_use blocks so each result is labeled with the real tool
    // name (previously this always fell back to the literal name "tool").
    const toolNameById = new Map();
    for (const msg of messages) {
      if (!Array.isArray(msg.content)) continue;
      for (const block of msg.content) {
        if (block.type === "tool_use") toolNameById.set(block.id, block.name);
      }
    }

    // Convert all but the final message into Gemini chat history.
    const history = [];
    for (const msg of messages.slice(0, -1)) {
      const role = msg.role === "assistant" ? "model" : "user";
      if (typeof msg.content === "string") {
        history.push({ role, parts: [{ text: msg.content }] });
      } else if (Array.isArray(msg.content)) {
        const parts = [];
        for (const block of msg.content) {
          if (block.type === "text") {
            parts.push({ text: block.text });
          } else if (block.type === "tool_use") {
            parts.push({ functionCall: { name: block.name, args: block.input } });
          } else if (block.type === "tool_result") {
            const resultText = typeof block.content === "string" ? block.content : JSON.stringify(block.content);
            const name = block.name || toolNameById.get(block.tool_use_id) || "tool";
            // NOTE(review): these parts are sent under role "user"; the API
            // also defines a dedicated "function" role for functionResponse
            // parts — confirm which the SDK version in use expects.
            parts.push({ functionResponse: { name, response: { result: resultText } } });
          }
        }
        if (parts.length > 0) history.push({ role, parts });
      }
    }

    const chat = genModel.startChat({ history });

    // startChat/sendMessageStream take the newest message separately.
    const lastMsg = messages[messages.length - 1];
    const lastContent = typeof lastMsg.content === "string"
      ? lastMsg.content
      : lastMsg.content.map((b) => {
          if (b.type === "text") return b.text;
          if (b.type === "tool_result") {
            return typeof b.content === "string" ? b.content : JSON.stringify(b.content);
          }
          return "";
        }).join("\n");

    const result = await chat.sendMessageStream(lastContent);

    let hasToolCalls = false;

    for await (const chunk of result.stream) {
      const text = chunk.text();
      if (text) {
        yield { type: "text", content: text };
      }

      // Function calls arrive as complete parts (not incremental deltas), so
      // emit the full start/delta/end sequence to match the other providers.
      const candidates = chunk.candidates || [];
      for (const candidate of candidates) {
        for (const part of candidate.content?.parts || []) {
          if (part.functionCall) {
            hasToolCalls = true;
            // Gemini doesn't supply call ids; synthesize a unique one.
            const id = `gemini_${Date.now()}_${Math.random().toString(36).slice(2, 8)}`;
            yield { type: "tool_use_start", id, name: part.functionCall.name };
            yield { type: "tool_input_delta", content: JSON.stringify(part.functionCall.args) };
            yield { type: "tool_use_end", id, name: part.functionCall.name, input: part.functionCall.args || {} };
          }
        }
      }
    }

    yield { type: "done", stopReason: hasToolCalls ? "tool_use" : "end_turn" };
  }
}
@@ -0,0 +1,47 @@
1
+ import { getConfigValue } from "../config-store.js";
2
+ import { getDefaultModel } from "../branding.js";
3
+
4
/**
 * Create the configured AI provider instance.
 *
 * Resolution order: explicit options, then persisted config, then defaults.
 * API keys come from the provider's environment variable first, then from
 * the `ai.keys.<provider>` config entry.
 *
 * @param {{provider?: string, model?: string}} [options]
 * @returns {Promise<object>} a ready-to-use provider instance
 * @throws {Error} for an unknown provider name or a missing API key
 */
export async function createProvider(options = {}) {
  const provider = options.provider || getConfigValue("ai.provider") || "anthropic";

  const envKeys = {
    anthropic: "ANTHROPIC_API_KEY",
    openai: "OPENAI_API_KEY",
    deepseek: "DEEPSEEK_API_KEY",
    gemini: "GEMINI_API_KEY",
  };

  // Validate the provider name up front. Previously an unknown provider fell
  // through to the API-key lookup (process.env[undefined]) and produced a
  // confusing "No API key found ... Set the undefined environment variable."
  // error; the switch's "Unknown provider" branch was unreachable in practice.
  if (!Object.hasOwn(envKeys, provider)) {
    throw new Error(`Unknown provider: ${provider}. Supported: anthropic, openai, deepseek, gemini`);
  }

  const model = options.model || getConfigValue("ai.model") || getDefaultModel(provider);

  // Resolve API key: env var takes priority, then config.
  const apiKey = process.env[envKeys[provider]] || getConfigValue(`ai.keys.${provider}`);

  if (!apiKey) {
    throw new Error(
      `No API key found for ${provider}. Set it with:\n` +
      `  cheri config set ai.keys.${provider} <your-key>\n` +
      `Or set the ${envKeys[provider]} environment variable.`
    );
  }

  // Lazy import only the selected provider.
  switch (provider) {
    case "anthropic": {
      const { AnthropicProvider } = await import("./anthropic.js");
      return new AnthropicProvider(apiKey, model);
    }
    case "openai": {
      const { OpenAIProvider } = await import("./openai.js");
      return new OpenAIProvider(apiKey, model);
    }
    case "deepseek": {
      // DeepSeek speaks the OpenAI wire protocol; it lives in openai.js.
      const { DeepSeekProvider } = await import("./openai.js");
      return new DeepSeekProvider(apiKey, model);
    }
    case "gemini": {
      const { GeminiProvider } = await import("./gemini.js");
      return new GeminiProvider(apiKey, model);
    }
    default:
      // Unreachable: provider was validated against envKeys above.
      throw new Error(`Unknown provider: ${provider}. Supported: anthropic, openai, deepseek, gemini`);
  }
}
@@ -0,0 +1,105 @@
1
+ import { BaseProvider, SYSTEM_PROMPT } from "./base.js";
2
+
3
/**
 * Provider for OpenAI-compatible chat-completions APIs: OpenAI itself, and
 * any service exposing the same endpoint via a custom baseURL (see
 * DeepSeekProvider below).
 */
export class OpenAIProvider extends BaseProvider {
  /**
   * @param {string} apiKey
   * @param {string} model
   * @param {string} [baseURL] - Override for OpenAI-compatible services.
   */
  constructor(apiKey, model = "gpt-4o", baseURL = undefined) {
    super(apiKey, model);
    this.baseURL = baseURL;
  }

  /**
   * Stream one chat turn, yielding the normalized events documented on
   * BaseProvider.
   */
  async *chat(messages, tools) {
    // Imported lazily so other providers don't pay this SDK's load cost.
    const { default: OpenAI } = await import("openai");
    const clientOpts = { apiKey: this.apiKey };
    if (this.baseURL) clientOpts.baseURL = this.baseURL;
    const client = new OpenAI(clientOpts);

    // Convert from Anthropic message format to OpenAI format.
    const openaiMessages = [{ role: "system", content: SYSTEM_PROMPT }];
    for (const msg of messages) {
      if (typeof msg.content === "string") {
        openaiMessages.push({ role: msg.role, content: msg.content });
        continue;
      }
      if (!Array.isArray(msg.content)) continue;

      if (msg.role === "assistant") {
        // Collapse text + tool_use blocks into a SINGLE assistant message.
        // The chat-completions API requires each "tool" result message to
        // follow one assistant message listing ALL of its tool_calls, so the
        // previous one-message-per-tool_use conversion was rejected whenever
        // a turn contained more than one tool call.
        const textParts = [];
        const toolCallEntries = [];
        for (const block of msg.content) {
          if (block.type === "text") {
            textParts.push(block.text);
          } else if (block.type === "tool_use") {
            toolCallEntries.push({
              id: block.id,
              type: "function",
              function: { name: block.name, arguments: JSON.stringify(block.input) },
            });
          }
        }
        const assistantMsg = {
          role: "assistant",
          content: textParts.length > 0 ? textParts.join("\n") : null,
        };
        if (toolCallEntries.length > 0) assistantMsg.tool_calls = toolCallEntries;
        openaiMessages.push(assistantMsg);
      } else {
        // User-side messages: tool results and plain text blocks.
        for (const block of msg.content) {
          if (block.type === "tool_result") {
            openaiMessages.push({
              role: "tool",
              tool_call_id: block.tool_use_id,
              content: typeof block.content === "string" ? block.content : JSON.stringify(block.content),
            });
          } else if (block.type === "text") {
            openaiMessages.push({ role: msg.role, content: block.text });
          }
        }
      }
    }

    const openaiTools = tools.map((t) => ({
      type: "function",
      function: { name: t.name, description: t.description, parameters: t.parameters },
    }));

    const stream = await client.chat.completions.create({
      model: this.model,
      messages: openaiMessages,
      tools: openaiTools.length > 0 ? openaiTools : undefined,
      stream: true,
    });

    // Streamed tool calls arrive as fragments keyed by index; accumulate
    // their argument JSON until the finish reason arrives.
    const toolCalls = {};

    for await (const chunk of stream) {
      const delta = chunk.choices?.[0]?.delta;
      const finishReason = chunk.choices?.[0]?.finish_reason;

      if (delta?.content) {
        yield { type: "text", content: delta.content };
      }

      if (delta?.tool_calls) {
        for (const tc of delta.tool_calls) {
          const idx = tc.index;
          if (!toolCalls[idx]) {
            toolCalls[idx] = { id: tc.id || "", name: "", arguments: "" };
          }
          if (tc.id) toolCalls[idx].id = tc.id;
          if (tc.function?.name) {
            toolCalls[idx].name = tc.function.name;
            yield { type: "tool_use_start", id: toolCalls[idx].id, name: tc.function.name };
          }
          if (tc.function?.arguments) {
            toolCalls[idx].arguments += tc.function.arguments;
            yield { type: "tool_input_delta", content: tc.function.arguments };
          }
        }
      }

      if (finishReason) {
        // Emit tool_use_end for any accumulated tool calls.
        for (const idx of Object.keys(toolCalls)) {
          const tc = toolCalls[idx];
          let input = {};
          try {
            input = JSON.parse(tc.arguments);
          } catch {
            // Empty or malformed argument JSON: fall back to {}.
          }
          yield { type: "tool_use_end", id: tc.id, name: tc.name, input };
        }
        yield { type: "done", stopReason: finishReason === "tool_calls" ? "tool_use" : finishReason };
      }
    }
  }
}
100
+
101
// DeepSeek exposes an OpenAI-compatible chat-completions API, so this
// provider only pins the default model and points the client at DeepSeek's
// base URL; all streaming/conversion logic is inherited from OpenAIProvider.
export class DeepSeekProvider extends OpenAIProvider {
  constructor(apiKey, model = "deepseek-chat") {
    super(apiKey, model, "https://api.deepseek.com");
  }
}
@@ -0,0 +1,44 @@
1
+ import chalk from "chalk";
2
+
3
// Lazily-initialized `marked` instance configured for terminal rendering.
let markedRender = null;

/**
 * Load and configure marked + marked-terminal on first use, caching the
 * configured instance so subsequent calls are cheap.
 */
async function getMarked() {
  if (markedRender !== null) return markedRender;

  const [{ marked }, { default: TerminalRenderer }] = await Promise.all([
    import("marked"),
    import("marked-terminal"),
  ]);

  const columns = process.stdout.columns || 80;
  marked.setOptions({
    renderer: new TerminalRenderer({
      reflowText: true,
      // Cap the render width so very wide terminals stay readable.
      width: Math.min(columns, 100),
      tab: 2,
    }),
  });

  markedRender = marked;
  return markedRender;
}
19
+
20
/**
 * Render markdown for the terminal; falls back to the raw text when the
 * renderer cannot be loaded or parsing fails.
 */
export async function renderMarkdown(text) {
  try {
    const marked = await getMarked();
    const rendered = marked.parse(text);
    return rendered;
  } catch {
    // Best-effort rendering: on any failure, show the plain text.
    return text;
  }
}
28
+
29
/**
 * Format a tool invocation as a one-line summary, truncating long string
 * arguments so the line stays readable.
 */
export function renderToolUse(name, input) {
  const parts = [];
  for (const [key, value] of Object.entries(input || {})) {
    let rendered;
    if (typeof value === "string") {
      rendered = value.length > 60 ? value.slice(0, 57) + "..." : value;
    } else {
      rendered = JSON.stringify(value);
    }
    parts.push(`${chalk.dim(key)}=${rendered}`);
  }
  return `${chalk.magenta("🔧")} ${chalk.bold(name)}(${parts.join(", ")})`;
}
38
+
39
/**
 * Format a tool result line: red ❌ with the error text on failure,
 * green ✅ on success.
 */
export function renderToolResult(name, result) {
  return result.error
    ? chalk.red(`❌ ${name}: ${result.error}`)
    : chalk.green(`✅ ${name} completed`);
}
@@ -0,0 +1,225 @@
1
+ import readline from "readline";
2
+ import chalk from "chalk";
3
+ import ora from "ora";
4
+ import { createProvider } from "./providers/index.js";
5
+ import { getToolDefinitions, executeTool, requiresConfirmation } from "./tools/index.js";
6
+ import { showStartupScreen } from "./branding.js";
7
+ import { renderMarkdown, renderToolUse, renderToolResult } from "./renderer.js";
8
+
9
/**
 * Start the interactive Cheri REPL: set up the configured provider, then
 * read input line-by-line, dispatching slash commands locally and sending
 * everything else through the agent loop.
 */
export async function startRepl(options = {}) {
  showStartupScreen(options);

  let provider;
  try {
    provider = await createProvider(options);
  } catch (err) {
    console.error(chalk.red(`\n${err.message}`));
    process.exit(1);
  }

  // Shared conversation history (Anthropic-style messages), mutated in place
  // by the agent loop and slash commands.
  const messages = [];
  const tools = getToolDefinitions();

  const rl = readline.createInterface({
    input: process.stdin,
    output: process.stdout,
    prompt: "🍒 > ",
  });

  const onLine = async (line) => {
    const input = line.trim();

    if (!input) {
      rl.prompt();
      return;
    }

    // Slash commands are handled locally and never sent to the model.
    if (input.startsWith("/")) {
      await handleSlashCommand(input, messages, provider, options, rl);
      rl.prompt();
      return;
    }

    messages.push({ role: "user", content: input });
    await agentLoop(provider, messages, tools, rl);
    rl.prompt();
  };

  rl.on("line", onLine);
  rl.on("close", () => {
    console.log(chalk.dim("\nGoodbye! 🍒"));
    process.exit(0);
  });

  rl.prompt();
}
58
+
59
/**
 * Core agent loop: stream one model turn, printing text as it arrives, then
 * execute any requested tools and feed the results back — repeating until
 * the model stops asking for tools.
 *
 * @param provider - Object with an async-generator chat(messages, tools).
 * @param messages - Conversation history; mutated in place.
 * @param tools - Tool definitions passed to the provider.
 * @param rl - readline interface, used for confirmation prompts.
 */
async function agentLoop(provider, messages, tools, rl) {
  let continueLoop = true;

  while (continueLoop) {
    continueLoop = false;
    const spinner = ora({ text: "Thinking...", color: "yellow" }).start();

    let fullText = "";
    const toolCalls = [];
    let streamingStarted = false;

    // Stop the spinner the first time any output is ready to print.
    const stopSpinnerOnce = () => {
      if (!streamingStarted) {
        spinner.stop();
        streamingStarted = true;
      }
    };

    try {
      for await (const event of provider.chat(messages, tools)) {
        switch (event.type) {
          case "text":
            stopSpinnerOnce();
            process.stdout.write(event.content);
            fullText += event.content;
            break;

          case "tool_use_start":
            stopSpinnerOnce();
            toolCalls.push({ id: event.id, name: event.name, inputJson: "", input: {} });
            break;

          case "tool_input_delta":
            // Input deltas carry no id, so they belong to the most recently
            // started tool call.
            if (toolCalls.length > 0) {
              toolCalls[toolCalls.length - 1].inputJson += event.content;
            }
            break;

          case "tool_use_end": {
            // Match by id — providers that stream several calls may finish
            // them in any order; the old "attach to the last call" heuristic
            // assigned inputs to the wrong call in that case. Fall back to
            // the last call for providers that omit/blank the id.
            const tc = toolCalls.find((c) => c.id === event.id) ?? toolCalls[toolCalls.length - 1];
            if (tc) tc.input = event.input;
            break;
          }

          case "done":
            spinner.stop();
            if (event.stopReason === "tool_use" || toolCalls.length > 0) {
              continueLoop = true;
            }
            break;
        }
      }
    } catch (err) {
      spinner.stop();
      console.error(chalk.red(`\n❌ API Error: ${err.message}`));
      // Remove the last user message on error so conversation stays valid.
      if (messages.length > 0 && messages[messages.length - 1].role === "user") {
        messages.pop();
      }
      return;
    } finally {
      // Guard against providers whose stream ends without a "done" event.
      if (spinner.isSpinning) spinner.stop();
    }

    // End text output with newline if we streamed any text.
    if (fullText) {
      process.stdout.write("\n");
    }

    // Record the assistant turn (text and/or tool calls) in the history.
    const assistantContent = [];
    if (fullText) {
      assistantContent.push({ type: "text", text: fullText });
    }
    for (const tc of toolCalls) {
      assistantContent.push({ type: "tool_use", id: tc.id, name: tc.name, input: tc.input });
    }
    if (assistantContent.length > 0) {
      messages.push({ role: "assistant", content: assistantContent });
    }

    // Execute requested tools and push their results as the next user turn.
    if (toolCalls.length > 0) {
      const toolResults = [];
      for (const tc of toolCalls) {
        toolResults.push(await runSingleTool(tc, rl));
      }
      messages.push({ role: "user", content: toolResults });
    }
  }
}

/**
 * Execute one tool call — asking the user for confirmation when the tool
 * requires it — and return the tool_result block to send back to the model.
 */
async function runSingleTool(tc, rl) {
  console.log(renderToolUse(tc.name, tc.input));

  const needsConfirmation = requiresConfirmation(tc.name);
  if (needsConfirmation) {
    const confirmed = await askConfirmation(rl, `Run command: ${chalk.bold(tc.input.command || "")}`);
    if (!confirmed) {
      console.log(chalk.yellow("⏭️ Skipped"));
      return {
        type: "tool_result",
        tool_use_id: tc.id,
        content: "User denied execution of this command.",
      };
    }
  }

  const spinner = needsConfirmation
    ? ora({ text: `Running: ${tc.input.command}`, color: "red" }).start()
    : ora({ text: `${tc.name}...`, color: "magenta" }).start();
  const result = await executeTool(tc.name, tc.input);
  spinner.stop();
  console.log(renderToolResult(tc.name, result));
  return {
    type: "tool_result",
    tool_use_id: tc.id,
    content: JSON.stringify(result),
  };
}
+
184
/**
 * Ask a yes/no question on the readline interface.
 * An empty answer defaults to yes.
 * @returns {Promise<boolean>} true when the user confirmed.
 */
function askConfirmation(rl, message) {
  const prompt = `${chalk.yellow("⚡")} ${message} ${chalk.dim("[Y/n]")} `;
  return new Promise((resolve) => {
    rl.question(prompt, (answer) => {
      const normalized = answer.trim().toLowerCase();
      resolve(["", "y", "yes"].includes(normalized));
    });
  });
}
192
+
193
/**
 * Handle a REPL slash command (/help, /clear, /model, /exit).
 * Unknown commands print a hint instead of erroring.
 */
async function handleSlashCommand(input, messages, provider, options, rl) {
  const [command] = input.split(/\s+/);

  if (command === "/help") {
    console.log(`
${chalk.bold("Cheri REPL Commands:")}
  ${chalk.cyan("/help")}   Show this help
  ${chalk.cyan("/clear")}  Clear conversation history
  ${chalk.cyan("/model")}  Show current model info
  ${chalk.cyan("/exit")}   Exit the REPL
`);
    return;
  }

  if (command === "/clear") {
    // Truncate in place so callers' reference to the array stays valid.
    messages.length = 0;
    console.log(chalk.dim("Conversation cleared."));
    return;
  }

  if (command === "/model") {
    console.log(`  Provider: ${chalk.cyan(options.provider || "anthropic")}`);
    console.log(`  Model: ${chalk.cyan(provider.getModel())}`);
    return;
  }

  if (command === "/exit") {
    console.log(chalk.dim("Goodbye! 🍒"));
    process.exit(0);
  }

  console.log(chalk.yellow(`Unknown command: ${command}. Type /help for available commands.`));
}
@@ -0,0 +1,34 @@
1
import { execSync, spawnSync } from "child_process";
2
+
3
/**
 * Tool definition: execute a shell command and capture its output.
 * Marked requiresConfirmation so the REPL asks the user before running it.
 */
export const runCommand = {
  name: "run_command",
  description: "Execute a shell command and return its output. Requires user confirmation before execution.",
  parameters: {
    type: "object",
    properties: {
      command: { type: "string", description: "The shell command to execute" },
      cwd: { type: "string", description: "Working directory (optional, defaults to current directory)" },
    },
    required: ["command"],
  },
  requiresConfirmation: true,
  /**
   * Run `command` through the system shell.
   * Never throws: failures are reported via exitCode/stderr so the model can
   * see what went wrong.
   * @returns {Promise<{command: string, exitCode: number, stdout: string, stderr: string}>}
   */
  handler: async ({ command, cwd }) => {
    // spawnSync (rather than execSync) captures stderr on SUCCESS as well as
    // failure — execSync discarded stderr for zero-exit commands — and it
    // reports errors via the return value instead of throwing.
    const result = spawnSync(command, {
      shell: true,
      cwd: cwd || process.cwd(),
      encoding: "utf-8",
      timeout: 120_000,
      maxBuffer: 1024 * 1024 * 10,
    });

    if (result.error) {
      // Spawn-level failure (e.g. timeout, shell not found).
      return {
        command,
        exitCode: result.status ?? 1,
        stdout: result.stdout || "",
        stderr: result.stderr || result.error.message,
      };
    }

    return {
      command,
      // status is null when the process was killed by a signal; report 1.
      exitCode: result.status ?? 1,
      stdout: result.stdout || "",
      stderr: result.stderr || "",
    };
  },
};