@heysalad/cheri-cli 0.3.1 → 0.5.0

This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
package/README.md CHANGED
@@ -1,8 +1,6 @@
- # cheri-cli
+ # Cheri CLI

- CLI for [Cheri](https://cheri.heysalad.app) the AI-powered cloud IDE that never forgets.
-
- Manage workspaces, track API usage, and access your AI memory from the terminal.
+ AI-powered cloud IDE by [HeySalad](https://heysalad.app). Like Claude Code, but for cloud workspaces.

  ## Install

@@ -10,79 +8,53 @@ Manage workspaces, track API usage, and access your AI memory from the terminal.
  npm install -g @heysalad/cheri-cli
  ```

- Requires Node.js 18+.
-
- ## Quick Start
+ ## Usage

  ```bash
- # Authenticate with your Cheri account
+ # Login to your Cheri account
  cheri login

- # Launch a cloud workspace
- cheri workspace launch owner/my-repo
-
  # Check account status
  cheri status

- # View API usage and rate limits
- cheri usage
- ```
+ # Launch a cloud workspace
+ cheri workspace launch owner/repo

- ## Commands
-
- | Command | Description |
- |---|---|
- | `cheri login` | Authenticate with GitHub |
- | `cheri status` | Show account and workspace status |
- | `cheri usage` | Show API usage and rate limit status |
- | `cheri workspace launch <repo>` | Launch a new cloud workspace |
- | `cheri workspace list` | List all workspaces |
- | `cheri workspace stop <id>` | Stop a running workspace |
- | `cheri workspace status <id>` | Get workspace status |
- | `cheri memory show` | Show current memory entries |
- | `cheri memory add <text>` | Add a memory entry |
- | `cheri memory clear` | Clear all memory |
- | `cheri memory export` | Export memory to JSON |
- | `cheri config list` | Show all configuration |
- | `cheri config get <key>` | Get a config value |
- | `cheri config set <key> <value>` | Set a config value |
- | `cheri init` | Initialize a project |
-
- ## Interactive REPL
-
- Run `cheri` with no arguments to enter the interactive REPL:
+ # List your workspaces
+ cheri workspace list

- ```
- $ cheri
- 🍒 cheri > help
- 🍒 cheri > workspace list
- 🍒 cheri > usage
- 🍒 cheri > exit
- ```
+ # Stop a workspace
+ cheri workspace stop
+
+ # Initialize AI project config
+ cheri init

- ## Rate Limits
+ # Manage persistent memory
+ cheri memory show
+ cheri memory add "Always use TypeScript strict mode"
+ cheri memory clear

- | Plan | Limit |
- |---|---|
- | Free | 100 requests/hour |
- | Pro | 1,000 requests/hour |
+ # View/update configuration
+ cheri config list
+ cheri config set apiUrl https://cheri.heysalad.app
+ ```

- Use `cheri usage` to check your current rate limit status.
+ ## How it works

- ## Configuration
+ 1. **`cheri login`** opens your browser for GitHub OAuth, then you paste your API token
+ 2. **`cheri workspace launch`** spins up a cloud workspace with code-server (VS Code in browser)
+ 3. **`cheri memory`** stores persistent context that follows you across sessions
+ 4. **`cheri init`** creates a local `.ai/` directory with project constitution files

- Config is stored in `~/.cheri/`. Set the API URL if self-hosting:
+ ## Requirements

- ```bash
- cheri config set apiUrl https://your-instance.example.com
- ```
+ - Node.js >= 18

  ## Links

  - [Cheri Cloud IDE](https://cheri.heysalad.app)
- - [Dashboard](https://cheri.heysalad.app/dashboard)
- - [GitHub](https://github.com/chilu18/cloud-ide)
+ - [GitHub](https://github.com/Hey-Salad/cheri-cli)

  ## License

- MIT
+ MIT - HeySalad
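
Note on the "How it works" section above: a minimal sketch of how the `cheri memory` commands map onto the package's internal API client. The method names (`addMemory`, `getMemory`, `clearMemory`) and the `src/lib/api-client.js` path come from the new agent command later in this diff; the wiring around them is illustrative, not the shipped implementation.

```js
// Sketch only: how the `cheri memory` subcommands reach the Cheri API.
// The apiClient methods below appear in the agent tool handlers in this diff;
// everything else here is illustrative.
import { apiClient } from "./src/lib/api-client.js";

async function demoMemoryFlow() {
  // `cheri memory add "..."` ends up calling something like:
  await apiClient.addMemory("Always use TypeScript strict mode", "general");

  // `cheri memory show` reads the stored entries back:
  const entries = await apiClient.getMemory();
  console.log(entries);

  // `cheri memory clear` wipes them:
  await apiClient.clearMemory();
}

demoMemoryFlow().catch(console.error);
```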
package/bin/cheri.js CHANGED
@@ -7,12 +7,13 @@ import { registerStatusCommand } from "../src/commands/status.js";
  import { registerMemoryCommand } from "../src/commands/memory.js";
  import { registerConfigCommand } from "../src/commands/config.js";
  import { registerWorkspaceCommand } from "../src/commands/workspace.js";
- import { registerUsageCommand } from "../src/commands/usage.js";
+ import { registerChatCommand } from "../src/commands/chat.js";
+ import { registerAgentCommand } from "../src/commands/agent.js";

  program
    .name("cheri")
    .description("Cheri CLI - AI-powered cloud IDE by HeySalad")
-   .version("0.1.0");
+   .version("0.2.0");

  registerLoginCommand(program);
  registerInitCommand(program);
@@ -20,7 +21,8 @@ registerStatusCommand(program);
  registerMemoryCommand(program);
  registerConfigCommand(program);
  registerWorkspaceCommand(program);
- registerUsageCommand(program);
+ registerChatCommand(program);
+ registerAgentCommand(program);

  // If no args, launch interactive command REPL
  if (!process.argv.slice(2).length) {
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@heysalad/cheri-cli",
-   "version": "0.3.1",
+   "version": "0.5.0",
    "description": "Cheri CLI - AI-powered cloud IDE by HeySalad. Like Claude Code, but for cloud workspaces.",
    "type": "module",
    "bin": {
@@ -8,14 +8,15 @@
    },
    "files": [
      "bin/",
-     "src/"
+     "src/",
+     "README.md"
    ],
    "scripts": {
      "start": "node bin/cheri.js",
      "dev": "node bin/cheri.js",
-     "release:patch": "npm version patch && npm publish && git push && git push --tags",
-     "release:minor": "npm version minor && npm publish && git push && git push --tags",
-     "release:major": "npm version major && npm publish && git push && git push --tags"
+     "release:patch": "npm version patch && npm publish --access public && git push && git push --tags",
+     "release:minor": "npm version minor && npm publish --access public && git push && git push --tags",
+     "release:major": "npm version major && npm publish --access public && git push && git push --tags"
    },
    "keywords": [
      "cloud-ide",
@@ -28,18 +29,23 @@
    ],
    "repository": {
      "type": "git",
-     "url": "https://github.com/chilu18/cloud-ide.git",
-     "directory": "cli"
+     "url": "https://github.com/Hey-Salad/cheri-cli.git"
    },
+   "homepage": "https://cheri.heysalad.app",
    "author": "HeySalad",
    "license": "MIT",
    "engines": {
      "node": ">=18"
    },
    "dependencies": {
+     "@anthropic-ai/sdk": "^0.74.0",
+     "@google/generative-ai": "^0.24.1",
      "chalk": "^5.3.0",
      "commander": "^12.1.0",
      "inquirer": "^9.2.23",
+     "marked": "^15.0.12",
+     "marked-terminal": "^7.3.0",
+     "openai": "^6.22.0",
      "ora": "^8.0.1"
    }
  }
package/src/commands/agent.js ADDED
@@ -0,0 +1,238 @@
+ import { apiClient } from "../lib/api-client.js";
+ import { getConfigValue, setConfigValue } from "../lib/config-store.js";
+ import { createProvider } from "../lib/providers/index.js";
+ import { log } from "../lib/logger.js";
+ import chalk from "chalk";
+
+ const SYSTEM_PROMPT = `You are Cheri Agent, an AI assistant for the Cheri cloud IDE platform. You help users manage cloud workspaces, memory, configuration, and their account. Use the provided tools to get real data — never guess or fabricate information. Be concise. After performing actions, briefly summarize what happened and the result.`;
+
+ const TOOLS = [
+   {
+     name: "get_account_info",
+     description: "Get the current user's account information",
+     parameters: { type: "object", properties: {}, required: [] },
+   },
+   {
+     name: "list_workspaces",
+     description: "List all cloud workspaces for the current user",
+     parameters: { type: "object", properties: {}, required: [] },
+   },
+   {
+     name: "create_workspace",
+     description: "Launch a new cloud workspace for a GitHub repository",
+     parameters: {
+       type: "object",
+       properties: {
+         repo: { type: "string", description: "GitHub repo in owner/name format" },
+       },
+       required: ["repo"],
+     },
+   },
+   {
+     name: "stop_workspace",
+     description: "Stop and delete a running workspace",
+     parameters: {
+       type: "object",
+       properties: {
+         id: { type: "string", description: "Workspace ID to stop" },
+       },
+       required: ["id"],
+     },
+   },
+   {
+     name: "get_workspace_status",
+     description: "Get the status of a specific workspace",
+     parameters: {
+       type: "object",
+       properties: {
+         id: { type: "string", description: "Workspace ID" },
+       },
+       required: ["id"],
+     },
+   },
+   {
+     name: "get_memory",
+     description: "Retrieve all stored memory entries",
+     parameters: { type: "object", properties: {}, required: [] },
+   },
+   {
+     name: "add_memory",
+     description: "Add a new memory entry for the user",
+     parameters: {
+       type: "object",
+       properties: {
+         content: { type: "string", description: "Memory content to store" },
+         category: { type: "string", description: "Optional category (defaults to 'general')" },
+       },
+       required: ["content"],
+     },
+   },
+   {
+     name: "clear_memory",
+     description: "Clear all stored memory entries",
+     parameters: { type: "object", properties: {}, required: [] },
+   },
+   {
+     name: "get_usage",
+     description: "Get the user's API usage and rate limit statistics",
+     parameters: { type: "object", properties: {}, required: [] },
+   },
+   {
+     name: "get_config",
+     description: "Get a configuration value by key (dot notation supported)",
+     parameters: {
+       type: "object",
+       properties: {
+         key: { type: "string", description: "Config key, e.g. 'ai.provider'" },
+       },
+       required: ["key"],
+     },
+   },
+   {
+     name: "set_config",
+     description: "Set a configuration value",
+     parameters: {
+       type: "object",
+       properties: {
+         key: { type: "string", description: "Config key" },
+         value: { type: "string", description: "Value to set" },
+       },
+       required: ["key", "value"],
+     },
+   },
+ ];
+
+ async function executeTool(name, args) {
+   try {
+     switch (name) {
+       case "get_account_info":
+         return await apiClient.getMe();
+       case "list_workspaces":
+         return await apiClient.listWorkspaces();
+       case "create_workspace":
+         return await apiClient.createWorkspace(args.repo);
+       case "stop_workspace":
+         return await apiClient.deleteWorkspace(args.id);
+       case "get_workspace_status":
+         return await apiClient.getWorkspaceStatus(args.id);
+       case "get_memory":
+         return await apiClient.getMemory();
+       case "add_memory":
+         return await apiClient.addMemory(args.content, args.category);
+       case "clear_memory":
+         return await apiClient.clearMemory();
+       case "get_usage":
+         return await apiClient.getUsage();
+       case "get_config":
+         return { key: args.key, value: getConfigValue(args.key) };
+       case "set_config":
+         setConfigValue(args.key, args.value);
+         return { key: args.key, value: args.value, status: "updated" };
+       default:
+         return { error: `Unknown tool: ${name}` };
+     }
+   } catch (err) {
+     return { error: err.message };
+   }
+ }
+
+ export async function runAgent(userRequest) {
+   const providerName = getConfigValue("agent.provider") || getConfigValue("ai.provider") || "anthropic";
+   const model = getConfigValue("agent.model") || undefined;
+
+   const provider = await createProvider({ provider: providerName, model });
+
+   const messages = [
+     { role: "user", content: userRequest },
+   ];
+
+   const MAX_ITERATIONS = 10;
+
+   for (let i = 0; i < MAX_ITERATIONS; i++) {
+     let fullText = "";
+     const toolCalls = [];
+
+     for await (const event of provider.chat(messages, TOOLS, { systemPrompt: SYSTEM_PROMPT })) {
+       switch (event.type) {
+         case "text":
+           process.stdout.write(event.content);
+           fullText += event.content;
+           break;
+
+         case "tool_use_start":
+           toolCalls.push({ id: event.id, name: event.name, input: {} });
+           break;
+
+         case "tool_input_delta":
+           // accumulated by provider, final input comes in tool_use_end
+           break;
+
+         case "tool_use_end":
+           if (toolCalls.length > 0) {
+             toolCalls[toolCalls.length - 1].input = event.input;
+           }
+           break;
+
+         case "done":
+           break;
+       }
+     }
+
+     // No tool calls — final text response, done
+     if (toolCalls.length === 0) {
+       if (fullText) process.stdout.write("\n");
+       return;
+     }
+
+     if (fullText) process.stdout.write("\n");
+
+     // Build assistant message in Anthropic content-block format
+     const assistantContent = [];
+     if (fullText) {
+       assistantContent.push({ type: "text", text: fullText });
+     }
+     for (const tc of toolCalls) {
+       assistantContent.push({ type: "tool_use", id: tc.id, name: tc.name, input: tc.input });
+     }
+     messages.push({ role: "assistant", content: assistantContent });
+
+     // Execute each tool call and build tool results
+     const toolResults = [];
+     for (const tc of toolCalls) {
+       log.info(`Calling ${chalk.cyan(tc.name)}${Object.keys(tc.input).length ? chalk.dim(" " + JSON.stringify(tc.input)) : ""}`);
+
+       const result = await executeTool(tc.name, tc.input);
+
+       if (result.error) {
+         log.error(result.error);
+       } else {
+         log.success(tc.name);
+       }
+
+       toolResults.push({
+         type: "tool_result",
+         tool_use_id: tc.id,
+         content: JSON.stringify(result),
+       });
+     }
+
+     messages.push({ role: "user", content: toolResults });
+   }
+
+   log.warn("Agent reached maximum iterations (10). Stopping.");
+ }
+
+ export function registerAgentCommand(program) {
+   program
+     .command("agent")
+     .argument("<request...>")
+     .description("AI agent — natural language command interface")
+     .action(async (requestParts) => {
+       const request = requestParts.join(" ");
+       try {
+         await runAgent(request);
+       } catch (err) {
+         log.error(err.message);
+       }
+     });
+ }
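
For context on the loop above: a sketch of the message history `runAgent` accumulates after one tool-calling round. The structure mirrors the Anthropic content-block format the code builds; the IDs and the workspace payload are invented for illustration.

```js
// Illustration only: the shape of `messages` after one tool round.
// "toolu_01" and the workspace object are made-up example values.
const messages = [
  { role: "user", content: "list my workspaces" },
  {
    role: "assistant",
    content: [
      { type: "text", text: "Let me check your workspaces." },
      { type: "tool_use", id: "toolu_01", name: "list_workspaces", input: {} },
    ],
  },
  {
    role: "user",
    content: [
      {
        type: "tool_result",
        tool_use_id: "toolu_01",
        content: JSON.stringify([{ id: "ws_123", repo: "owner/repo", status: "running" }]),
      },
    ],
  },
];
```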
package/src/commands/chat.js ADDED
@@ -0,0 +1,15 @@
+ import { startRepl } from "../lib/repl.js";
+
+ export function registerChatCommand(program) {
+   program
+     .command("chat")
+     .description("Start an interactive AI coding session")
+     .option("-p, --provider <provider>", "AI provider (anthropic, openai, deepseek, gemini)")
+     .option("-m, --model <model>", "Model to use (overrides provider default)")
+     .action(async (options) => {
+       await startRepl({
+         provider: options.provider,
+         model: options.model,
+       });
+     });
+ }
package/src/commands/memory.js CHANGED
@@ -53,13 +53,21 @@ export async function showMemory(options = {}) {
  }

  export async function addMemory(content, category = "general") {
-   const { entry, count } = await apiClient.addMemory(content, category);
-   log.success(`Memory saved (${count} total). Category: ${chalk.cyan(entry.category)}`);
+   try {
+     const { entry, count } = await apiClient.addMemory(content, category);
+     log.success(`Memory saved (${count} total). Category: ${chalk.cyan(entry.category)}`);
+   } catch (err) {
+     throw err;
+   }
  }

  export async function clearMemory() {
-   await apiClient.clearMemory();
-   log.success("All memories cleared.");
+   try {
+     await apiClient.clearMemory();
+     log.success("All memories cleared.");
+   } catch (err) {
+     throw err;
+   }
  }

  export async function exportMemory(options = {}) {
@@ -0,0 +1,37 @@
+ import chalk from "chalk";
+ import { getConfigValue } from "./config-store.js";
+
+ const CHERRY_ART = `
+   ${chalk.red("🍒🍒")}
+  ${chalk.red("🍒 🍒")}
+ `;
+
+ export function showStartupScreen(options = {}) {
+   const provider = options.provider || getConfigValue("ai.provider") || "anthropic";
+   const model = options.model || getConfigValue("ai.model") || getDefaultModel(provider);
+   const cwd = process.cwd();
+   const version = "0.2.0";
+
+   console.log(CHERRY_ART);
+   console.log(chalk.bold(` cheri v${version}`));
+   console.log(chalk.dim(" AI coding agent by HeySalad"));
+   console.log();
+   console.log(` ${chalk.dim("Provider:")} ${chalk.cyan(provider)}`);
+   console.log(` ${chalk.dim("Model:")} ${chalk.cyan(model)}`);
+   console.log(` ${chalk.dim("Directory:")} ${chalk.cyan(cwd)}`);
+   console.log();
+   console.log(chalk.dim(" Type your request. /help for commands, Ctrl+C to exit."));
+   console.log(chalk.dim(" " + "─".repeat(48)));
+   console.log();
+ }
+
+ export function getDefaultModel(provider) {
+   const defaults = {
+     anthropic: "claude-sonnet-4-20250514",
+     openai: "gpt-4o",
+     deepseek: "deepseek-chat",
+     gemini: "gemini-2.0-flash",
+     bedrock: "anthropic.claude-opus-4-6-v1:0",
+   };
+   return defaults[provider] || "unknown";
+ }
@@ -74,5 +74,19 @@ function getDefaultConfig() {
      theme: "dark",
      fontSize: 14,
    },
+   ai: {
+     provider: "anthropic",
+     model: "",
+     keys: {
+       anthropic: "",
+       openai: "",
+       deepseek: "",
+       gemini: "",
+       bedrock: "",
+     },
+   },
+   bedrock: {
+     region: "us-east-1",
+   },
  };
  }
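
The new `ai` and `bedrock` defaults above are read elsewhere in this diff with dot-notation keys (for example `getConfigValue("ai.provider")` in the agent command). A minimal sketch of reading and writing them, assuming `setConfigValue` accepts the same dot-notation keys as `getConfigValue`; the import path is illustrative.

```js
// Sketch, assuming the config-store API shown elsewhere in this diff
// (getConfigValue/setConfigValue with dot-notation keys over the defaults above).
import { getConfigValue, setConfigValue } from "./src/lib/config-store.js";

// Pick the provider the chat/agent commands will use.
setConfigValue("ai.provider", "bedrock");       // assumption: dot-notation writes supported
setConfigValue("bedrock.region", "eu-west-1");

// Fallback chain similar to the one used by the agent command.
const provider = getConfigValue("agent.provider") || getConfigValue("ai.provider") || "anthropic";
console.log(provider); // "bedrock"
```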
package/src/lib/logger.js CHANGED
@@ -36,7 +36,7 @@ export const log = {
        console.log(chalk.dim(prefix) + " " + item);
      });
    },
-   banner(version = "0.1.0") {
+   banner(version = "0.2.0") {
      console.log();
      console.log(` ${chalk.red("🍒")} ${chalk.red.bold("Cheri")}`);
      console.log(` ${chalk.dim("AI-powered cloud IDE by HeySalad")}`);
@@ -0,0 +1,66 @@
+ import { BaseProvider, SYSTEM_PROMPT } from "./base.js";
+
+ export class AnthropicProvider extends BaseProvider {
+   constructor(apiKey, model = "claude-sonnet-4-20250514") {
+     super(apiKey, model);
+   }
+
+   async *chat(messages, tools, options = {}) {
+     const { default: Anthropic } = await import("@anthropic-ai/sdk");
+     const client = new Anthropic({ apiKey: this.apiKey });
+
+     const anthropicTools = tools.map((t) => ({
+       name: t.name,
+       description: t.description,
+       input_schema: t.parameters,
+     }));
+
+     const stream = await client.messages.stream({
+       model: this.model,
+       max_tokens: 8192,
+       system: options.systemPrompt || SYSTEM_PROMPT,
+       messages,
+       tools: anthropicTools.length > 0 ? anthropicTools : undefined,
+     });
+
+     let currentToolId = null;
+     let currentToolName = null;
+     let toolInputJson = "";
+
+     for await (const event of stream) {
+       if (event.type === "content_block_start") {
+         if (event.content_block.type === "text") {
+           // text block starting
+         } else if (event.content_block.type === "tool_use") {
+           currentToolId = event.content_block.id;
+           currentToolName = event.content_block.name;
+           toolInputJson = "";
+           yield { type: "tool_use_start", id: currentToolId, name: currentToolName };
+         }
+       } else if (event.type === "content_block_delta") {
+         if (event.delta.type === "text_delta") {
+           yield { type: "text", content: event.delta.text };
+         } else if (event.delta.type === "input_json_delta") {
+           toolInputJson += event.delta.partial_json;
+           yield { type: "tool_input_delta", content: event.delta.partial_json };
+         }
+       } else if (event.type === "content_block_stop") {
+         if (currentToolId) {
+           let input = {};
+           try {
+             input = JSON.parse(toolInputJson);
+           } catch {}
+           yield { type: "tool_use_end", id: currentToolId, name: currentToolName, input };
+           currentToolId = null;
+           currentToolName = null;
+           toolInputJson = "";
+         }
+       } else if (event.type === "message_stop") {
+         // done
+       }
+     }
+
+     const finalMessage = await stream.finalMessage();
+     yield { type: "done", stopReason: finalMessage.stop_reason };
+   }
+ }
@@ -0,0 +1,34 @@
+ export const SYSTEM_PROMPT = `You are Cheri, an AI coding assistant by HeySalad. You help developers write, debug, and understand code.
+
+ You have access to tools that let you read files, write files, edit files, run shell commands, search files, and list directories. Use them proactively to help the user.
+
+ Guidelines:
+ - Read files before modifying them to understand the existing code.
+ - Use edit_file for targeted changes instead of rewriting entire files.
+ - When running commands, explain what you're about to run and why.
+ - Be concise but thorough. Show relevant code snippets in your responses.
+ - If you're unsure about something, say so rather than guessing.
+ - Format responses with markdown for readability.`;
+
+ export class BaseProvider {
+   constructor(apiKey, model) {
+     this.apiKey = apiKey;
+     this.model = model;
+   }
+
+   /**
+    * Async generator that yields streaming events:
+    *   { type: "text", content: string }
+    *   { type: "tool_use_start", id: string, name: string }
+    *   { type: "tool_input_delta", content: string }
+    *   { type: "tool_use_end", id: string, name: string, input: object }
+    *   { type: "done", stopReason: string }
+    */
+   async *chat(messages, tools, options = {}) {
+     throw new Error("chat() must be implemented by subclass");
+   }
+
+   getModel() {
+     return this.model;
+   }
+ }
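
A sketch of how the event protocol documented in `BaseProvider` is meant to be consumed, mirroring the loop in the new agent command. The `EchoProvider` subclass below is hypothetical and exists only to make the example self-contained; real providers stream from an LLM API.

```js
// Minimal consumer of the BaseProvider streaming-event protocol documented above.
import { BaseProvider } from "./src/lib/providers/base.js";

// Hypothetical provider for illustration: it never calls an API, it just echoes.
class EchoProvider extends BaseProvider {
  async *chat(messages, tools, options = {}) {
    yield { type: "text", content: `echo: ${messages.at(-1).content}` };
    yield { type: "done", stopReason: "end_turn" };
  }
}

const provider = new EchoProvider("fake-key", "echo-1");
for await (const event of provider.chat([{ role: "user", content: "hi" }], [], {})) {
  if (event.type === "text") process.stdout.write(event.content);
  if (event.type === "tool_use_end") console.log("tool call:", event.name, event.input);
}
```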
@@ -0,0 +1,7 @@
+ import { OpenAIProvider } from "./openai.js";
+
+ export class BedrockProvider extends OpenAIProvider {
+   constructor(apiKey, model = "anthropic.claude-opus-4-6-v1:0", region = "us-east-1") {
+     super(apiKey, model, `https://bedrock-runtime.${region}.amazonaws.com/openai/v1`);
+   }
+ }
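
The Bedrock provider simply points the OpenAI-compatible provider at Bedrock's OpenAI-style endpoint for the chosen region. A sketch of constructing it directly, using the constructor signature above; in the CLI itself providers are created via `createProvider()` from `src/lib/providers/index.js` (not shown in this diff), and the credential below is a placeholder assumption.

```js
// Sketch only: direct construction of the Bedrock provider shown above.
import { BedrockProvider } from "./src/lib/providers/bedrock.js";

const provider = new BedrockProvider(
  process.env.BEDROCK_API_KEY, // assumption: an API-key style credential, name is hypothetical
  "anthropic.claude-opus-4-6-v1:0",
  "us-east-1"                  // matches the new `bedrock.region` config default
);

console.log(provider.getModel());
```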