edgecrab-sdk 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +111 -0
- package/dist/chunk-QUOGGCXF.mjs +300 -0
- package/dist/cli.mjs +572 -0
- package/dist/index.d.mts +234 -0
- package/dist/index.d.ts +234 -0
- package/dist/index.js +518 -0
- package/dist/index.mjs +482 -0
- package/dist/package-T3QGW35K.mjs +55 -0
- package/dist/package-UHAPSKPH.mjs +55 -0
- package/dist/package-VQYV6PVT.mjs +55 -0
- package/package.json +51 -0
package/README.md
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
# edgecrab-sdk
|
|
2
|
+
|
|
3
|
+
[![npm version](https://img.shields.io/npm/v/edgecrab-sdk.svg)](https://www.npmjs.com/package/edgecrab-sdk)
|
|
4
|
+
[![node](https://img.shields.io/badge/node-%3E%3D18-brightgreen.svg)](https://nodejs.org)
|
|
5
|
+
|
|
6
|
+
Node.js SDK for **EdgeCrab** — a Rust-native autonomous coding agent.
|
|
7
|
+
|
|
8
|
+
## Install
|
|
9
|
+
|
|
10
|
+
```bash
|
|
11
|
+
npm install edgecrab-sdk
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
## Quick Start
|
|
15
|
+
|
|
16
|
+
### Agent API (recommended)
|
|
17
|
+
|
|
18
|
+
```typescript
|
|
19
|
+
import { Agent } from 'edgecrab-sdk';
|
|
20
|
+
|
|
21
|
+
const agent = new Agent({
|
|
22
|
+
model: 'anthropic/claude-sonnet-4-20250514',
|
|
23
|
+
systemPrompt: 'You are a helpful coding assistant',
|
|
24
|
+
apiKey: 'your-api-key', // optional
|
|
25
|
+
});
|
|
26
|
+
|
|
27
|
+
// Simple chat with conversation history
|
|
28
|
+
const reply = await agent.chat('Explain Rust ownership');
|
|
29
|
+
console.log(reply);
|
|
30
|
+
|
|
31
|
+
// Continue the conversation
|
|
32
|
+
const followUp = await agent.chat('Give me an example');
|
|
33
|
+
console.log(followUp);
|
|
34
|
+
```
|
|
35
|
+
|
|
36
|
+
### Full conversation run
|
|
37
|
+
|
|
38
|
+
```typescript
|
|
39
|
+
const result = await agent.run('Refactor this function for better error handling');
|
|
40
|
+
console.log(result.response);
|
|
41
|
+
console.log(`Turns: ${result.turnsUsed}, Tokens: ${result.usage.total_tokens}`);
|
|
42
|
+
```
|
|
43
|
+
|
|
44
|
+
### Streaming
|
|
45
|
+
|
|
46
|
+
```typescript
|
|
47
|
+
for await (const token of agent.stream('Write a haiku about Rust')) {
|
|
48
|
+
process.stdout.write(token);
|
|
49
|
+
}
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
### Low-level client
|
|
53
|
+
|
|
54
|
+
```typescript
|
|
55
|
+
import { EdgeCrabClient } from 'edgecrab-sdk';
|
|
56
|
+
|
|
57
|
+
const client = new EdgeCrabClient({
|
|
58
|
+
baseUrl: 'http://127.0.0.1:8642',
|
|
59
|
+
apiKey: 'your-key',
|
|
60
|
+
});
|
|
61
|
+
|
|
62
|
+
const resp = await client.createCompletion(
|
|
63
|
+
[{ role: 'user', content: 'Hello' }],
|
|
64
|
+
{ model: 'anthropic/claude-sonnet-4-20250514' },
|
|
65
|
+
);
|
|
66
|
+
console.log(resp.choices[0].message.content);
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
## CLI
|
|
70
|
+
|
|
71
|
+
```bash
|
|
72
|
+
edgecrab chat "What is the meaning of life?"
|
|
73
|
+
edgecrab chat --model gpt-4 --system "Be concise" "Explain monads"
|
|
74
|
+
edgecrab chat --stream "Tell me a story"
|
|
75
|
+
edgecrab models
|
|
76
|
+
edgecrab health
|
|
77
|
+
```
|
|
78
|
+
|
|
79
|
+
### Environment Variables
|
|
80
|
+
|
|
81
|
+
| Variable | Description |
|
|
82
|
+
|---|---|
|
|
83
|
+
| `EDGECRAB_BASE_URL` | API server URL (default: `http://127.0.0.1:8642`) |
|
|
84
|
+
| `EDGECRAB_API_KEY` | Bearer token for authentication |
|
|
85
|
+
|
|
86
|
+
## API Reference
|
|
87
|
+
|
|
88
|
+
### `Agent`
|
|
89
|
+
|
|
90
|
+
| Method | Description |
|
|
91
|
+
|---|---|
|
|
92
|
+
| `chat(message)` | Send a message, return reply string. Maintains history. |
|
|
93
|
+
| `run(message)` | Full conversation run — returns `AgentResult` |
|
|
94
|
+
| `stream(message)` | Async generator yielding tokens |
|
|
95
|
+
| `addMessage(role, content)` | Inject a message into history |
|
|
96
|
+
| `reset()` | Clear history and start a new session |
|
|
97
|
+
| `getMessages()` | Get conversation history |
|
|
98
|
+
| `getTurnCount()` | Number of completed turns |
|
|
99
|
+
| `getUsage()` | Accumulated token usage |
|
|
100
|
+
| `listModels()` | List available models |
|
|
101
|
+
| `health()` | Check server health |
|
|
102
|
+
|
|
103
|
+
### `EdgeCrabClient`
|
|
104
|
+
|
|
105
|
+
Lower-level HTTP client with `chat()`, `createCompletion()`, `streamCompletion()`, `listModels()`, `health()`.
|
|
106
|
+
|
|
107
|
+
## Links
|
|
108
|
+
|
|
109
|
+
- [GitHub](https://github.com/raphaelmansuy/edgecrab)
|
|
110
|
+
- [Python SDK](https://github.com/raphaelmansuy/edgecrab/tree/main/sdks/python)
|
|
111
|
+
- [EdgeCrab Documentation](https://github.com/raphaelmansuy/edgecrab/tree/main/docs)
|
|
@@ -0,0 +1,300 @@
|
|
|
1
|
+
// src/client.ts

// Default EdgeCrab server address used when the caller supplies no baseUrl.
var DEFAULT_BASE_URL = "http://127.0.0.1:8642";
// Default request timeout: 12e4 ms = 120,000 ms = 2 minutes.
var DEFAULT_TIMEOUT = 12e4;
|
|
4
|
+
/**
 * Error raised for EdgeCrab API failures. Carries the HTTP status code of the
 * failed response when one was received (undefined for transport-level errors).
 */
var EdgeCrabError = class extends Error {
  /** HTTP status of the failed response, if the server answered at all. */
  statusCode;

  /**
   * @param {string} message - Human-readable error description.
   * @param {number} [statusCode] - HTTP status code, when available.
   */
  constructor(message, statusCode) {
    super(message);
    this.statusCode = statusCode;
    this.name = "EdgeCrabError";
  }
};
|
|
12
|
+
/**
 * Build the default HTTP headers for API requests.
 *
 * @param {string} [apiKey] - Bearer token; when omitted or empty, no
 *   Authorization header is included.
 * @returns {Record<string, string>} Headers suitable for fetch().
 */
function buildHeaders(apiKey) {
  return apiKey
    ? { "Content-Type": "application/json", Authorization: `Bearer ${apiKey}` }
    : { "Content-Type": "application/json" };
}
|
|
17
|
+
/**
 * Low-level HTTP client for the EdgeCrab API.
 *
 * Wraps fetch() with base-URL handling, bearer auth, a per-request timeout,
 * and SSE parsing for streaming completions. All failures surface as
 * EdgeCrabError (except timeouts, which currently propagate as AbortError —
 * NOTE(review): callers that want a uniform error type must handle both).
 */
var EdgeCrabClient = class {
  /** Server base URL with any trailing slashes stripped. */
  baseUrl;
  /** Default headers (Content-Type and optional Authorization). */
  headers;
  /** Per-request timeout in milliseconds. */
  timeout;

  /**
   * @param {{ baseUrl?: string, apiKey?: string, timeout?: number }} [options]
   */
  constructor(options = {}) {
    this.baseUrl = (options.baseUrl ?? DEFAULT_BASE_URL).replace(/\/+$/, "");
    this.headers = buildHeaders(options.apiKey);
    this.timeout = options.timeout ?? DEFAULT_TIMEOUT;
  }

  /**
   * Simple chat — send a message, get a reply string.
   *
   * @param {string} message - User message text.
   * @param {{ system?: string, model?: string, temperature?: number, maxTokens?: number }} [options]
   * @returns {Promise<string>} Assistant reply text ("" if the response has no content).
   */
  async chat(message, options) {
    const messages = [];
    if (options?.system) {
      messages.push({ role: "system", content: options.system });
    }
    messages.push({ role: "user", content: message });
    const resp = await this.createCompletion(messages, {
      model: options?.model,
      temperature: options?.temperature,
      maxTokens: options?.maxTokens
    });
    return resp.choices?.[0]?.message?.content ?? "";
  }

  /**
   * Create a chat completion (non-streaming).
   *
   * @param {Array<{role: string, content: string}>} messages - Conversation so far.
   * @param {{ model?: string, temperature?: number, maxTokens?: number, tools?: unknown[] }} [options]
   * @returns {Promise<object>} Parsed completion response from the server.
   * @throws {EdgeCrabError} On non-2xx responses.
   */
  async createCompletion(messages, options) {
    const body = {
      model: options?.model ?? "anthropic/claude-sonnet-4-20250514",
      messages,
      stream: false
    };
    // Only include optional fields when explicitly set, so the server's
    // defaults apply otherwise.
    if (options?.temperature !== void 0) body.temperature = options.temperature;
    if (options?.maxTokens !== void 0) body.max_tokens = options.maxTokens;
    if (options?.tools) body.tools = options.tools;
    const response = await this.fetchJSON(
      "/v1/chat/completions",
      { method: "POST", body: JSON.stringify(body) }
    );
    return response;
  }

  /**
   * Create a streaming chat completion. Yields parsed SSE chunk objects.
   *
   * NOTE: the timeout covers the entire stream duration, not per-chunk —
   * long-running streams abort after `this.timeout` ms.
   *
   * @param {Array<{role: string, content: string}>} messages - Conversation so far.
   * @param {{ model?: string, temperature?: number, maxTokens?: number, tools?: unknown[] }} [options]
   * @yields {object} Parsed JSON payload of each `data:` SSE line.
   * @throws {EdgeCrabError} On non-2xx responses or a missing body.
   */
  async *streamCompletion(messages, options) {
    const body = {
      model: options?.model ?? "anthropic/claude-sonnet-4-20250514",
      messages,
      stream: true
    };
    if (options?.temperature !== void 0) body.temperature = options.temperature;
    if (options?.maxTokens !== void 0) body.max_tokens = options.maxTokens;
    if (options?.tools) body.tools = options.tools;
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeout);
    let reader;
    try {
      const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
        method: "POST",
        headers: this.headers,
        body: JSON.stringify(body),
        signal: controller.signal
      });
      if (!response.ok) {
        const text = await response.text();
        throw new EdgeCrabError(`API error ${response.status}: ${text}`, response.status);
      }
      if (!response.body) {
        throw new EdgeCrabError("No response body for streaming request");
      }
      reader = response.body.getReader();
      const decoder = new TextDecoder();
      let buffer = "";
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        // SSE events are newline-delimited; keep the trailing partial line
        // in the buffer until more bytes arrive.
        const lines = buffer.split("\n");
        buffer = lines.pop() ?? "";
        for (const line of lines) {
          const trimmed = line.trim();
          if (!trimmed.startsWith("data: ")) continue;
          const payload = trimmed.slice(6);
          if (payload === "[DONE]") return;
          yield JSON.parse(payload);
        }
      }
    } finally {
      clearTimeout(timeoutId);
      // FIX: if the consumer exits the generator early (break/throw), the
      // original code left the reader open and leaked the HTTP connection.
      // Cancelling here releases it; harmless after normal completion.
      if (reader) {
        try {
          await reader.cancel();
        } catch {
          // Stream may already be closed or errored — nothing to release.
        }
      }
    }
  }

  /**
   * List available models.
   * @returns {Promise<unknown[]>} Model descriptors (empty array if none).
   */
  async listModels() {
    const data = await this.fetchJSON("/v1/models");
    // Server may return either a bare array or an OpenAI-style {data: [...]}.
    if (Array.isArray(data)) return data;
    return data.data ?? [];
  }

  /**
   * Check server health.
   * @returns {Promise<object>} Parsed health payload from /v1/health.
   */
  async health() {
    return this.fetchJSON("/v1/health");
  }

  /**
   * Fetch a JSON endpoint with default headers and the configured timeout.
   *
   * @param {string} path - Path appended to baseUrl (must start with "/").
   * @param {RequestInit} [init] - Extra fetch options; headers are merged over defaults.
   * @returns {Promise<any>} Parsed JSON body.
   * @throws {EdgeCrabError} On non-2xx responses, with the server's
   *   error.message extracted when the body is JSON.
   */
  async fetchJSON(path, init) {
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeout);
    try {
      const response = await fetch(`${this.baseUrl}${path}`, {
        ...init,
        headers: { ...this.headers, ...init?.headers ?? {} },
        signal: controller.signal
      });
      if (!response.ok) {
        const text = await response.text();
        let detail = text;
        try {
          const json = JSON.parse(text);
          detail = json?.error?.message ?? text;
        } catch {
          // Body was not JSON — fall back to the raw text.
        }
        throw new EdgeCrabError(`API error ${response.status}: ${detail}`, response.status);
      }
      return await response.json();
    } finally {
      clearTimeout(timeoutId);
    }
  }
};
|
|
138
|
+
|
|
139
|
+
// src/agent.ts
|
|
140
|
+
import { randomUUID } from "crypto";
|
|
141
|
+
/**
 * High-level conversational agent over EdgeCrabClient.
 *
 * Maintains message history, a turn counter, a session id, and accumulated
 * token usage across calls. Configuration falls back to the
 * EDGECRAB_BASE_URL / EDGECRAB_API_KEY environment variables.
 *
 * NOTE(review): `maxTurns` and `onToolCall` are stored but not consulted by
 * any method visible here — presumably used by code elsewhere; confirm.
 */
var Agent = class {
  /** Model identifier sent with every request. */
  model;
  /** Optional system prompt, injected as the first history message. */
  systemPrompt;
  maxTurns;
  temperature;
  maxTokens;
  /** When true (and onToken is set), chat() streams and invokes onToken. */
  streaming;
  /** Unique id for this conversation; regenerated by reset(). */
  sessionId;
  // Callbacks
  onToken;
  onToolCall;
  onTurn;
  /** Conversation history, including the system prompt when present. */
  messages = [];
  /** Number of user turns started (incremented before each request). */
  turnCount = 0;
  /** Token usage accumulated from non-streaming responses. */
  totalUsage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
  /** Underlying EdgeCrabClient instance. */
  client;

  /**
   * @param {object} [options] - model, systemPrompt, maxTurns, temperature,
   *   maxTokens, streaming, sessionId, onToken, onToolCall, onTurn, baseUrl,
   *   apiKey, timeout (seconds — converted to ms for the client).
   */
  constructor(options = {}) {
    this.model = options.model ?? "anthropic/claude-sonnet-4-20250514";
    this.systemPrompt = options.systemPrompt;
    this.maxTurns = options.maxTurns ?? 50;
    this.temperature = options.temperature;
    this.maxTokens = options.maxTokens;
    this.streaming = options.streaming ?? false;
    this.sessionId = options.sessionId ?? randomUUID();
    this.onToken = options.onToken;
    this.onToolCall = options.onToolCall;
    this.onTurn = options.onTurn;
    const baseUrl = options.baseUrl ?? process.env.EDGECRAB_BASE_URL ?? "http://127.0.0.1:8642";
    const apiKey = options.apiKey ?? process.env.EDGECRAB_API_KEY;
    this.client = new EdgeCrabClient({
      baseUrl,
      apiKey,
      // Agent timeout is in seconds; the client expects milliseconds.
      // NOTE(review): `timeout: 0` falls through to the client default —
      // presumably intentional; confirm.
      timeout: options.timeout ? options.timeout * 1e3 : void 0
    });
    if (this.systemPrompt) {
      this.messages.push({ role: "system", content: this.systemPrompt });
    }
  }

  /**
   * Send a message and return the assistant's text reply. Maintains history.
   *
   * @param {string} message - User message text.
   * @returns {Promise<string>} Assistant reply ("" when the response carries
   *   no text content, e.g. a tool-call-only message).
   */
  async chat(message) {
    this.messages.push({ role: "user", content: message });
    this.turnCount++;
    if (this.streaming && this.onToken) {
      return this.chatStreaming();
    }
    const resp = await this.client.createCompletion(this.messages, {
      model: this.model,
      temperature: this.temperature,
      maxTokens: this.maxTokens
    });
    const assistantMsg = this.extractResponse(resp);
    this.accumulateUsage(resp.usage);
    this.onTurn?.(this.turnCount, assistantMsg);
    // FIX: content can be null/undefined on tool-call-only responses; the
    // documented contract is to return a string, so coalesce to "".
    return assistantMsg.content ?? "";
  }

  /**
   * Streaming variant of chat(): collects tokens, invokes onToken per token.
   * NOTE(review): usage is not accumulated on this path — streaming chunks
   * presumably lack a usage field; confirm against the server.
   * @returns {Promise<string>} Full concatenated reply text.
   */
  async chatStreaming() {
    const collected = [];
    for await (const chunk of this.client.streamCompletion(this.messages, {
      model: this.model,
      temperature: this.temperature,
      maxTokens: this.maxTokens
    })) {
      for (const choice of chunk.choices) {
        if (choice.delta.content) {
          collected.push(choice.delta.content);
          this.onToken?.(choice.delta.content);
        }
      }
    }
    const fullText = collected.join("");
    const assistantMsg = { role: "assistant", content: fullText };
    this.messages.push(assistantMsg);
    this.onTurn?.(this.turnCount, assistantMsg);
    return fullText;
  }

  /**
   * Run a full agent conversation. Returns a structured AgentResult.
   * NOTE(review): currently a single chat() turn; finishedNaturally is
   * always reported true.
   *
   * @param {string} message - User message text.
   * @returns {Promise<object>} { response, messages, sessionId, model,
   *   turnsUsed, finishedNaturally, usage }.
   */
  async run(message) {
    const response = await this.chat(message);
    return {
      response,
      messages: [...this.messages],
      sessionId: this.sessionId,
      model: this.model,
      turnsUsed: this.turnCount,
      finishedNaturally: true,
      usage: { ...this.totalUsage }
    };
  }

  /**
   * Stream response tokens as an async iterable. Maintains history.
   * @param {string} message - User message text.
   * @yields {string} Each content token as it arrives.
   */
  async *stream(message) {
    this.messages.push({ role: "user", content: message });
    this.turnCount++;
    const collected = [];
    for await (const chunk of this.client.streamCompletion(this.messages, {
      model: this.model,
      temperature: this.temperature,
      maxTokens: this.maxTokens
    })) {
      for (const choice of chunk.choices) {
        if (choice.delta.content) {
          collected.push(choice.delta.content);
          yield choice.delta.content;
        }
      }
    }
    this.messages.push({ role: "assistant", content: collected.join("") });
  }

  /**
   * Manually inject a message into the conversation history.
   * @param {string} role - Message role (e.g. "user", "assistant", "system").
   * @param {string} content - Message text.
   */
  addMessage(role, content) {
    this.messages.push({ role, content });
  }

  /** Reset conversation state for a new session (new sessionId, fresh usage). */
  reset() {
    this.messages = [];
    this.turnCount = 0;
    this.totalUsage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
    this.sessionId = randomUUID();
    if (this.systemPrompt) {
      this.messages.push({ role: "system", content: this.systemPrompt });
    }
  }

  /** Current conversation history (shallow copy). */
  getMessages() {
    return [...this.messages];
  }

  /** Number of user turns completed. */
  getTurnCount() {
    return this.turnCount;
  }

  /** Accumulated token usage (copy). */
  getUsage() {
    return { ...this.totalUsage };
  }

  /** List available models from the server. */
  async listModels() {
    return this.client.listModels();
  }

  /** Check server health. */
  async health() {
    return this.client.health();
  }

  /**
   * Pull the assistant message out of a completion response and append it
   * to history. Falls back to an empty assistant message when absent.
   * @param {object} resp - Completion response.
   * @returns {object} The assistant message.
   */
  extractResponse(resp) {
    const msg = resp.choices?.[0]?.message ?? { role: "assistant", content: "" };
    this.messages.push(msg);
    return msg;
  }

  /**
   * Fold a response's usage counts into the running totals (no-op if absent).
   * @param {object} [usage] - { prompt_tokens, completion_tokens, total_tokens }.
   */
  accumulateUsage(usage) {
    if (usage) {
      this.totalUsage.prompt_tokens += usage.prompt_tokens;
      this.totalUsage.completion_tokens += usage.completion_tokens;
      this.totalUsage.total_tokens += usage.total_tokens;
    }
  }
};
|
|
295
|
+
|
|
296
|
+
// Public API surface of this bundle: error type, low-level HTTP client,
// and the high-level conversational Agent.
export {
  EdgeCrabError,
  EdgeCrabClient,
  Agent
};
|