edgecrab-sdk 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,518 @@
1
"use strict";

// ── esbuild CommonJS interop helpers (generated prelude) ─────────────────
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;

// Install a lazy, enumerable getter on `target` for every entry in `all`.
// Getters defer evaluation, so exports may reference bindings defined later.
var __export = (target, all) => {
  for (var name in all) {
    __defProp(target, name, { get: all[name], enumerable: true });
  }
};

// Copy own properties of `from` onto `to` as forwarding getters, skipping
// keys `to` already owns and the optional `except` key. Enumerability of
// each copied property mirrors the source descriptor (defaulting to true
// when no descriptor is found).
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (let key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, {
          get: () => from[key],
          enumerable: !desc || desc.enumerable
        });
      }
    }
  }
  return to;
};

// Produce a CommonJS export object tagged with `__esModule` so bundlers and
// Node interop treat it as a transpiled ES module namespace.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
+
20
// src/index.ts
// Public export surface of the package. `__export` installs lazy getters,
// so the classes referenced here may safely be defined later in this bundle
// (the getters are not evaluated until a consumer reads the property).
var index_exports = {};
__export(index_exports, {
  Agent: () => Agent,
  AuthenticationError: () => AuthenticationError,
  ConnectionError: () => ConnectionError,
  EdgeCrabClient: () => EdgeCrabClient,
  EdgeCrabError: () => EdgeCrabError,
  InterruptedError: () => InterruptedError,
  MaxTurnsExceededError: () => MaxTurnsExceededError,
  RateLimitError: () => RateLimitError,
  ServerError: () => ServerError,
  TimeoutError: () => TimeoutError
});
// Re-expose the namespace through module.exports with the __esModule marker.
module.exports = __toCommonJS(index_exports);
35
+
36
+ // src/agent.ts
37
+ var import_node_crypto = require("crypto");
38
+
39
// src/client.ts
// Transport defaults. DEFAULT_TIMEOUT is in milliseconds (12e4 = 120 000 ms
// = 2 minutes); DEFAULT_RETRY_BASE_DELAY (1e3 = 1 s) is the base for the
// exponential backoff used by postWithRetry.
var DEFAULT_BASE_URL = "http://127.0.0.1:8642";
var DEFAULT_TIMEOUT = 12e4;
var DEFAULT_MAX_RETRIES = 3;
var DEFAULT_RETRY_BASE_DELAY = 1e3;
44
// ── Error hierarchy ──────────────────────────────────────────────────────
/**
 * Base class for all SDK errors. Carries the originating HTTP status code
 * when the error came from an API response (undefined otherwise).
 */
var EdgeCrabError = class extends Error {
  statusCode;
  constructor(message, statusCode) {
    super(message);
    this.name = "EdgeCrabError";
    this.statusCode = statusCode;
  }
};
/** Authentication/authorization failure (HTTP 401 or 403). */
var AuthenticationError = class extends EdgeCrabError {
  constructor(message, statusCode) {
    super(message, statusCode);
    this.name = "AuthenticationError";
  }
};
/**
 * Rate limit exceeded (HTTP 429). `retryAfter`, when present, is the
 * server's suggested wait in seconds.
 */
var RateLimitError = class extends EdgeCrabError {
  retryAfter;
  constructor(message, retryAfter) {
    super(message, 429);
    this.name = "RateLimitError";
    this.retryAfter = retryAfter;
  }
};
/** Server-side failure (HTTP 5xx). */
var ServerError = class extends EdgeCrabError {
  constructor(message, statusCode) {
    super(message, statusCode);
    this.name = "ServerError";
  }
};
/** The request exceeded the configured timeout. No status code is set. */
var TimeoutError = class extends EdgeCrabError {
  constructor(message = "Request timed out") {
    super(message);
    this.name = "TimeoutError";
  }
};
/** The EdgeCrab server could not be reached at all. No status code is set. */
var ConnectionError = class extends EdgeCrabError {
  constructor(message = "Could not connect to EdgeCrab server") {
    super(message);
    this.name = "ConnectionError";
  }
};
/** An Agent consumed all of its allotted user turns. */
var MaxTurnsExceededError = class extends EdgeCrabError {
  maxTurns;
  constructor(maxTurns) {
    super(`Agent exceeded maximum turns (${maxTurns})`);
    this.name = "MaxTurnsExceededError";
    this.maxTurns = maxTurns;
  }
};
/** An Agent conversation was stopped via interrupt(). */
var InterruptedError = class extends EdgeCrabError {
  constructor() {
    super("Agent conversation was interrupted");
    this.name = "InterruptedError";
  }
};
98
/**
 * Map an HTTP error response to the most specific EdgeCrabError subclass.
 * @param {number} status HTTP status code of the response.
 * @param {string} detail Error detail extracted from the response body.
 * @param {Headers} [headers] Response headers (used for Retry-After on 429).
 * @returns {EdgeCrabError} AuthenticationError (401/403), RateLimitError
 *   (429), ServerError (5xx) or a plain EdgeCrabError for anything else.
 */
function classifyError(status, detail, headers) {
  const msg = `API error ${status}: ${detail}`;
  if (status === 401 || status === 403) return new AuthenticationError(msg, status);
  if (status === 429) {
    // Retry-After may be delay-seconds OR an HTTP-date (RFC 9110 §10.2.3).
    // parseFloat of a date yields NaN; only propagate a finite seconds value
    // so callers never see retryAfter === NaN.
    const ra = headers?.get("retry-after");
    const seconds = ra ? parseFloat(ra) : NaN;
    return new RateLimitError(msg, Number.isFinite(seconds) ? seconds : void 0);
  }
  if (status >= 500) return new ServerError(msg, status);
  return new EdgeCrabError(msg, status);
}
108
/**
 * Decide whether a failed request may be retried by postWithRetry.
 * Transient conditions — server errors, timeouts, connection failures and
 * rate limits — are retryable; everything else is terminal.
 * @param {unknown} err The thrown error.
 * @returns {boolean} true when the request should be attempted again.
 */
function isRetryable(err) {
  const transient =
    err instanceof ServerError ||
    err instanceof TimeoutError ||
    err instanceof ConnectionError ||
    err instanceof RateLimitError;
  if (transient) return true;
  // Node's fetch (undici) reports network-level failures as a TypeError
  // whose message mentions "fetch".
  return err instanceof TypeError && String(err.message).includes("fetch");
}
115
/**
 * Build the default request headers for every API call.
 * @param {string} [apiKey] Bearer token; when falsy (including ""), no
 *   Authorization header is attached.
 * @returns {Record<string, string>} Headers with Content-Type and, when an
 *   API key is provided, an Authorization header.
 */
function buildHeaders(apiKey) {
  return apiKey
    ? { "Content-Type": "application/json", "Authorization": `Bearer ${apiKey}` }
    : { "Content-Type": "application/json" };
}
120
/**
 * Low-level HTTP client for the EdgeCrab completion server.
 * Handles auth headers, per-request timeouts, retry with exponential
 * backoff, error classification, and SSE streaming.
 */
var EdgeCrabClient = class {
  baseUrl;
  headers;
  timeout;
  maxRetries;
  retryBaseDelay;
  /**
   * @param {object} [options]
   * @param {string} [options.baseUrl] Server root (default http://127.0.0.1:8642).
   * @param {string} [options.apiKey] Bearer token for the Authorization header.
   * @param {number} [options.timeout] Per-request timeout in MILLISECONDS.
   * @param {number} [options.maxRetries] Retry attempts for transient failures.
   * @param {number} [options.retryBaseDelay] Backoff base delay in ms.
   */
  constructor(options = {}) {
    // Strip trailing slashes so `${baseUrl}${path}` joins cleanly.
    this.baseUrl = (options.baseUrl ?? DEFAULT_BASE_URL).replace(/\/+$/, "");
    this.headers = buildHeaders(options.apiKey);
    this.timeout = options.timeout ?? DEFAULT_TIMEOUT;
    this.maxRetries = options.maxRetries ?? DEFAULT_MAX_RETRIES;
    this.retryBaseDelay = options.retryBaseDelay ?? DEFAULT_RETRY_BASE_DELAY;
  }
  /**
   * Translate low-level fetch failures into this SDK's typed errors.
   * Previously raw AbortError/TypeError escaped to callers, so TimeoutError
   * and ConnectionError were defined (and checked by isRetryable) but never
   * actually thrown — timeouts were therefore never retried.
   * @param {unknown} err Error thrown by fetch()/reader.read().
   * @returns {unknown} A typed SDK error, or the original error unchanged.
   */
  translateFetchFailure(err) {
    // Our AbortController only fires from the timeout timer, so an
    // AbortError here means the request timed out.
    if (err && err.name === "AbortError") return new TimeoutError();
    // Node's fetch (undici) reports network failures as TypeError("fetch failed").
    if (err instanceof TypeError && String(err.message).includes("fetch")) {
      return new ConnectionError();
    }
    return err;
  }
  /**
   * Assemble the JSON request body shared by streaming and non-streaming
   * completions (deduplicated from createCompletion/streamCompletion).
   * @param {Array<object>} messages Chat messages.
   * @param {object} [options] model/temperature/maxTokens/tools.
   * @param {boolean} stream Whether to request an SSE stream.
   * @returns {object} The wire-format request body (snake_case max_tokens).
   */
  buildRequestBody(messages, options, stream) {
    const body = {
      model: options?.model ?? "anthropic/claude-sonnet-4-20250514",
      messages,
      stream
    };
    if (options?.temperature !== void 0) body.temperature = options.temperature;
    if (options?.maxTokens !== void 0) body.max_tokens = options.maxTokens;
    if (options?.tools) body.tools = options.tools;
    return body;
  }
  /** Simple chat — send a message, get a reply string. */
  async chat(message, options) {
    const messages = [];
    if (options?.system) {
      messages.push({ role: "system", content: options.system });
    }
    messages.push({ role: "user", content: message });
    const resp = await this.createCompletion(messages, {
      model: options?.model,
      temperature: options?.temperature,
      maxTokens: options?.maxTokens
    });
    // Tolerate missing choices/message — return "" rather than throwing.
    return resp.choices?.[0]?.message?.content ?? "";
  }
  /** Create a chat completion (non-streaming). Retries transient failures. */
  async createCompletion(messages, options) {
    return this.postWithRetry(this.buildRequestBody(messages, options, false));
  }
  /**
   * POST a completion body with retry. Backoff doubles per attempt
   * (retryBaseDelay * 2^attempt) and is raised to the server's Retry-After
   * hint when that is longer. Non-retryable errors are rethrown immediately.
   * @param {object} body Wire-format request body.
   * @returns {Promise<object>} Parsed JSON response.
   */
  async postWithRetry(body) {
    let lastError;
    for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
      try {
        return await this.fetchJSON("/v1/chat/completions", {
          method: "POST",
          body: JSON.stringify(body)
        });
      } catch (err) {
        lastError = err;
        if (!isRetryable(err) || attempt === this.maxRetries) throw err;
        let delay = this.retryBaseDelay * 2 ** attempt;
        if (err instanceof RateLimitError && err.retryAfter) {
          delay = Math.max(delay, err.retryAfter * 1e3);
        }
        await new Promise((r) => setTimeout(r, delay));
      }
    }
    // Unreachable in practice (the loop either returns or throws), kept as
    // a safety net.
    throw lastError;
  }
  /**
   * Create a streaming chat completion. Yields parsed SSE chunk objects.
   * NOTE: the timeout spans the ENTIRE stream, not just the first byte; a
   * stream longer than `timeout` ms is aborted and surfaces as TimeoutError.
   */
  async *streamCompletion(messages, options) {
    const body = this.buildRequestBody(messages, options, true);
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeout);
    try {
      let response;
      try {
        response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
          method: "POST",
          headers: this.headers,
          body: JSON.stringify(body),
          signal: controller.signal
        });
      } catch (err) {
        throw this.translateFetchFailure(err);
      }
      if (!response.ok) {
        const text = await response.text();
        throw classifyError(response.status, text, response.headers);
      }
      if (!response.body) {
        throw new EdgeCrabError("No response body for streaming request");
      }
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      let buffer = "";
      while (true) {
        let result;
        try {
          result = await reader.read();
        } catch (err) {
          // A timeout mid-stream aborts the reader; surface it as TimeoutError.
          throw this.translateFetchFailure(err);
        }
        if (result.done) break;
        buffer += decoder.decode(result.value, { stream: true });
        // SSE frames are newline-delimited; the final element may be a
        // partial line, so keep it in the buffer for the next chunk.
        const lines = buffer.split("\n");
        buffer = lines.pop() ?? "";
        for (const line of lines) {
          const trimmed = line.trim();
          if (!trimmed.startsWith("data: ")) continue;
          const payload = trimmed.slice(6);
          if (payload === "[DONE]") return;
          yield JSON.parse(payload);
        }
      }
    } finally {
      clearTimeout(timeoutId);
    }
  }
  /** List available models. */
  async listModels() {
    const data = await this.fetchJSON("/v1/models");
    // Some servers return a bare array, others the OpenAI-style { data: [...] }.
    if (Array.isArray(data)) return data;
    return data.data ?? [];
  }
  /** Check server health. */
  async health() {
    return this.fetchJSON("/v1/health");
  }
  /**
   * Perform a JSON request with timeout handling and error classification.
   * @param {string} path Path appended to baseUrl.
   * @param {RequestInit} [init] Extra fetch options; headers are merged over
   *   the client defaults.
   * @returns {Promise<object>} Parsed JSON body.
   * @throws {TimeoutError|ConnectionError|EdgeCrabError} Typed SDK errors.
   */
  async fetchJSON(path, init) {
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeout);
    try {
      let response;
      try {
        response = await fetch(`${this.baseUrl}${path}`, {
          ...init,
          headers: { ...this.headers, ...init?.headers ?? {} },
          signal: controller.signal
        });
      } catch (err) {
        throw this.translateFetchFailure(err);
      }
      if (!response.ok) {
        const text = await response.text();
        let detail = text;
        try {
          // Prefer the structured error message when the body is JSON.
          const json = JSON.parse(text);
          detail = json?.error?.message ?? text;
        } catch {
          // Body was not JSON; keep the raw text as the detail.
        }
        throw classifyError(response.status, detail, response.headers);
      }
      return await response.json();
    } finally {
      clearTimeout(timeoutId);
    }
  }
};
262
+
263
// src/agent.ts
/**
 * Stateful conversation agent built on EdgeCrabClient. Tracks message
 * history, turn count and token usage; supports streaming, interruption,
 * persistence (export/import) and forking (clone).
 */
var Agent = class _Agent {
  model;
  systemPrompt;
  maxTurns;
  temperature;
  maxTokens;
  streaming;
  sessionId;
  // Callbacks
  onToken;
  onToolCall;
  onTurn;
  onError;
  messages = [];
  turnCount = 0;
  totalUsage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
  client;
  interrupted = false;
  // Resolved connection settings, remembered so clone() can rebuild an
  // identical client (previously clones silently fell back to env defaults).
  clientConfig;
  /**
   * @param {object} [options]
   * @param {string} [options.model] Model id (default claude-sonnet-4).
   * @param {string} [options.systemPrompt] Seeds history with a system message.
   * @param {number} [options.maxTurns] Maximum user turns (default 50).
   * @param {boolean} [options.streaming] Stream tokens through onToken.
   * @param {string} [options.sessionId] Stable session id; random UUID if omitted.
   * @param {number} [options.timeout] Per-request timeout in SECONDS
   *   (converted to milliseconds for EdgeCrabClient).
   * @param {string} [options.baseUrl] Server root; falls back to
   *   EDGECRAB_BASE_URL, then the local default.
   * @param {string} [options.apiKey] API key; falls back to EDGECRAB_API_KEY.
   */
  constructor(options = {}) {
    this.model = options.model ?? "anthropic/claude-sonnet-4-20250514";
    this.systemPrompt = options.systemPrompt;
    this.maxTurns = options.maxTurns ?? 50;
    this.temperature = options.temperature;
    this.maxTokens = options.maxTokens;
    this.streaming = options.streaming ?? false;
    this.sessionId = options.sessionId ?? (0, import_node_crypto.randomUUID)();
    this.onToken = options.onToken;
    this.onToolCall = options.onToolCall;
    this.onTurn = options.onTurn;
    this.onError = options.onError;
    // Environment variables supply defaults when options omit them.
    const baseUrl = options.baseUrl ?? process.env.EDGECRAB_BASE_URL ?? "http://127.0.0.1:8642";
    const apiKey = options.apiKey ?? process.env.EDGECRAB_API_KEY;
    // Timeout stored here stays in seconds — it is re-fed to the constructor
    // by clone(), which performs the same seconds→ms conversion below.
    this.clientConfig = {
      baseUrl,
      apiKey,
      timeout: options.timeout,
      maxRetries: options.maxRetries
    };
    this.client = new EdgeCrabClient({
      baseUrl,
      apiKey,
      timeout: options.timeout ? options.timeout * 1e3 : void 0,
      maxRetries: options.maxRetries
    });
    if (this.systemPrompt) {
      this.messages.push({ role: "system", content: this.systemPrompt });
    }
  }
  // ── Interrupt ───────────────────────────────────────────────────
  /** Signal the agent to stop after the current turn. */
  interrupt() {
    this.interrupted = true;
  }
  /** Clear the interrupt flag so the agent can continue. */
  clearInterrupt() {
    this.interrupted = false;
  }
  /** Whether the agent has been interrupted. */
  get isInterrupted() {
    return this.interrupted;
  }
  // ── Chat ────────────────────────────────────────────────────────
  /**
   * Send a message and return the assistant's text reply. Maintains history.
   * @throws {InterruptedError} when interrupt() has been called.
   * @throws {MaxTurnsExceededError} when maxTurns user turns are exhausted.
   */
  async chat(message) {
    if (this.interrupted) throw new InterruptedError();
    if (this.turnCount >= this.maxTurns) throw new MaxTurnsExceededError(this.maxTurns);
    this.messages.push({ role: "user", content: message });
    this.turnCount++;
    if (this.streaming && this.onToken) {
      // NOTE: the streaming path does not accumulate token usage — the SSE
      // deltas consumed in chatStreaming() carry no usage field in this code.
      return this.chatStreaming();
    }
    const resp = await this.client.createCompletion(this.messages, {
      model: this.model,
      temperature: this.temperature,
      maxTokens: this.maxTokens
    });
    const assistantMsg = this.extractResponse(resp);
    this.accumulateUsage(resp.usage);
    this.onTurn?.(this.turnCount, assistantMsg);
    return assistantMsg.content;
  }
  /** Streaming variant of chat(): fires onToken per delta, returns full text. */
  async chatStreaming() {
    const collected = [];
    for await (const chunk of this.client.streamCompletion(this.messages, {
      model: this.model,
      temperature: this.temperature,
      maxTokens: this.maxTokens
    })) {
      // Stop consuming promptly when interrupted; the partial reply below is
      // still recorded in history.
      if (this.interrupted) break;
      for (const choice of chunk.choices) {
        if (choice.delta.content) {
          collected.push(choice.delta.content);
          this.onToken?.(choice.delta.content);
        }
      }
    }
    const fullText = collected.join("");
    const assistantMsg = { role: "assistant", content: fullText };
    this.messages.push(assistantMsg);
    this.onTurn?.(this.turnCount, assistantMsg);
    return fullText;
  }
  // ── Run ─────────────────────────────────────────────────────────
  /**
   * Run a full agent conversation. Returns a structured AgentResult.
   * Interruption and turn exhaustion are reported via result flags rather
   * than thrown; all other errors propagate to the caller.
   */
  async run(message, options) {
    if (options?.conversationHistory) {
      for (const msg of options.conversationHistory) {
        this.messages.push(msg);
      }
    }
    let response;
    let wasInterrupted = false;
    let wasMaxTurnsExceeded = false;
    try {
      response = await this.chat(message);
    } catch (err) {
      // NOTE(review): the fallback below returns the LAST message's content,
      // which can be the user's own message when no assistant reply landed —
      // confirm this partial-result semantics is intended.
      if (err instanceof InterruptedError) {
        response = this.messages.length > 0 ? this.messages[this.messages.length - 1].content : "";
        wasInterrupted = true;
      } else if (err instanceof MaxTurnsExceededError) {
        response = this.messages.length > 0 ? this.messages[this.messages.length - 1].content : "";
        wasMaxTurnsExceeded = true;
      } else {
        throw err;
      }
    }
    return {
      response,
      messages: [...this.messages],
      sessionId: this.sessionId,
      model: this.model,
      turnsUsed: this.turnCount,
      finishedNaturally: !wasInterrupted && !wasMaxTurnsExceeded,
      interrupted: wasInterrupted,
      maxTurnsExceeded: wasMaxTurnsExceeded,
      usage: { ...this.totalUsage }
    };
  }
  // ── Stream ──────────────────────────────────────────────────────
  /**
   * Stream response tokens as an async iterable. Maintains history.
   * @throws {InterruptedError|MaxTurnsExceededError} as in chat().
   */
  async *stream(message) {
    if (this.interrupted) throw new InterruptedError();
    if (this.turnCount >= this.maxTurns) throw new MaxTurnsExceededError(this.maxTurns);
    this.messages.push({ role: "user", content: message });
    this.turnCount++;
    const collected = [];
    for await (const chunk of this.client.streamCompletion(this.messages, {
      model: this.model,
      temperature: this.temperature,
      maxTokens: this.maxTokens
    })) {
      if (this.interrupted) break;
      for (const choice of chunk.choices) {
        if (choice.delta.content) {
          collected.push(choice.delta.content);
          yield choice.delta.content;
        }
      }
    }
    // Record whatever was received (possibly partial on interrupt).
    this.messages.push({ role: "assistant", content: collected.join("") });
  }
  // ── Conversation management ─────────────────────────────────────
  /** Manually inject a message into the conversation history. */
  addMessage(role, content) {
    this.messages.push({ role, content });
  }
  /** Reset conversation state for a new session (new id, zeroed usage). */
  reset() {
    this.messages = [];
    this.turnCount = 0;
    this.totalUsage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
    this.sessionId = (0, import_node_crypto.randomUUID)();
    this.interrupted = false;
    if (this.systemPrompt) {
      this.messages.push({ role: "system", content: this.systemPrompt });
    }
  }
  // ── Conversation persistence ────────────────────────────────────
  /** Export the current conversation state as a serializable object. */
  exportConversation() {
    return {
      sessionId: this.sessionId,
      model: this.model,
      messages: [...this.messages],
      turnCount: this.turnCount,
      usage: { ...this.totalUsage }
    };
  }
  /** Restore a conversation state from a previously exported object. */
  importConversation(data) {
    this.sessionId = data.sessionId ?? this.sessionId;
    this.messages = [...data.messages ?? []];
    this.turnCount = data.turnCount ?? 0;
    if (data.usage) this.totalUsage = { ...data.usage };
  }
  /**
   * Create a fork of this agent with an independent copy of the conversation.
   * The fork reuses this agent's resolved connection settings (baseUrl,
   * apiKey, timeout, maxRetries) — previously these were dropped and clones
   * fell back to environment defaults, potentially targeting a different
   * server with different credentials. The fork gets a fresh sessionId.
   */
  clone() {
    const newAgent = new _Agent({
      ...this.clientConfig,
      model: this.model,
      systemPrompt: this.systemPrompt,
      maxTurns: this.maxTurns,
      temperature: this.temperature,
      maxTokens: this.maxTokens,
      streaming: this.streaming,
      onToken: this.onToken,
      onToolCall: this.onToolCall,
      onTurn: this.onTurn,
      onError: this.onError
    });
    newAgent.importConversation(this.exportConversation());
    newAgent.sessionId = (0, import_node_crypto.randomUUID)();
    return newAgent;
  }
  // ── Introspection ───────────────────────────────────────────────
  /** Current conversation history (copy). */
  getMessages() {
    return [...this.messages];
  }
  /** Number of user turns completed. */
  getTurnCount() {
    return this.turnCount;
  }
  /** Accumulated token usage. */
  getUsage() {
    return { ...this.totalUsage };
  }
  /** List available models from the server. */
  async listModels() {
    return this.client.listModels();
  }
  /** Check server health. */
  async health() {
    return this.client.health();
  }
  // ── Internal ────────────────────────────────────────────────────
  /** Push the first choice's message onto history and return it. */
  extractResponse(resp) {
    const msg = resp.choices?.[0]?.message ?? { role: "assistant", content: "" };
    this.messages.push(msg);
    return msg;
  }
  /**
   * Fold a usage record into the running totals. The `?? 0` guards keep a
   * partial usage object (missing counters) from poisoning totals with NaN.
   */
  accumulateUsage(usage) {
    if (usage) {
      this.totalUsage.prompt_tokens += usage.prompt_tokens ?? 0;
      this.totalUsage.completion_tokens += usage.completion_tokens ?? 0;
      this.totalUsage.total_tokens += usage.total_tokens ?? 0;
    }
  }
};
506
// Annotate the CommonJS export names for ESM import in node:
// This `0 && (...)` expression is dead code at runtime; it exists only so
// that Node's cjs-module-lexer can statically discover the named exports
// and allow `import { Agent } from "edgecrab-sdk"` from ES modules.
// Do not remove or "simplify" it.
0 && (module.exports = {
  Agent,
  AuthenticationError,
  ConnectionError,
  EdgeCrabClient,
  EdgeCrabError,
  InterruptedError,
  MaxTurnsExceededError,
  RateLimitError,
  ServerError,
  TimeoutError
});