edgecrab-sdk 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.mjs ADDED
@@ -0,0 +1,572 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/cli.ts
4
+ import { parseArgs } from "util";
5
+
6
// src/client.ts

// Connection defaults: local server, 120 s per-request timeout, 3 retries
// with a 1 s base delay for exponential backoff.
// (Changed `var` to `const`: these are true module-level constants and are
// never reassigned anywhere in this bundle.)
const DEFAULT_BASE_URL = "http://127.0.0.1:8642";
const DEFAULT_TIMEOUT = 120000; // ms
const DEFAULT_MAX_RETRIES = 3;
const DEFAULT_RETRY_BASE_DELAY = 1000; // ms
11
/**
 * Base error for every failure raised by the EdgeCrab SDK.
 * Carries the HTTP status code when the failure came from a response.
 */
class EdgeCrabError extends Error {
  /**
   * @param {string} message - Human-readable description.
   * @param {number} [statusCode] - HTTP status, when one applies.
   */
  constructor(message, statusCode) {
    super(message);
    this.name = "EdgeCrabError";
    this.statusCode = statusCode;
  }
}
19
/** Raised on 401/403 responses — the API key is missing or rejected. */
class AuthenticationError extends EdgeCrabError {
  /**
   * @param {string} message
   * @param {number} [statusCode] - 401 or 403 from the server.
   */
  constructor(message, statusCode) {
    super(message, statusCode);
    this.name = "AuthenticationError";
  }
}
25
/** Raised on HTTP 429; carries the server's Retry-After hint when present. */
class RateLimitError extends EdgeCrabError {
  /**
   * @param {string} message
   * @param {number} [retryAfter] - Seconds to wait before retrying,
   *   as parsed from the Retry-After response header.
   */
  constructor(message, retryAfter) {
    super(message, 429);
    this.name = "RateLimitError";
    this.retryAfter = retryAfter;
  }
}
33
/** Raised on 5xx responses; treated as transient and retried. */
class ServerError extends EdgeCrabError {
  /**
   * @param {string} message
   * @param {number} [statusCode] - The 5xx status returned by the server.
   */
  constructor(message, statusCode) {
    super(message, statusCode);
    this.name = "ServerError";
  }
}
39
/** Raised when a request exceeds the configured timeout; retried as transient. */
class TimeoutError extends EdgeCrabError {
  /** @param {string} [message] - Defaults to a generic timeout message. */
  constructor(message = "Request timed out") {
    super(message);
    this.name = "TimeoutError";
  }
}
45
/** Raised when the server is unreachable; retried as transient. */
class ConnectionError extends EdgeCrabError {
  /** @param {string} [message] - Defaults to a generic connectivity message. */
  constructor(message = "Could not connect to EdgeCrab server") {
    super(message);
    this.name = "ConnectionError";
  }
}
51
/** Raised when an Agent reaches its configured turn budget. */
class MaxTurnsExceededError extends EdgeCrabError {
  /** @param {number} maxTurns - The turn limit that was exceeded. */
  constructor(maxTurns) {
    super(`Agent exceeded maximum turns (${maxTurns})`);
    this.name = "MaxTurnsExceededError";
    this.maxTurns = maxTurns;
  }
}
59
/** Raised when an Agent is asked to chat after interrupt() was called. */
class InterruptedError extends EdgeCrabError {
  constructor() {
    super("Agent conversation was interrupted");
    this.name = "InterruptedError";
  }
}
65
/**
 * Convert a non-2xx HTTP response into the most specific SDK error.
 *
 * @param {number} status - HTTP status code of the failed response.
 * @param {string} detail - Error detail extracted from the response body.
 * @param {Headers} [headers] - Response headers (consulted for Retry-After).
 * @returns {EdgeCrabError} AuthenticationError for 401/403, RateLimitError
 *   for 429, ServerError for 5xx, plain EdgeCrabError otherwise.
 */
function classifyError(status, detail, headers) {
  const msg = `API error ${status}: ${detail}`;
  if (status === 401 || status === 403) return new AuthenticationError(msg, status);
  if (status === 429) {
    // Retry-After may be delay-seconds or an HTTP-date. Only a finite
    // numeric value is usable as a delay; the previous code let NaN
    // through for date-form headers, which then leaked into retryAfter.
    const ra = headers?.get("retry-after");
    const seconds = ra ? Number.parseFloat(ra) : NaN;
    return new RateLimitError(msg, Number.isFinite(seconds) ? seconds : void 0);
  }
  if (status >= 500) return new ServerError(msg, status);
  return new EdgeCrabError(msg, status);
}
75
/**
 * Decide whether a failed request should be retried.
 * Transient SDK errors (server, timeout, connection, rate-limit) retry;
 * so do fetch-level network failures, which undici reports as a
 * TypeError whose message mentions "fetch".
 *
 * @param {unknown} err - The error thrown by the request attempt.
 * @returns {boolean} True when another attempt is worthwhile.
 */
function isRetryable(err) {
  const transient =
    err instanceof ServerError ||
    err instanceof TimeoutError ||
    err instanceof ConnectionError ||
    err instanceof RateLimitError;
  if (transient) return true;
  return err instanceof TypeError && String(err.message).includes("fetch");
}
82
/**
 * Build the base request headers, adding a bearer token when configured.
 *
 * @param {string} [apiKey] - Optional API key for the Authorization header.
 * @returns {Record<string, string>} Fresh headers object per call.
 */
function buildHeaders(apiKey) {
  const base = { "Content-Type": "application/json" };
  return apiKey ? { ...base, Authorization: `Bearer ${apiKey}` } : base;
}
87
/**
 * Thin HTTP client for an EdgeCrab server exposing an OpenAI-compatible
 * chat-completions API. Handles bearer auth, per-request timeouts via
 * AbortController, retry with exponential backoff, and SSE streaming.
 */
var EdgeCrabClient = class {
  baseUrl;        // server origin, trailing slashes stripped
  headers;        // base headers sent with every request
  timeout;        // per-request timeout in milliseconds
  maxRetries;     // additional attempts after the first try
  retryBaseDelay; // backoff base delay in milliseconds
  /**
   * @param {object} [options]
   * @param {string} [options.baseUrl] - Server URL (default http://127.0.0.1:8642).
   * @param {string} [options.apiKey] - Bearer token for Authorization.
   * @param {number} [options.timeout] - Timeout in ms (default 120000).
   * @param {number} [options.maxRetries] - Retry count (default 3).
   * @param {number} [options.retryBaseDelay] - Backoff base in ms (default 1000).
   */
  constructor(options = {}) {
    this.baseUrl = (options.baseUrl ?? DEFAULT_BASE_URL).replace(/\/+$/, "");
    this.headers = buildHeaders(options.apiKey);
    this.timeout = options.timeout ?? DEFAULT_TIMEOUT;
    this.maxRetries = options.maxRetries ?? DEFAULT_MAX_RETRIES;
    this.retryBaseDelay = options.retryBaseDelay ?? DEFAULT_RETRY_BASE_DELAY;
  }
  /** Simple chat — send a message, get a reply string. */
  async chat(message, options) {
    const messages = [];
    if (options?.system) {
      messages.push({ role: "system", content: options.system });
    }
    messages.push({ role: "user", content: message });
    const resp = await this.createCompletion(messages, {
      model: options?.model,
      temperature: options?.temperature,
      maxTokens: options?.maxTokens
    });
    // Empty string when the server returned no choices/content.
    return resp.choices?.[0]?.message?.content ?? "";
  }
  /**
   * Create a chat completion (non-streaming).
   * Optional fields are only added to the payload when explicitly set, so
   * the server's own defaults apply otherwise.
   */
  async createCompletion(messages, options) {
    const body = {
      model: options?.model ?? "anthropic/claude-sonnet-4-20250514",
      messages,
      stream: false
    };
    if (options?.temperature !== void 0) body.temperature = options.temperature;
    if (options?.maxTokens !== void 0) body.max_tokens = options.maxTokens;
    if (options?.tools) body.tools = options.tools;
    return this.postWithRetry(body);
  }
  /**
   * POST a completion body, retrying transient failures (see isRetryable)
   * up to maxRetries times with exponential backoff; a 429's Retry-After
   * hint (seconds) stretches the delay when it is larger.
   */
  async postWithRetry(body) {
    let lastError;
    for (let attempt = 0; attempt <= this.maxRetries; attempt++) {
      try {
        const response = await this.fetchJSON("/v1/chat/completions", {
          method: "POST",
          body: JSON.stringify(body)
        });
        return response;
      } catch (err) {
        lastError = err;
        // Non-transient errors and the final attempt propagate immediately.
        if (!isRetryable(err) || attempt === this.maxRetries) throw err;
        let delay = this.retryBaseDelay * 2 ** attempt;
        if (err instanceof RateLimitError && err.retryAfter) {
          delay = Math.max(delay, err.retryAfter * 1e3);
        }
        await new Promise((r) => setTimeout(r, delay));
      }
    }
    // Unreachable in practice (loop returns or rethrows); kept as a safety net.
    throw lastError;
  }
  /**
   * Create a streaming chat completion. Yields one parsed JSON chunk per
   * SSE "data:" line until the server sends "[DONE]" or the stream ends.
   * NOTE(review): if the timeout fires mid-stream the caller sees a raw
   * AbortError — the SDK's TimeoutError is never thrown here; confirm
   * whether mapping it was intended.
   */
  async *streamCompletion(messages, options) {
    const body = {
      model: options?.model ?? "anthropic/claude-sonnet-4-20250514",
      messages,
      stream: true
    };
    if (options?.temperature !== void 0) body.temperature = options.temperature;
    if (options?.maxTokens !== void 0) body.max_tokens = options.maxTokens;
    if (options?.tools) body.tools = options.tools;
    // Single timeout covers the whole stream, not each chunk.
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeout);
    try {
      const response = await fetch(`${this.baseUrl}/v1/chat/completions`, {
        method: "POST",
        headers: this.headers,
        body: JSON.stringify(body),
        signal: controller.signal
      });
      if (!response.ok) {
        const text = await response.text();
        throw classifyError(response.status, text, response.headers);
      }
      if (!response.body) {
        throw new EdgeCrabError("No response body for streaming request");
      }
      const reader = response.body.getReader();
      const decoder = new TextDecoder();
      // Carry the trailing partial line between reads.
      let buffer = "";
      while (true) {
        const { done, value } = await reader.read();
        if (done) break;
        buffer += decoder.decode(value, { stream: true });
        const lines = buffer.split("\n");
        buffer = lines.pop() ?? "";
        for (const line of lines) {
          const trimmed = line.trim();
          if (!trimmed.startsWith("data: ")) continue;
          const payload = trimmed.slice(6);
          if (payload === "[DONE]") return;
          yield JSON.parse(payload);
        }
      }
    } finally {
      clearTimeout(timeoutId);
    }
  }
  /**
   * List available models. Tolerates both a bare array response and the
   * OpenAI-style `{ data: [...] }` envelope.
   */
  async listModels() {
    const data = await this.fetchJSON("/v1/models");
    if (Array.isArray(data)) return data;
    return data.data ?? [];
  }
  /** Check server health. */
  async health() {
    return this.fetchJSON("/v1/health");
  }
  /**
   * Fetch a JSON endpoint with the client's headers and timeout.
   * On non-2xx, prefers the body's `error.message` as detail (falling back
   * to raw text) and throws a classified SDK error.
   * NOTE(review): a timeout here also surfaces as a raw AbortError rather
   * than TimeoutError — confirm intended.
   */
  async fetchJSON(path, init) {
    const controller = new AbortController();
    const timeoutId = setTimeout(() => controller.abort(), this.timeout);
    try {
      const response = await fetch(`${this.baseUrl}${path}`, {
        ...init,
        // Per-call headers win over the client's base headers.
        headers: { ...this.headers, ...init?.headers ?? {} },
        signal: controller.signal
      });
      if (!response.ok) {
        const text = await response.text();
        let detail = text;
        try {
          const json = JSON.parse(text);
          detail = json?.error?.message ?? text;
        } catch {
          // Body was not JSON — keep the raw text as detail.
        }
        throw classifyError(response.status, detail, response.headers);
      }
      return await response.json();
    } finally {
      clearTimeout(timeoutId);
    }
  }
};
229
+
230
+ // src/agent.ts
231
+ import { randomUUID } from "crypto";
232
/**
 * Conversational agent on top of EdgeCrabClient. Maintains message
 * history, a turn budget, accumulated token usage and a session id, with
 * optional streaming callbacks and cooperative interruption.
 */
var Agent = class _Agent {
  model;        // model id sent with every completion
  systemPrompt; // optional system message seeded into the history
  maxTurns;     // hard cap on user turns (MaxTurnsExceededError beyond it)
  temperature;
  maxTokens;
  streaming;    // when true (and onToken set), chat() streams tokens
  sessionId;    // id for this conversation (random UUID unless provided)
  // Callbacks
  onToken;    // (token) => void — fired per streamed token in chatStreaming()
  onToolCall; // stored but not invoked anywhere in this bundle
  onTurn;     // (turnCount, assistantMsg) => void — fired after each turn
  onError;    // stored but not invoked anywhere in this bundle
  messages = [];   // full conversation history (OpenAI message shape)
  turnCount = 0;   // user turns consumed so far
  totalUsage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
  client;          // underlying EdgeCrabClient
  interrupted = false; // cooperative stop flag, checked before/between chunks
  /**
   * @param {object} [options] - model, systemPrompt, maxTurns (default 50),
   *   temperature, maxTokens, streaming, sessionId, callbacks, plus client
   *   settings. baseUrl/apiKey fall back to EDGECRAB_BASE_URL /
   *   EDGECRAB_API_KEY. Note: `timeout` here is in seconds and converted
   *   to ms for the client; a timeout of 0 falls through to the default.
   */
  constructor(options = {}) {
    this.model = options.model ?? "anthropic/claude-sonnet-4-20250514";
    this.systemPrompt = options.systemPrompt;
    this.maxTurns = options.maxTurns ?? 50;
    this.temperature = options.temperature;
    this.maxTokens = options.maxTokens;
    this.streaming = options.streaming ?? false;
    this.sessionId = options.sessionId ?? randomUUID();
    this.onToken = options.onToken;
    this.onToolCall = options.onToolCall;
    this.onTurn = options.onTurn;
    this.onError = options.onError;
    const baseUrl = options.baseUrl ?? process.env.EDGECRAB_BASE_URL ?? "http://127.0.0.1:8642";
    const apiKey = options.apiKey ?? process.env.EDGECRAB_API_KEY;
    this.client = new EdgeCrabClient({
      baseUrl,
      apiKey,
      timeout: options.timeout ? options.timeout * 1e3 : void 0,
      maxRetries: options.maxRetries
    });
    if (this.systemPrompt) {
      this.messages.push({ role: "system", content: this.systemPrompt });
    }
  }
  // ── Interrupt ───────────────────────────────────────────────────
  /** Signal the agent to stop after the current turn. */
  interrupt() {
    this.interrupted = true;
  }
  /** Clear the interrupt flag so the agent can continue. */
  clearInterrupt() {
    this.interrupted = false;
  }
  /** Whether the agent has been interrupted. */
  get isInterrupted() {
    return this.interrupted;
  }
  // ── Chat ────────────────────────────────────────────────────────
  /**
   * Send a message and return the assistant's text reply. Maintains history.
   * Streams via chatStreaming() when streaming mode is on AND an onToken
   * callback is set; otherwise performs a blocking completion.
   * @throws {InterruptedError} if interrupt() was called.
   * @throws {MaxTurnsExceededError} once maxTurns user turns are used.
   * NOTE(review): turnCount is incremented before the request, so a failed
   * request still consumes a turn — confirm intended.
   */
  async chat(message) {
    if (this.interrupted) throw new InterruptedError();
    if (this.turnCount >= this.maxTurns) throw new MaxTurnsExceededError(this.maxTurns);
    this.messages.push({ role: "user", content: message });
    this.turnCount++;
    if (this.streaming && this.onToken) {
      return this.chatStreaming();
    }
    const resp = await this.client.createCompletion(this.messages, {
      model: this.model,
      temperature: this.temperature,
      maxTokens: this.maxTokens
    });
    const assistantMsg = this.extractResponse(resp);
    this.accumulateUsage(resp.usage);
    this.onTurn?.(this.turnCount, assistantMsg);
    return assistantMsg.content;
  }
  /**
   * Streaming variant of chat(): forwards each token to onToken, then
   * records the concatenated text as the assistant message. If interrupted
   * mid-stream, the partial text collected so far is still recorded.
   * No usage accumulation here — streaming chunks carry no usage field read
   * by this code.
   */
  async chatStreaming() {
    const collected = [];
    for await (const chunk of this.client.streamCompletion(this.messages, {
      model: this.model,
      temperature: this.temperature,
      maxTokens: this.maxTokens
    })) {
      if (this.interrupted) break;
      for (const choice of chunk.choices) {
        if (choice.delta.content) {
          collected.push(choice.delta.content);
          this.onToken?.(choice.delta.content);
        }
      }
    }
    const fullText = collected.join("");
    const assistantMsg = { role: "assistant", content: fullText };
    this.messages.push(assistantMsg);
    this.onTurn?.(this.turnCount, assistantMsg);
    return fullText;
  }
  // ── Run ─────────────────────────────────────────────────────────
  /**
   * Run a full agent conversation. Returns a structured AgentResult:
   * { response, messages, sessionId, model, turnsUsed, finishedNaturally,
   *   interrupted, maxTurnsExceeded, usage }.
   * Interruption and turn-budget exhaustion are reported via flags instead
   * of throwing; `response` then falls back to the last message's content
   * (which may be the user's own message if no reply was produced).
   */
  async run(message, options) {
    if (options?.conversationHistory) {
      for (const msg of options.conversationHistory) {
        this.messages.push(msg);
      }
    }
    let response;
    let wasInterrupted = false;
    let wasMaxTurnsExceeded = false;
    try {
      response = await this.chat(message);
    } catch (err) {
      if (err instanceof InterruptedError) {
        response = this.messages.length > 0 ? this.messages[this.messages.length - 1].content : "";
        wasInterrupted = true;
      } else if (err instanceof MaxTurnsExceededError) {
        response = this.messages.length > 0 ? this.messages[this.messages.length - 1].content : "";
        wasMaxTurnsExceeded = true;
      } else {
        // Transport/API errors propagate to the caller untouched.
        throw err;
      }
    }
    return {
      response,
      messages: [...this.messages],
      sessionId: this.sessionId,
      model: this.model,
      turnsUsed: this.turnCount,
      finishedNaturally: !wasInterrupted && !wasMaxTurnsExceeded,
      interrupted: wasInterrupted,
      maxTurnsExceeded: wasMaxTurnsExceeded,
      usage: { ...this.totalUsage }
    };
  }
  // ── Stream ──────────────────────────────────────────────────────
  /**
   * Stream response tokens as an async iterable. Like chatStreaming() but
   * yields tokens to the caller instead of invoking onToken. The collected
   * (possibly partial, if interrupted) text is appended to history when the
   * stream ends.
   */
  async *stream(message) {
    if (this.interrupted) throw new InterruptedError();
    if (this.turnCount >= this.maxTurns) throw new MaxTurnsExceededError(this.maxTurns);
    this.messages.push({ role: "user", content: message });
    this.turnCount++;
    const collected = [];
    for await (const chunk of this.client.streamCompletion(this.messages, {
      model: this.model,
      temperature: this.temperature,
      maxTokens: this.maxTokens
    })) {
      if (this.interrupted) break;
      for (const choice of chunk.choices) {
        if (choice.delta.content) {
          collected.push(choice.delta.content);
          yield choice.delta.content;
        }
      }
    }
    this.messages.push({ role: "assistant", content: collected.join("") });
  }
  // ── Conversation management ─────────────────────────────────────
  /** Manually inject a message into the conversation history. */
  addMessage(role, content) {
    this.messages.push({ role, content });
  }
  /**
   * Reset conversation state for a new session: clears history, counters,
   * usage and the interrupt flag, issues a fresh sessionId, and re-seeds
   * the system prompt if one was configured.
   */
  reset() {
    this.messages = [];
    this.turnCount = 0;
    this.totalUsage = { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 };
    this.sessionId = randomUUID();
    this.interrupted = false;
    if (this.systemPrompt) {
      this.messages.push({ role: "system", content: this.systemPrompt });
    }
  }
  // ── Conversation persistence ────────────────────────────────────
  /** Export the current conversation state as a serializable object. */
  exportConversation() {
    return {
      sessionId: this.sessionId,
      model: this.model,
      messages: [...this.messages],
      turnCount: this.turnCount,
      usage: { ...this.totalUsage }
    };
  }
  /**
   * Restore a conversation state from a previously exported object.
   * Missing fields fall back to the current sessionId, an empty history,
   * and a zero turn count; usage is replaced only when present.
   * Note: `model` from the export is not applied here.
   */
  importConversation(data) {
    this.sessionId = data.sessionId ?? this.sessionId;
    this.messages = [...data.messages ?? []];
    this.turnCount = data.turnCount ?? 0;
    if (data.usage) this.totalUsage = { ...data.usage };
  }
  /**
   * Create a fork of this agent with an independent copy of the
   * conversation and a new sessionId.
   * NOTE(review): client connection settings (baseUrl/apiKey/timeout/
   * maxRetries) are NOT propagated — the clone's client is rebuilt from
   * env-var/default values only. Confirm intended.
   */
  clone() {
    const newAgent = new _Agent({
      model: this.model,
      systemPrompt: this.systemPrompt,
      maxTurns: this.maxTurns,
      temperature: this.temperature,
      maxTokens: this.maxTokens,
      streaming: this.streaming,
      onToken: this.onToken,
      onToolCall: this.onToolCall,
      onTurn: this.onTurn,
      onError: this.onError
    });
    newAgent.importConversation(this.exportConversation());
    newAgent.sessionId = randomUUID();
    return newAgent;
  }
  // ── Introspection ───────────────────────────────────────────────
  /** Current conversation history (copy). */
  getMessages() {
    return [...this.messages];
  }
  /** Number of user turns completed. */
  getTurnCount() {
    return this.turnCount;
  }
  /** Accumulated token usage (copy). */
  getUsage() {
    return { ...this.totalUsage };
  }
  /** List available models from the server. */
  async listModels() {
    return this.client.listModels();
  }
  /** Check server health. */
  async health() {
    return this.client.health();
  }
  // ── Internal ────────────────────────────────────────────────────
  /**
   * Pull the assistant message out of a completion response (empty
   * assistant message when absent) and append it to history.
   */
  extractResponse(resp) {
    const msg = resp.choices?.[0]?.message ?? { role: "assistant", content: "" };
    this.messages.push(msg);
    return msg;
  }
  /**
   * Add a response's usage block into the running totals.
   * Assumes all three token fields are numeric when usage is present —
   * TODO confirm the server always populates them.
   */
  accumulateUsage(usage) {
    if (usage) {
      this.totalUsage.prompt_tokens += usage.prompt_tokens;
      this.totalUsage.completion_tokens += usage.completion_tokens;
      this.totalUsage.total_tokens += usage.total_tokens;
    }
  }
};
474
+
475
// src/cli.ts
// Command-line entry point: parse flags, then dispatch on the first
// positional (chat | models | health).
var { values, positionals } = parseArgs({
  allowPositionals: true,
  options: {
    "base-url": { type: "string" },
    "api-key": { type: "string" },
    model: { type: "string", short: "m", default: "anthropic/claude-sonnet-4-20250514" },
    system: { type: "string", short: "s" },
    temperature: { type: "string", short: "t" },
    stream: { type: "boolean", default: false },
    version: { type: "boolean", short: "v" },
    help: { type: "boolean", short: "h" }
  }
});
// --version: read the bundled package metadata chunk, falling back to a
// baked-in version string when the chunk is missing.
if (values.version) {
  const pkg = await import("./package-UHAPSKPH.mjs").catch(() => ({ default: { version: "0.1.0" } }));
  console.log(`edgecrab-sdk ${pkg.default.version}`);
  process.exit(0);
}
var command = positionals[0];
var messageArgs = positionals.slice(1);
// No command (or -h): print usage. Exit 0 for explicit --help, 1 otherwise.
if (values.help || !command) {
  console.log(`Usage: edgecrab <command> [options] [args...]

Commands:
chat <message> Send a message to the agent
models List available models
health Check API health

Options:
--base-url <url> API server URL (env: EDGECRAB_BASE_URL)
--api-key <key> Bearer token (env: EDGECRAB_API_KEY)
-m, --model <id> Model to use (default: anthropic/claude-sonnet-4-20250514)
-s, --system <msg> System prompt
-t, --temperature Sampling temperature
--stream Stream the response
-v, --version Show version
-h, --help Show this help
`);
  process.exit(values.help ? 0 : 1);
}
// Command-line flags take precedence over environment variables.
var clientOpts = {
  baseUrl: values["base-url"] ?? process.env.EDGECRAB_BASE_URL,
  apiKey: values["api-key"] ?? process.env.EDGECRAB_API_KEY
};
try {
  switch (command) {
    case "chat": {
      // All remaining positionals form the message.
      const message = messageArgs.join(" ");
      if (!message) {
        console.error("Error: no message provided");
        process.exit(1);
      }
      const agent = new Agent({
        ...clientOpts,
        model: values.model,
        systemPrompt: values.system,
        temperature: values.temperature ? parseFloat(values.temperature) : void 0,
        streaming: values.stream,
        // NOTE(review): the streaming path below uses agent.stream(), which
        // never invokes onToken — tokens are printed only by the for-await
        // loop, so this callback appears redundant. Confirm intended.
        onToken: values.stream ? (token) => process.stdout.write(token) : void 0
      });
      if (values.stream) {
        for await (const token of agent.stream(message)) {
          process.stdout.write(token);
        }
        process.stdout.write("\n");
      } else {
        const reply = await agent.chat(message);
        console.log(reply);
      }
      break;
    }
    case "models": {
      const client = new EdgeCrabClient(clientOpts);
      const models = await client.listModels();
      for (const m of models) {
        console.log(` ${m.id}${m.owned_by ? ` (by ${m.owned_by})` : ""}`);
      }
      break;
    }
    case "health": {
      const client = new EdgeCrabClient(clientOpts);
      const h = await client.health();
      console.log(JSON.stringify(h, null, 2));
      break;
    }
    default:
      console.error(`Unknown command: ${command}`);
      process.exit(1);
  }
} catch (err) {
  // Uniform error reporting; always exit non-zero on failure.
  if (err instanceof EdgeCrabError) {
    console.error(`Error: ${err.message}`);
  } else {
    console.error(`Error: ${err instanceof Error ? err.message : String(err)}`);
  }
  process.exit(1);
}