wauldo 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +143 -0
- package/dist/index.d.mts +590 -0
- package/dist/index.d.ts +590 -0
- package/dist/index.js +1212 -0
- package/dist/index.mjs +1176 -0
- package/package.json +63 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,1176 @@
|
|
|
1
|
+
// src/errors.ts
|
|
2
|
+
// Base error type for the wauldo SDK. Carries an optional JSON-RPC error
// code and an optional server-supplied data payload alongside the message.
var WauldoError = class _WauldoError extends Error {
  // Optional JSON-RPC error code (undefined when not applicable).
  code;
  // Optional structured payload attached by the server.
  data;
  /**
   * @param message - human-readable description
   * @param code - optional JSON-RPC error code
   * @param data - optional structured error payload
   */
  constructor(message, code, data) {
    super(message);
    this.name = "WauldoError";
    this.code = code;
    this.data = data;
    // Restore the prototype chain for targets that transpile class extends Error.
    Object.setPrototypeOf(this, _WauldoError.prototype);
  }
  /** Render as "[code] message" when a code is present, else just the message. */
  toString() {
    return this.code === void 0 ? this.message : `[${this.code}] ${this.message}`;
  }
};
|
|
19
|
+
// Raised when the MCP server cannot be reached, started, or written to.
// Uses the JSON-RPC implementation-defined code -32000.
var ConnectionError = class _ConnectionError extends WauldoError {
  /** @param message - optional override for the default description */
  constructor(message = "Failed to connect to MCP server") {
    super(message, -32e3);
    this.name = "ConnectionError";
    // Keep instanceof working after transpilation of Error subclasses.
    Object.setPrototypeOf(this, _ConnectionError.prototype);
  }
};
|
|
26
|
+
// Raised when the server answers a request with a JSON-RPC error object;
// mirrors that object's message, code, and data fields.
var ServerError = class _ServerError extends WauldoError {
  /**
   * @param message - server-supplied error message
   * @param code - JSON-RPC error code from the response
   * @param data - optional structured payload from the response
   */
  constructor(message, code, data) {
    super(message, code, data);
    this.name = "ServerError";
    // Keep instanceof working after transpilation of Error subclasses.
    Object.setPrototypeOf(this, _ServerError.prototype);
  }
};
|
|
33
|
+
// Raised for client-side argument validation failures before a request is
// sent. Uses the standard JSON-RPC "invalid params" code -32602.
var ValidationError = class _ValidationError extends WauldoError {
  // Name of the offending parameter, when known.
  field;
  /**
   * @param message - description of the validation failure
   * @param field - the parameter that failed validation
   */
  constructor(message, field) {
    super(message, -32602);
    this.name = "ValidationError";
    this.field = field;
    // Keep instanceof working after transpilation of Error subclasses.
    Object.setPrototypeOf(this, _ValidationError.prototype);
  }
};
|
|
42
|
+
// Raised when a request exceeds its deadline. Records the timeout that was
// in effect. Uses the implementation-defined JSON-RPC code -32001.
var TimeoutError = class _TimeoutError extends WauldoError {
  // The timeout (in milliseconds) that elapsed, when known.
  timeout;
  /**
   * @param message - optional override for the default description
   * @param timeout - the timeout in milliseconds that was exceeded
   */
  constructor(message = "Operation timed out", timeout) {
    super(message, -32001);
    this.name = "TimeoutError";
    this.timeout = timeout;
    // Keep instanceof working after transpilation of Error subclasses.
    Object.setPrototypeOf(this, _TimeoutError.prototype);
  }
};
|
|
51
|
+
// Raised when a named tool is not available on the server. Uses the
// standard JSON-RPC "method not found" code -32601.
var ToolNotFoundError = class _ToolNotFoundError extends WauldoError {
  // Name of the tool that was requested but not found.
  toolName;
  /** @param toolName - the missing tool's name */
  constructor(toolName) {
    super(`Tool not found: ${toolName}`, -32601);
    this.name = "ToolNotFoundError";
    this.toolName = toolName;
    // Keep instanceof working after transpilation of Error subclasses.
    Object.setPrototypeOf(this, _ToolNotFoundError.prototype);
  }
};
|
|
60
|
+
|
|
61
|
+
// src/transport.ts
|
|
62
|
+
import { spawn } from "child_process";
|
|
63
|
+
import { existsSync } from "fs";
|
|
64
|
+
import { join } from "path";
|
|
65
|
+
import { homedir } from "os";
|
|
66
|
+
import { createInterface } from "readline";
|
|
67
|
+
// Transport that spawns the Rust MCP server as a child process and speaks
// newline-delimited JSON-RPC over its stdin/stdout.
var StdioTransport = class {
  // Resolved path to the server binary; null until looked up lazily.
  serverPath;
  // Default per-request timeout in milliseconds.
  timeout;
  // Spawned child process (null while disconnected).
  process = null;
  // Monotonically increasing JSON-RPC request id.
  requestId = 0;
  // readline interface over the child's stdout (null while disconnected).
  readline = null;
  // In-flight connect attempt, shared by concurrent connect() callers.
  connectingPromise = null;
  // Once true, late stdout/error/close events from the child are ignored.
  disconnected = false;
  // Pending requests keyed by id -> { resolve, reject, timer }.
  responseQueue = /* @__PURE__ */ new Map();
  /**
   * @param serverPath - explicit path to the server binary, or undefined to search
   * @param timeout - default request timeout in ms (30s)
   */
  constructor(serverPath, timeout = 3e4) {
    this.serverPath = serverPath ?? null;
    this.timeout = timeout;
  }
  /**
   * Find MCP server binary in common locations: local cargo build outputs
   * (release, debug, parent-dir release) then the `cargo install` bin dir.
   * @throws {ConnectionError} when no binary exists at any search path
   */
  findServer() {
    const searchPaths = [
      join(process.cwd(), "target", "release", "wauldo-mcp"),
      join(process.cwd(), "target", "debug", "wauldo-mcp"),
      join(process.cwd(), "..", "target", "release", "wauldo-mcp"),
      join(homedir(), ".cargo", "bin", "wauldo-mcp")
    ];
    for (const path of searchPaths) {
      if (existsSync(path)) {
        return path;
      }
    }
    throw new ConnectionError(
      'MCP server binary not found. Please provide serverPath or install with "cargo install".'
    );
  }
  /**
   * Get server path, finding it lazily if needed; the result is cached.
   */
  getServerPath() {
    if (this.serverPath === null) {
      this.serverPath = this.findServer();
    }
    return this.serverPath;
  }
  /**
   * Connect to MCP server. Idempotent: an established connection is a no-op,
   * and concurrent callers await the same in-flight attempt.
   * NOTE(review): this.process is assigned inside doConnect() before the
   * initialize handshake completes, so a second connect() call can return
   * early while the handshake is still in flight — confirm intended.
   */
  async connect() {
    if (this.process !== null) {
      return;
    }
    if (this.connectingPromise !== null) {
      return this.connectingPromise;
    }
    this.connectingPromise = this.doConnect();
    try {
      await this.connectingPromise;
    } finally {
      this.connectingPromise = null;
    }
  }
  // Spawn the server, wire stdio handlers, then run the MCP handshake.
  async doConnect() {
    const serverPath = this.getServerPath();
    try {
      this.process = spawn(serverPath, [], {
        stdio: ["pipe", "pipe", "pipe"]
      });
    } catch (error) {
      throw new ConnectionError(`Failed to start server: ${error}`);
    }
    if (!this.process.stdout || !this.process.stdin) {
      throw new ConnectionError("Failed to get stdio handles");
    }
    // The server emits one JSON-RPC message per line on stdout.
    this.readline = createInterface({
      input: this.process.stdout,
      crlfDelay: Infinity
    });
    this.disconnected = false;
    this.readline.on("line", (line) => {
      if (!this.disconnected) this.handleResponse(line);
    });
    this.process.on("error", (error) => {
      if (!this.disconnected) this.handleError(new ConnectionError(`Server error: ${error.message}`));
    });
    this.process.on("close", (code) => {
      if (!this.disconnected && code !== 0) {
        this.handleError(new ConnectionError(`Server exited with code ${code}`));
      }
    });
    try {
      await this.initialize();
    } catch (err) {
      // Handshake failed: tear down the half-open connection before rethrowing.
      this.disconnect();
      throw err;
    }
  }
  /**
   * Disconnect from MCP server: close the reader, kill the child process,
   * and reject every pending request with a ConnectionError.
   */
  disconnect() {
    this.disconnected = true;
    if (this.readline) {
      this.readline.close();
      this.readline = null;
    }
    if (this.process) {
      this.process.kill();
      this.process = null;
    }
    for (const [, pending] of this.responseQueue) {
      clearTimeout(pending.timer);
      pending.reject(new ConnectionError("Connection closed"));
    }
    this.responseQueue.clear();
  }
  /**
   * Handle one incoming stdout line: match it to the pending request with
   * the same id and settle that request (reject on a JSON-RPC error object).
   */
  handleResponse(line) {
    try {
      const response = JSON.parse(line);
      const pending = this.responseQueue.get(response.id);
      if (pending) {
        clearTimeout(pending.timer);
        this.responseQueue.delete(response.id);
        if (response.error) {
          pending.reject(
            new ServerError(
              response.error.message,
              response.error.code,
              response.error.data
            )
          );
        } else {
          pending.resolve(response.result);
        }
      }
    } catch {
      // Non-JSON stdout (e.g. stray logging) is silently ignored by design.
    }
  }
  /**
   * Handle a fatal transport error by rejecting every pending request.
   */
  handleError(error) {
    for (const [, pending] of this.responseQueue) {
      clearTimeout(pending.timer);
      pending.reject(error);
    }
    this.responseQueue.clear();
  }
  /**
   * Send the MCP initialize handshake request.
   */
  async initialize() {
    await this.request("initialize", {
      protocolVersion: "2024-11-05",
      capabilities: {},
      clientInfo: { name: "wauldo-typescript", version: "0.1.0" }
    });
  }
  /**
   * Send a JSON-RPC request and wait for the matching response.
   * @param method - JSON-RPC method name
   * @param params - optional params object (omitted from the wire when falsy)
   * @param timeout - optional per-call timeout in ms, overriding the default
   * @throws {ConnectionError} when not connected or the stdin write fails
   * @throws {TimeoutError} when no response arrives within the timeout
   * @throws {ServerError} when the server returns a JSON-RPC error
   */
  async request(method, params, timeout) {
    if (!this.process || !this.process.stdin) {
      throw new ConnectionError("Not connected. Call connect() first.");
    }
    this.requestId++;
    const id = this.requestId;
    const request = {
      jsonrpc: "2.0",
      id,
      method
    };
    if (params) {
      request.params = params;
    }
    const requestData = JSON.stringify(request) + "\n";
    return new Promise((resolve, reject) => {
      const timeoutMs = timeout ?? this.timeout;
      // Guard so the promise settles exactly once across the timer,
      // response, and write-failure paths.
      let settled = false;
      const safeReject = (err) => {
        if (settled) return;
        settled = true;
        clearTimeout(timer);
        this.responseQueue.delete(id);
        reject(err);
      };
      const timer = setTimeout(() => {
        safeReject(new TimeoutError(`Request timed out after ${timeoutMs}ms`, timeoutMs));
      }, timeoutMs);
      this.responseQueue.set(id, {
        resolve: (value) => {
          if (!settled) {
            settled = true;
            clearTimeout(timer);
            resolve(value);
          }
        },
        reject: safeReject,
        timer
      });
      this.process.stdin.write(requestData, (error) => {
        if (error) {
          safeReject(new ConnectionError(`Failed to send request: ${error.message}`));
        }
      });
    });
  }
};
|
|
274
|
+
|
|
275
|
+
// src/client.ts
|
|
276
|
+
/**
 * Parse a JSON tool response into a normalized list of chunk records.
 * Looks up `primaryKey` first, then `fallbackKey`; entries that are not
 * objects are skipped. Missing fields get defaults (position defaults to
 * the entry's index among the kept entries; priority to "medium").
 * Returns [] for malformed JSON or a non-array payload.
 */
function parseChunkList(raw, primaryKey, fallbackKey) {
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch {
    return [];
  }
  const list = parsed?.[primaryKey] ?? parsed?.[fallbackKey] ?? [];
  if (!Array.isArray(list)) {
    return [];
  }
  const chunks = [];
  for (const entry of list) {
    if (entry === null || typeof entry !== "object") {
      continue;
    }
    // Default position is the index among kept (object) entries.
    const idx = chunks.length;
    chunks.push({
      id: String(entry.id ?? ""),
      content: String(entry.content ?? ""),
      position: Number(entry.position ?? idx),
      priority: String(entry.priority ?? "medium")
    });
  }
  return chunks;
}
|
|
291
|
+
/**
 * Parse a chunking response, preferring the "chunks" key and falling back
 * to "results" when the server uses the alternate shape.
 */
function parseChunks(raw) {
  const primary = "chunks";
  const fallback = "results";
  return parseChunkList(raw, primary, fallback);
}
|
|
294
|
+
/**
 * Parse a retrieval response, preferring the "results" key and falling back
 * to "chunks" when the server uses the alternate shape.
 */
function parseRetrievalResults(raw) {
  const primary = "results";
  const fallback = "chunks";
  return parseChunkList(raw, primary, fallback);
}
|
|
297
|
+
// High-level client over StdioTransport exposing the wauldo MCP tools
// (reasoning, concept extraction, long-context ops, knowledge graph, planning).
var AgentClient = class {
  // Underlying stdio transport to the MCP server process.
  transport;
  // When true, tool calls connect on demand instead of requiring connect().
  autoConnect;
  // Tracks whether connect() has succeeded (cleared by disconnect()).
  connected = false;
  /**
   * @param options - { serverPath?, timeout? (ms, default 30s), autoConnect? (default true) }
   */
  constructor(options = {}) {
    this.transport = new StdioTransport(
      options.serverPath,
      options.timeout ?? 3e4
    );
    this.autoConnect = options.autoConnect ?? true;
  }
  /**
   * Connect to MCP server.
   * @returns this, to allow chaining
   */
  async connect() {
    await this.transport.connect();
    this.connected = true;
    return this;
  }
  /**
   * Disconnect from MCP server.
   */
  disconnect() {
    this.transport.disconnect();
    this.connected = false;
  }
  /**
   * Ensure client is connected; auto-connects when enabled.
   * @throws {ConnectionError} when not connected and autoConnect is off
   */
  async ensureConnected() {
    if (!this.connected) {
      if (this.autoConnect) {
        await this.connect();
      } else {
        throw new ConnectionError("Not connected. Call connect() first.");
      }
    }
  }
  // Tool discovery
  /**
   * List all available tools.
   * @returns the server's tool descriptors ([] when none reported)
   */
  async listTools() {
    await this.ensureConnected();
    const result = await this.transport.request("tools/list");
    return result.tools ?? [];
  }
  /**
   * Call a tool by name.
   * @param name - tool name
   * @param args - tool arguments object
   * @returns the text of the first content item, or "" when absent
   */
  async callTool(name, args) {
    await this.ensureConnected();
    const result = await this.transport.request("tools/call", {
      name,
      arguments: args
    });
    const content = result.content;
    if (content && content.length > 0 && content[0]) {
      return content[0].text ?? "";
    }
    return "";
  }
  // Reasoning
  /**
   * Perform Tree-of-Thought reasoning on a problem.
   *
   * @param problem - non-empty problem statement
   * @param options - { depth? (1-10, default 3), branches? (1-10, default 3) }
   * @throws {ValidationError} on empty problem or out-of-range depth/branches
   *
   * @example
   * ```typescript
   * const result = await client.reason(
   *   "What's the best sorting algorithm for nearly sorted data?",
   *   { depth: 4, branches: 3 }
   * );
   * console.log(result.solution);
   * ```
   */
  async reason(problem, options = {}) {
    const { depth = 3, branches = 3 } = options;
    if (!problem.trim()) {
      throw new ValidationError("Problem cannot be empty", "problem");
    }
    if (depth < 1 || depth > 10) {
      throw new ValidationError("Depth must be between 1 and 10", "depth");
    }
    if (branches < 1 || branches > 10) {
      throw new ValidationError("Branches must be between 1 and 10", "branches");
    }
    const content = await this.callTool("reason_tree_of_thought", {
      problem,
      depth,
      branches
    });
    return this.parseReasoningResult(content, problem, depth, branches);
  }
  // Normalize a reasoning response: prefer structured JSON; otherwise scan
  // the text for a "Solution:"/"Best path:" marker and take the next
  // non-blank line as the solution.
  parseReasoningResult(content, problem, depth, branches) {
    try {
      const data = JSON.parse(content);
      if (data.solution !== void 0) {
        return {
          problem: data.problem ?? problem,
          solution: data.solution,
          thoughtTree: data.thought_tree ?? content,
          depth: data.depth ?? depth,
          branches: data.branches ?? branches,
          rawContent: content
        };
      }
    } catch {
    }
    // Plain-text fallback path.
    const lines = content.split("\n");
    let solution = "";
    let inSolution = false;
    for (const line of lines) {
      if (line.includes("Solution:") || line.includes("Best path:")) {
        inSolution = true;
        continue;
      }
      if (inSolution && line.trim()) {
        solution = line.trim();
        break;
      }
    }
    return {
      problem,
      solution: solution || "See thought tree for analysis",
      thoughtTree: content,
      depth,
      branches,
      rawContent: content
    };
  }
  // Concept extraction
  /**
   * Extract concepts from text or code.
   *
   * @param text - non-empty input text
   * @param sourceType - "text" (default) or "code"
   * @throws {ValidationError} when text is empty
   *
   * @example
   * ```typescript
   * const result = await client.extractConcepts(code, 'code');
   * for (const concept of result.concepts) {
   *   console.log(`${concept.name}: ${concept.weight}`);
   * }
   * ```
   */
  async extractConcepts(text, sourceType = "text") {
    if (!text.trim()) {
      throw new ValidationError("Text cannot be empty", "text");
    }
    const content = await this.callTool("extract_concepts", {
      text,
      source_type: sourceType
    });
    return this.parseConceptResult(content, sourceType);
  }
  // Normalize a concept-extraction response: prefer structured JSON;
  // otherwise treat "- name: ..." bullet lines as concepts with defaults.
  parseConceptResult(content, sourceType) {
    try {
      const data = JSON.parse(content);
      if (Array.isArray(data.concepts)) {
        return {
          concepts: data.concepts.map((c) => ({
            name: String(c.name ?? ""),
            conceptType: String(c.concept_type ?? "Entity"),
            weight: Number(c.weight ?? 0.8)
          })),
          // Only accept a known source_type from the server; else keep ours.
          sourceType: ["text", "code"].includes(String(data.source_type)) ? String(data.source_type) : sourceType,
          rawContent: content
        };
      }
    } catch {
    }
    // Plain-text fallback: parse markdown-style bullets.
    const concepts = [];
    const lines = content.split("\n");
    for (const line of lines) {
      if (line.trim().startsWith("- ")) {
        const name = line.trim().slice(2).split(":")[0]?.trim();
        if (name) {
          concepts.push({
            name,
            conceptType: "Entity",
            weight: 0.8
          });
        }
      }
    }
    return {
      concepts,
      sourceType,
      rawContent: content
    };
  }
  // Long context management
  /**
   * Split a document into manageable chunks.
   * @param content - non-empty document text
   * @param chunkSize - target chunk size (default 512)
   * @throws {ValidationError} when content is empty
   */
  async chunkDocument(content, chunkSize = 512) {
    if (!content.trim()) {
      throw new ValidationError("Content cannot be empty", "content");
    }
    const result = await this.callTool("manage_long_context", {
      operation: "chunk",
      content,
      chunk_size: chunkSize
    });
    const chunks = parseChunks(result);
    return {
      chunks,
      totalChunks: chunks.length,
      rawContent: result
    };
  }
  /**
   * Retrieve relevant context for a query.
   * @param query - non-empty query string
   * @param topK - maximum number of results (default 5)
   * @throws {ValidationError} when query is empty
   */
  async retrieveContext(query, topK = 5) {
    if (!query.trim()) {
      throw new ValidationError("Query cannot be empty", "query");
    }
    const result = await this.callTool("manage_long_context", {
      operation: "retrieve",
      query,
      top_k: topK
    });
    return {
      query,
      results: parseRetrievalResults(result),
      rawContent: result
    };
  }
  /**
   * Summarize document content.
   * @param content - non-empty document text
   * @returns the raw summary text from the tool
   * @throws {ValidationError} when content is empty
   */
  async summarize(content) {
    if (!content.trim()) {
      throw new ValidationError("Content cannot be empty", "content");
    }
    return this.callTool("manage_long_context", {
      operation: "summarize",
      content
    });
  }
  // Knowledge graph
  /**
   * Search the knowledge graph.
   * NOTE(review): nodes is always returned empty here; callers must read
   * rawContent for the actual results — confirm whether parsing is planned.
   * @throws {ValidationError} when query is empty
   */
  async searchKnowledge(query, limit = 10) {
    if (!query.trim()) {
      throw new ValidationError("Query cannot be empty", "query");
    }
    const result = await this.callTool("query_knowledge_graph", {
      operation: "search",
      query,
      limit
    });
    return {
      operation: "search",
      nodes: [],
      rawContent: result
    };
  }
  /**
   * Add concepts from text to knowledge graph.
   * NOTE(review): nodes is always returned empty; see searchKnowledge.
   * @throws {ValidationError} when text is empty
   */
  async addToKnowledge(text) {
    if (!text.trim()) {
      throw new ValidationError("Text cannot be empty", "text");
    }
    const result = await this.callTool("query_knowledge_graph", {
      operation: "add",
      text
    });
    return {
      operation: "add",
      nodes: [],
      rawContent: result
    };
  }
  /**
   * Get knowledge graph statistics (raw text in rawContent).
   */
  async knowledgeStats() {
    const result = await this.callTool("query_knowledge_graph", {
      operation: "stats"
    });
    return {
      operation: "stats",
      nodes: [],
      rawContent: result
    };
  }
  // Task planning
  /**
   * Break down a task into actionable steps.
   *
   * @param task - non-empty task description
   * @param options - { context?, maxSteps? (1-20, default 10), detailLevel? (default "normal") }
   * @throws {ValidationError} on empty task or out-of-range maxSteps
   *
   * @example
   * ```typescript
   * const plan = await client.planTask(
   *   "Implement user authentication",
   *   { context: "Using JWT tokens", detailLevel: "detailed" }
   * );
   * for (const step of plan.steps) {
   *   console.log(`${step.number}. ${step.title}`);
   * }
   * ```
   */
  async planTask(task, options = {}) {
    const {
      context = "",
      maxSteps = 10,
      detailLevel = "normal"
    } = options;
    if (!task.trim()) {
      throw new ValidationError("Task cannot be empty", "task");
    }
    if (maxSteps < 1 || maxSteps > 20) {
      throw new ValidationError("maxSteps must be between 1 and 20", "maxSteps");
    }
    const content = await this.callTool("plan_task", {
      task,
      context,
      max_steps: maxSteps,
      detail_level: detailLevel
    });
    return this.parsePlanResult(content, task);
  }
  // Normalize a planning response: prefer structured JSON; otherwise parse
  // markdown-ish text ("**Category**:", "N. title" step lines,
  // "**Estimated total effort**:").
  parsePlanResult(content, task) {
    try {
      const data = JSON.parse(content);
      if (Array.isArray(data.steps)) {
        return {
          task: data.task ?? task,
          category: data.category ?? "General",
          steps: data.steps.map((s, i) => ({
            number: Number(s.number ?? i + 1),
            title: String(s.title ?? ""),
            description: String(s.description ?? ""),
            priority: String(s.priority ?? "Medium"),
            effort: String(s.effort ?? ""),
            dependencies: Array.isArray(s.dependencies) ? s.dependencies.map(String) : []
          })),
          totalEffort: String(data.total_effort ?? ""),
          rawContent: content
        };
      }
    } catch {
    }
    // Plain-text fallback path.
    const steps = [];
    let category = "General";
    let totalEffort = "";
    let currentStep = 0;
    // Matches numbered step lines like "1. Do the thing".
    const stepPattern = /^(\d+)\.\s+(.+)$/;
    const lines = content.split("\n");
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed.startsWith("**Category**:")) {
        category = trimmed.slice("**Category**:".length).trim() || "General";
        continue;
      }
      const match = stepPattern.exec(trimmed);
      if (match) {
        // Steps are renumbered sequentially regardless of the listed number.
        currentStep++;
        const title = match[2]?.trim() ?? "";
        if (title) {
          steps.push({
            number: currentStep,
            title,
            description: "",
            priority: "Medium",
            effort: "",
            dependencies: []
          });
        }
        continue;
      }
      if (trimmed.startsWith("**Estimated total effort**:")) {
        totalEffort = trimmed.slice("**Estimated total effort**:".length).trim();
      }
    }
    return {
      task,
      category,
      steps,
      totalEffort,
      rawContent: content
    };
  }
};
|
|
681
|
+
|
|
682
|
+
// src/conversation.ts
|
|
683
|
+
// Multi-turn chat wrapper that accumulates message history and replays it
// on every request so the model sees prior context.
var Conversation = class {
  // Chat client used to send completion requests.
  client;
  // Ordered message history (system, then alternating user/assistant turns).
  history = [];
  // Model identifier sent with every request.
  model;
  /**
   * @param client - client exposing chat({ model, messages })
   * @param options - { model? (default "default"), system? (optional system prompt) }
   */
  constructor(client, options) {
    this.client = client;
    this.model = options?.model ?? "default";
    if (options?.system) {
      this.history.push({ role: "system", content: options.system });
    }
  }
  /**
   * Send a user message and get the assistant reply.
   * Both the user message and the assistant reply are appended to history.
   * On failure the user message is rolled back so history stays consistent.
   *
   * @param message - The user message to send
   * @returns The assistant's reply content string
   *
   * @example
   * ```typescript
   * const conv = client.conversation({ system: 'You are helpful' });
   * const reply = await conv.say('What is TypeScript?');
   * const followUp = await conv.say('Show me an example'); // includes prior context
   * ```
   */
  async say(message) {
    this.history.push({ role: "user", content: message });
    let chatResponse;
    try {
      chatResponse = await this.client.chat({
        model: this.model,
        // Send a snapshot so the client cannot mutate our history.
        messages: this.history.slice()
      });
    } catch (err) {
      // Roll back the optimistic user turn before propagating the failure.
      this.history.pop();
      throw err;
    }
    const assistantText = chatResponse.choices[0]?.message?.content ?? "";
    this.history.push({ role: "assistant", content: assistantText });
    return assistantText;
  }
  /**
   * Return a copy of the full conversation history.
   *
   * @returns An array of ChatMessage objects (system, user, assistant turns)
   *
   * @example
   * ```typescript
   * const history = conv.getHistory();
   * console.log(`${history.length} messages in conversation`);
   * ```
   */
  getHistory() {
    return this.history.slice();
  }
  /**
   * Clear user and assistant messages, preserving the system prompt (if any).
   *
   * @example
   * ```typescript
   * conv.clear();
   * // System prompt is preserved; user/assistant messages are removed.
   * ```
   */
  clear() {
    const kept = [];
    for (const turn of this.history) {
      if (turn.role === "system") {
        // Keep only the first system message, matching original semantics.
        kept.push(turn);
        break;
      }
    }
    this.history = kept;
  }
};
|
|
752
|
+
|
|
753
|
+
// src/retry_fetch.ts
|
|
754
|
+
// HTTP statuses that warrant an automatic retry: rate limiting plus
// transient server errors.
var RETRYABLE_STATUSES = /* @__PURE__ */ new Set([429, 500, 502, 503, 504]);
/**
 * Perform an HTTP request with per-attempt timeout, exponential backoff,
 * and automatic retries on retryable statuses and network errors.
 *
 * @param config - { baseUrl, headers, timeoutMs, maxRetries, retryBackoffMs,
 *                   onLog?, onRequest?, onResponse?, onError? }
 * @param method - HTTP method
 * @param path - path appended to config.baseUrl
 * @param body - optional JSON body (ignored for GET/HEAD)
 * @param overrideTimeoutMs - optional per-call timeout override
 * @returns the parsed JSON response body
 * @throws {Error} "HTTP <status>: <text>" on a non-retryable or exhausted
 *         failure; rethrows network/abort errors once retries are spent
 */
async function fetchWithRetry(config, method, path, body, overrideTimeoutMs) {
  const effectiveTimeout = overrideTimeoutMs ?? config.timeoutMs;
  let lastError;
  for (let attempt = 0; attempt <= config.maxRetries; attempt++) {
    try {
      config.onLog?.("debug", `${method} ${path} (attempt ${attempt + 1})`);
      config.onRequest?.(method, path);
      const start = Date.now();
      const options = {
        method,
        headers: config.headers,
        // Each attempt gets its own timeout signal.
        signal: AbortSignal.timeout(effectiveTimeout)
      };
      if (body !== void 0 && method !== "GET" && method !== "HEAD") {
        options.body = JSON.stringify(body);
      }
      const resp = await fetch(`${config.baseUrl}${path}`, options);
      const durationMs = Date.now() - start;
      if (resp.ok) {
        config.onLog?.("debug", `${method} ${path} -> ${resp.status}`);
        config.onResponse?.(resp.status, durationMs);
        return resp.json();
      }
      config.onResponse?.(resp.status, durationMs);
      if (RETRYABLE_STATUSES.has(resp.status) && attempt < config.maxRetries) {
        // Honors Retry-After when present (see computeBackoff).
        const waitMs = computeBackoff(config.retryBackoffMs, attempt, resp);
        config.onLog?.("warn", `${method} ${path} -> ${resp.status}, retrying in ${waitMs}ms`);
        await sleep(waitMs);
        lastError = new Error(`HTTP ${resp.status}: ${await resp.text()}`);
        continue;
      }
      // Non-retryable HTTP error: surface status and body text.
      const text = await resp.text();
      config.onLog?.("error", `${method} ${path} -> ${resp.status}: ${text}`);
      const err = new Error(`HTTP ${resp.status}: ${text}`);
      config.onError?.(err);
      throw err;
    } catch (err) {
      // fetch rejects with TypeError on network failures; retry those.
      // NOTE(review): AbortSignal.timeout aborts reject with a DOMException
      // (not TypeError), so timeouts are NOT retried — confirm intended.
      if (err instanceof TypeError && attempt < config.maxRetries) {
        const waitMs = config.retryBackoffMs * Math.pow(2, attempt);
        config.onLog?.("warn", `${method} ${path} network error, retrying in ${waitMs}ms`);
        await sleep(waitMs);
        lastError = err;
        continue;
      }
      if (err instanceof Error) {
        config.onError?.(err);
      }
      throw err;
    }
  }
  // Only reachable when the final attempt ended in a retry branch.
  const finalErr = lastError ?? new Error("Request failed after retries");
  config.onError?.(finalErr);
  throw finalErr;
}
|
|
809
|
+
/**
 * Compute the wait before the next retry attempt.
 *
 * Honors the response's Retry-After header per RFC 9110, which allows either
 * delay-seconds ("120") or an HTTP-date ("Fri, 31 Dec 1999 23:59:59 GMT");
 * the original only handled the numeric form, so date-form values silently
 * fell through to exponential backoff. Falls back to exponential backoff
 * (retryBackoffMs * 2^attempt) when the header is absent or unusable.
 *
 * @param retryBackoffMs - base backoff in milliseconds
 * @param attempt - zero-based attempt index
 * @param resp - Response whose headers may carry Retry-After
 * @returns wait time in milliseconds (always > 0)
 */
function computeBackoff(retryBackoffMs, attempt, resp) {
  const retryAfter = resp.headers.get("Retry-After");
  if (retryAfter) {
    // delay-seconds form.
    const seconds = Number(retryAfter);
    if (!Number.isNaN(seconds) && seconds > 0) {
      return seconds * 1e3;
    }
    // HTTP-date form: wait until the given instant (ignore past dates).
    const dateMs = Date.parse(retryAfter) - Date.now();
    if (Number.isFinite(dateMs) && dateMs > 0) {
      return dateMs;
    }
  }
  return retryBackoffMs * Math.pow(2, attempt);
}
|
|
819
|
+
/**
 * Resolve after the given number of milliseconds.
 * @param ms - delay in milliseconds
 * @returns a Promise that resolves with no value after the delay
 */
function sleep(ms) {
  return new Promise((wake) => {
    setTimeout(wake, ms);
  });
}
|
|
822
|
+
|
|
823
|
+
// src/sse_parser.ts
|
|
824
|
+
/**
 * Parse an OpenAI-style SSE response body, yielding each delta content
 * string. Stops at the "[DONE]" sentinel; malformed chunks are logged and
 * skipped. Always releases the reader lock.
 *
 * Fix: a final "data:" event not terminated by "\n" used to remain in the
 * buffer when the stream ended and was silently dropped; the trailing
 * buffer is now parsed after the read loop.
 *
 * @param body - the ReadableStream<Uint8Array> of an SSE response
 * @yields content strings from chunk.choices[0].delta.content
 */
async function* parseSSEStream(body) {
  const reader = body.getReader();
  const decoder = new TextDecoder();
  let buffer = "";
  // Parse one SSE line. Returns { done: true } for the [DONE] sentinel,
  // { content } when the chunk carries delta text, or {} otherwise.
  const parseLine = (line) => {
    const trimmed = line.trim();
    if (!trimmed.startsWith("data: ")) return {};
    const payload = trimmed.slice(6);
    if (payload === "[DONE]") return { done: true };
    try {
      const chunk = JSON.parse(payload);
      const choices = chunk["choices"];
      const delta = choices?.[0]?.["delta"];
      const content = delta?.["content"];
      if (typeof content === "string") return { content };
    } catch (e) {
      console.warn("[wauldo] Malformed SSE chunk skipped:", String(e).slice(0, 100));
    }
    return {};
  };
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        // Flush any bytes the decoder buffered across chunk boundaries.
        const remaining = decoder.decode();
        if (remaining) buffer += remaining;
        break;
      }
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      // The last element may be a partial line; keep it for the next read.
      buffer = lines.pop() ?? "";
      for (const line of lines) {
        const parsed = parseLine(line);
        if (parsed.done) return;
        if (parsed.content !== void 0) yield parsed.content;
      }
    }
    // FIX: parse a trailing event that was not newline-terminated.
    if (buffer) {
      for (const line of buffer.split("\n")) {
        const parsed = parseLine(line);
        if (parsed.done) return;
        if (parsed.content !== void 0) yield parsed.content;
      }
    }
  } finally {
    reader.releaseLock();
  }
}
|
|
859
|
+
|
|
860
|
+
// src/http_client.ts
|
|
861
|
+
/**
 * Guard a parsed HTTP response against null/undefined before use.
 * @param data - the parsed response body
 * @param typeName - name used in the error message
 * @returns data unchanged when it is neither null nor undefined
 * @throws {ServerError} when the response is null or undefined
 */
function validateResponse(data, typeName) {
  // Loose equality deliberately matches both null and undefined.
  if (data == null) {
    throw new ServerError(`Invalid ${typeName}: response is null`, 0);
  }
  return data;
}
|
|
867
|
+
/**
 * HTTP client for a wauldo server exposing OpenAI-compatible, RAG and
 * orchestrator endpoints. All non-streaming request plumbing (retries,
 * timeouts, observability hooks) is delegated to `fetchWithRetry` through
 * the shared `retryConfig`; streaming goes through `fetch` + `parseSSEStream`.
 */
var HttpClient = class {
  /** Resolved connection settings shared by every request. */
  retryConfig;
  /**
   * @param config - Optional settings: `baseUrl`, `apiKey`, extra `headers`,
   *   `timeoutMs`, `maxRetries`, `retryBackoffMs`, and the `onLog` /
   *   `onRequest` / `onResponse` / `onError` hooks.
   */
  constructor(config = {}) {
    // Normalize the base URL (strip one trailing slash) and build the header
    // map: JSON content type first, then bearer auth, then caller-supplied
    // overrides — later keys win, matching Object.assign semantics.
    const root = (config.baseUrl ?? "http://localhost:3000").replace(/\/$/, "");
    const mergedHeaders = {
      "Content-Type": "application/json",
      ...(config.apiKey ? { Authorization: `Bearer ${config.apiKey}` } : {}),
      ...config.headers
    };
    this.retryConfig = {
      baseUrl: root,
      headers: mergedHeaders,
      timeoutMs: config.timeoutMs ?? 120000,
      maxRetries: config.maxRetries ?? 3,
      retryBackoffMs: config.retryBackoffMs ?? 1000,
      onLog: config.onLog,
      onRequest: config.onRequest,
      onResponse: config.onResponse,
      onError: config.onError
    };
  }
  // ── OpenAI-compatible endpoints ──────────────────────────────────────
  /** GET /v1/models — List available LLM models */
  async listModels() {
    const payload = await fetchWithRetry(this.retryConfig, "GET", "/v1/models");
    return validateResponse(payload, "ModelList");
  }
  /**
   * POST /v1/chat/completions — Chat completion (non-streaming).
   *
   * @param request - The chat request (model, messages, temperature, etc.)
   * @param options - Optional per-request overrides (e.g. timeoutMs)
   * @returns The full chat completion response
   *
   * @example
   * ```typescript
   * const resp = await client.chat({
   *   model: 'qwen2.5:7b',
   *   messages: [{ role: 'user', content: 'Hello' }],
   * });
   * console.log(resp.choices[0]?.message?.content);
   * ```
   */
  async chat(request, options) {
    // `stream: false` is forced so the server never switches to SSE here.
    const payload = await fetchWithRetry(
      this.retryConfig,
      "POST",
      "/v1/chat/completions",
      { ...request, stream: false },
      options?.timeoutMs
    );
    return validateResponse(payload, "ChatResponse");
  }
  /** Convenience: single message chat, returns content string */
  async chatSimple(model, message) {
    const messages = [{ role: "user", content: message }];
    const reply = await this.chat({ model, messages });
    return reply.choices[0]?.message?.content ?? "";
  }
  /** POST /v1/chat/completions — SSE streaming, yields content chunks */
  async *chatStream(request, options) {
    const rc = this.retryConfig;
    const timeout = options?.timeoutMs ?? rc.timeoutMs;
    rc.onRequest?.("POST", "/v1/chat/completions");
    const startedAt = Date.now();
    let response;
    try {
      // Streaming bypasses fetchWithRetry: a half-consumed SSE body
      // cannot be retried transparently.
      response = await fetch(`${rc.baseUrl}/v1/chat/completions`, {
        method: "POST",
        headers: { ...rc.headers },
        body: JSON.stringify({ ...request, stream: true }),
        signal: AbortSignal.timeout(timeout)
      });
    } catch (err) {
      if (err instanceof Error) rc.onError?.(err);
      throw err;
    }
    if (!response.ok) {
      const detail = await response.text();
      const failure = new ServerError(`HTTP ${response.status}: ${detail}`, response.status);
      rc.onError?.(failure);
      throw failure;
    }
    rc.onResponse?.(response.status, Date.now() - startedAt);
    if (!response.body) throw new ServerError("No response body for streaming", 0);
    yield* parseSSEStream(response.body);
  }
  /** POST /v1/embeddings — Generate text embeddings */
  async embeddings(input, model) {
    const payload = await fetchWithRetry(
      this.retryConfig,
      "POST",
      "/v1/embeddings",
      { input, model }
    );
    return validateResponse(payload, "EmbeddingResponse");
  }
  // ── RAG endpoints ────────────────────────────────────────────────────
  /**
   * POST /v1/upload — Upload document for RAG indexing.
   *
   * @param content - The document text to index
   * @param filename - Optional filename for the document
   * @param options - Optional per-request overrides (e.g. timeoutMs)
   * @returns Upload confirmation with document_id and chunks_count
   */
  async ragUpload(content, filename, options) {
    // `filename` is only sent when truthy, mirroring the server contract.
    const payload = await fetchWithRetry(
      this.retryConfig,
      "POST",
      "/v1/upload",
      { content, ...(filename ? { filename } : {}) },
      options?.timeoutMs
    );
    return validateResponse(payload, "RagUploadResponse");
  }
  /** POST /v1/query — Query RAG knowledge base */
  async ragQuery(query, topK = 5, options) {
    // Optional flags are added only when set, so the wire format stays minimal.
    const requestBody = {
      query,
      top_k: topK,
      ...(options?.debug ? { debug: true } : {}),
      ...(options?.qualityMode ? { quality_mode: options.qualityMode } : {})
    };
    const payload = await fetchWithRetry(
      this.retryConfig,
      "POST",
      "/v1/query",
      requestBody
    );
    return validateResponse(payload, "RagQueryResponse");
  }
  // ── Conversation & RAG helpers ────────────────────────────────────────
  /**
   * Create a stateful conversation that tracks message history automatically.
   *
   * @param options - Optional system prompt and model name
   * @returns A Conversation instance bound to this client
   *
   * @example
   * ```typescript
   * const conv = client.conversation({ system: 'You are a TypeScript expert' });
   * const reply = await conv.say('What are generics?');
   * ```
   */
  conversation(options) {
    return new Conversation(this, options);
  }
  /**
   * Upload text to RAG, then query it — one-shot Q&A over a document.
   *
   * @param question - The question to ask about the document
   * @param text - The document text to index and query
   * @param source - Optional source name (defaults to 'document')
   * @returns The answer string
   */
  async ragAsk(question, text, source = "document") {
    await this.ragUpload(text, source);
    const result = await this.ragQuery(question, 3);
    // Fall back to the raw sources when the server returns no direct answer.
    return result.answer ?? JSON.stringify(result.sources);
  }
  // ── Orchestrator endpoints ───────────────────────────────────────────
  /** POST /v1/orchestrator/execute — Route to best specialist agent */
  async orchestrate(prompt) {
    const payload = await fetchWithRetry(
      this.retryConfig,
      "POST",
      "/v1/orchestrator/execute",
      { prompt }
    );
    return validateResponse(payload, "OrchestratorResponse");
  }
  /** POST /v1/orchestrator/parallel — Run all 4 specialists in parallel */
  async orchestrateParallel(prompt) {
    const payload = await fetchWithRetry(
      this.retryConfig,
      "POST",
      "/v1/orchestrator/parallel",
      { prompt }
    );
    return validateResponse(payload, "OrchestratorResponse");
  }
};
|
|
1053
|
+
|
|
1054
|
+
// src/mock_client.ts
|
|
1055
|
+
/** Canned chat completion handed back by MockHttpClient until overridden. */
var DEFAULT_CHAT = {
  id: "mock-1",
  object: "chat.completion",
  created: 0,
  model: "mock-model",
  choices: [{ index: 0, message: { role: "assistant", content: "Mock reply" }, finish_reason: "stop" }],
  usage: { prompt_tokens: 10, completion_tokens: 5, total_tokens: 15 }
};
/** Canned model listing handed back by MockHttpClient until overridden. */
var DEFAULT_MODELS = {
  object: "list",
  data: [{ id: "mock-model", object: "model", created: 0, owned_by: "mock" }]
};
/**
 * In-memory stand-in for HttpClient: every method resolves immediately with
 * configurable canned data and appends `{ method, args }` to `calls` so tests
 * can assert on interaction order and arguments.
 */
var MockHttpClient = class {
  /** Response served by `chat`, `chatSimple` and `chatStream`. */
  chatResponse = DEFAULT_CHAT;
  /** Response served by `listModels`. */
  modelList = DEFAULT_MODELS;
  /** Chronological log of every invocation: `{ method, args }`. */
  calls = [];
  /**
   * Configure the response returned by `chat()` and `chatSimple()`.
   *
   * @param response - The ChatResponse to return on subsequent chat calls
   * @returns `this` for method chaining
   *
   * @example
   * ```typescript
   * const mock = new MockHttpClient().withChatResponse({
   *   id: 'test-1', object: 'chat.completion', created: 0, model: 'test',
   *   choices: [{ index: 0, message: { role: 'assistant', content: 'Hi' }, finish_reason: 'stop' }],
   *   usage: { prompt_tokens: 1, completion_tokens: 1, total_tokens: 2 },
   * });
   * ```
   */
  withChatResponse(response) {
    this.chatResponse = response;
    return this;
  }
  /**
   * Configure the model list returned by `listModels()`.
   *
   * @param models - Array of ModelInfo objects
   * @returns `this` for method chaining
   *
   * @example
   * ```typescript
   * const mock = new MockHttpClient().withModels([
   *   { id: 'gpt-4', object: 'model', created: 0, owned_by: 'openai' },
   * ]);
   * ```
   */
  withModels(models) {
    this.modelList = { object: "list", data: models };
    return this;
  }
  async listModels() {
    this.record("listModels");
    return this.modelList;
  }
  async chat(request, _options) {
    this.record("chat", request);
    return this.chatResponse;
  }
  async chatSimple(model, message) {
    this.record("chatSimple", model, message);
    return this.chatResponse.choices[0]?.message?.content ?? "";
  }
  async *chatStream(_request, _options) {
    this.record("chatStream", _request);
    // Re-emit the canned reply word by word, each chunk trailing a space,
    // mimicking incremental SSE delivery.
    const text = this.chatResponse.choices[0]?.message?.content ?? "";
    for (const word of text.split(" ")) {
      yield `${word} `;
    }
  }
  async embeddings(input, model) {
    this.record("embeddings", input, model);
    // A lone string is treated as a one-element batch.
    const batch = Array.isArray(input) ? input : [input];
    const data = batch.map((_, i) => ({ embedding: [0.1, 0.2, 0.3], index: i }));
    return { data, model, usage: { prompt_tokens: 5, total_tokens: 5 } };
  }
  async ragUpload(content, filename, _options) {
    this.record("ragUpload", content, filename);
    return { document_id: "mock-doc-1", chunks_count: 1 };
  }
  async ragQuery(query, topK = 5, options) {
    this.record("ragQuery", query, topK, options);
    return { answer: `Mock answer for: ${query}`, sources: [] };
  }
  async orchestrate(prompt) {
    this.record("orchestrate", prompt);
    return { final_output: `Mock orchestration: ${prompt}` };
  }
  async orchestrateParallel(prompt) {
    this.record("orchestrateParallel", prompt);
    return { final_output: `Mock parallel: ${prompt}` };
  }
  conversation(options) {
    this.record("conversation", options);
    return new Conversation(this, options);
  }
  async ragAsk(question, text, source = "document") {
    this.record("ragAsk", question, text, source);
    // Mirrors HttpClient.ragAsk: index first, then query with top_k = 3.
    await this.ragUpload(text, source);
    const result = await this.ragQuery(question, 3);
    return result.answer;
  }
  /** Append one `{ method, args }` entry to the call log. */
  record(method, ...args) {
    this.calls.push({ method, args });
  }
};
|
|
1165
|
+
export {
|
|
1166
|
+
AgentClient,
|
|
1167
|
+
ConnectionError,
|
|
1168
|
+
Conversation,
|
|
1169
|
+
HttpClient,
|
|
1170
|
+
MockHttpClient,
|
|
1171
|
+
ServerError,
|
|
1172
|
+
TimeoutError,
|
|
1173
|
+
ToolNotFoundError,
|
|
1174
|
+
ValidationError,
|
|
1175
|
+
WauldoError
|
|
1176
|
+
};
|