langfn 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +32 -0
- package/dist/chunk-6NDYO7WC.js +131 -0
- package/dist/chunk-IDYTII3W.js +47 -0
- package/dist/chunk-LIUWQ4NY.js +49 -0
- package/dist/chunk-MHMMFGVC.js +60 -0
- package/dist/chunk-MLKGABMK.js +9 -0
- package/dist/index.d.ts +666 -0
- package/dist/index.js +1174 -0
- package/dist/openai-4W5RU3CU.js +7 -0
- package/dist/openai-LHMGJO6V.js +7 -0
- package/dist/react-EKLNOUM4.js +7 -0
- package/dist/sse-4Y3LCWWO.js +13 -0
- package/dist/tool_agent-OA4BZHA6.js +7 -0
- package/package.json +63 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,1174 @@
|
|
|
1
|
+
import {
|
|
2
|
+
Embeddings,
|
|
3
|
+
OpenAIEmbeddings,
|
|
4
|
+
Retriever,
|
|
5
|
+
VectorStore
|
|
6
|
+
} from "./chunk-LIUWQ4NY.js";
|
|
7
|
+
import {
|
|
8
|
+
ChatModel,
|
|
9
|
+
OpenAIChatModel
|
|
10
|
+
} from "./chunk-6NDYO7WC.js";
|
|
11
|
+
import {
|
|
12
|
+
ReActAgent
|
|
13
|
+
} from "./chunk-MHMMFGVC.js";
|
|
14
|
+
import {
|
|
15
|
+
ToolAgent
|
|
16
|
+
} from "./chunk-IDYTII3W.js";
|
|
17
|
+
import {
|
|
18
|
+
__export
|
|
19
|
+
} from "./chunk-MLKGABMK.js";
|
|
20
|
+
|
|
21
|
+
// src/client.ts
|
|
22
|
+
// src/client.ts
// Facade over a ChatModel adapter: adds optional response caching, structured
// output parsing, batching, embeddings access, and agent factories.
var LangFn = class {
  // Underlying provider adapter (ChatModel instance) supplied at construction.
  model;
  // Full client configuration; read again by feedback() for observability.
  config;
  constructor(config) {
    this.model = config.model;
    this.config = config;
  }
  /**
   * Single-prompt completion with optional caching and structured parsing.
   * NOTE(review): `this._cache` is never assigned anywhere in this class —
   * presumably a cache is attached externally; otherwise both cache branches
   * are no-ops. Confirm against the rest of the package.
   */
  async complete(prompt, options = {}) {
    const cache = this._cache;
    if (cache) {
      // Cache key is (prompt, model id, provider, metadata).
      const cached = await cache.get(prompt, this.model.model, this.model.provider, options.metadata);
      if (cached) {
        if (options.structured) {
          // Re-parse even on cache hits so `parsed` is always fresh.
          const parsed = options.structured.parse(cached.content);
          return { ...cached, parsed };
        }
        return cached;
      }
    }
    const result = await this.model.complete({ prompt, ...options });
    if (cache) {
      await cache.set(prompt, this.model.model, this.model.provider, result, options.metadata);
    }
    if (options.structured) {
      const parsed = options.structured.parse(result.content);
      return { ...result, parsed };
    }
    return result;
  }
  /**
   * Multi-message chat. Tools are serialized via each tool's json_schema();
   * note `...options` is spread last, so an explicit options.tools/tool_choice
   * would overwrite the mapped values.
   */
  async chat(messages, options = {}) {
    const result = await this.model.chat({
      messages,
      tools: options.tools?.map((t) => t.json_schema()),
      tool_choice: options.tool_choice,
      ...options
    });
    if (options.structured) {
      const parsed = options.structured.parse(result.message.content);
      return { ...result, parsed };
    }
    return result;
  }
  /**
   * Record user feedback for a trace. Silently does nothing unless
   * observability is enabled and a watchfn sink is configured.
   */
  async feedback(traceId, options) {
    if (this.config.observability?.enabled && this.config.observability?.watchfn) {
      this.config.observability.watchfn.track("feedback", {
        traceId,
        rating: options.rating,
        comment: options.comment,
        metadata: options.metadata || {}
      });
    }
  }
  // Delegate streaming straight to the model adapter.
  async *stream(prompt, options = {}) {
    yield* this.model.stream({ prompt, ...options });
  }
  // Server-sent-events wrapper; the SSE encoder is lazily imported.
  async *streamSSE(prompt) {
    const { toSSE } = await import("./sse-4Y3LCWWO.js");
    yield* toSSE(this, prompt);
  }
  /**
   * Complete many prompts with bounded parallelism (default 5 in flight).
   * Results are returned in input order.
   */
  async completeBatch(prompts, options = {}) {
    const concurrency = options.concurrency ?? 5;
    const results = [];
    for (let i = 0; i < prompts.length; i += concurrency) {
      const batch = prompts.slice(i, i + concurrency);
      const batchResults = await Promise.all(batch.map((p) => this.complete(p, options)));
      results.push(...batchResults);
    }
    return results;
  }
  /**
   * Embed a string (query) or array of strings (documents).
   * Lazily constructs an OpenAIEmbeddings instance, but only when the chat
   * model is itself an OpenAIChatModel (reuses its apiKey/baseUrl); any other
   * provider must set `this._embeddings` externally or this throws.
   */
  async embed(texts) {
    const { OpenAIEmbeddings: OpenAIEmbeddings2 } = await import("./openai-4W5RU3CU.js");
    const { OpenAIChatModel: OpenAIChatModel2 } = await import("./openai-LHMGJO6V.js");
    if (!this._embeddings && this.model instanceof OpenAIChatModel2) {
      this._embeddings = new OpenAIEmbeddings2({
        apiKey: this.model.apiKey,
        baseUrl: this.model.baseUrl
      });
    }
    const embeddings = this._embeddings;
    if (!embeddings) throw new Error("Embeddings not configured");
    if (typeof texts === "string") {
      return embeddings.embedQuery(texts);
    }
    return embeddings.embedDocuments(texts);
  }
  // Factory for a ReAct-style agent driven by this client.
  async createReactAgent(tools, options = {}) {
    const { ReActAgent: ReActAgent2 } = await import("./react-EKLNOUM4.js");
    return new ReActAgent2({
      model: this,
      tools,
      ...options
    });
  }
  // Factory for a tool-calling agent driven by this client.
  // NOTE(review): passes `lang: this` while createReactAgent passes
  // `model: this` — presumably intentional per-agent config shape; confirm.
  async createToolAgent(tools, options = {}) {
    const { ToolAgent: ToolAgent2 } = await import("./tool_agent-OA4BZHA6.js");
    return new ToolAgent2({
      lang: this,
      tools,
      ...options
    });
  }
  // To be implemented: chains, etc.
};
|
|
125
|
+
/**
 * Convenience factory: build a LangFn client from the given configuration.
 */
function langfn(config) {
  const client = new LangFn(config);
  return client;
}
|
|
128
|
+
|
|
129
|
+
// src/tools/base.ts
|
|
130
|
+
import { zodToJsonSchema } from "zod-to-json-schema";
|
|
131
|
+
// src/tools/base.ts
// A named, schema-validated callable exposed to models as a function tool.
var Tool = class {
  name;
  description;
  schema;
  _execute;
  /**
   * @param config.name        Tool identifier presented to the model.
   * @param config.description Human/model-readable purpose string.
   * @param config.schema      Zod schema validating the arguments.
   * @param config.execute     Implementation invoked with parsed args.
   */
  constructor(config) {
    const { name, description, schema, execute } = config;
    this.name = name;
    this.description = description;
    this.schema = schema;
    this._execute = execute;
  }
  /** Serialize the tool into the JSON-schema shape providers expect. */
  json_schema() {
    const parameters = zodToJsonSchema(this.schema);
    return { name: this.name, description: this.description, parameters };
  }
  /** Validate raw arguments against the schema, then run the implementation. */
  async run(args2, context) {
    const validated = this.schema.parse(args2);
    const ctx = context || { metadata: {} };
    return this._execute(validated, ctx);
  }
};
|
|
154
|
+
|
|
155
|
+
// src/orchestration/chain.ts
|
|
156
|
+
// src/orchestration/chain.ts
// Composable async pipeline: wraps a single async function and offers
// static combinators for sequential, parallel, and map/reduce execution.
var Chain = class _Chain {
  constructor(_run) {
    this._run = _run;
  }
  /** Execute the chain on one input. */
  async run(input) {
    return this._run(input);
  }
  /** Run each step in order, feeding every result into the next step. */
  static sequential(steps) {
    return new _Chain((input) =>
      steps.reduce((pending, step) => pending.then(step), Promise.resolve(input))
    );
  }
  /** Run every step against the same input; resolves to an array of results. */
  static parallel(steps) {
    return new _Chain((input) => Promise.all(steps.map((step) => step(input))));
  }
  /** Map all inputs concurrently, then reduce the mapped values to one result. */
  static mapReduce(options) {
    return new _Chain(async (inputs) =>
      options.reduce(await Promise.all(inputs.map(options.map)))
    );
  }
};
|
|
184
|
+
|
|
185
|
+
// src/orchestration/index.ts
|
|
186
|
+
// Namespace bag for the `orchestration` sub-module, populated via the
// bundler-generated __export helper from chunk-MLKGABMK.js.
var orchestration_exports = {};
__export(orchestration_exports, {
  Chain: () => Chain
});
|
|
190
|
+
|
|
191
|
+
// src/models/index.ts
|
|
192
|
+
// Namespace bag for the `models` sub-module (all chat-model adapters),
// populated via the bundler-generated __export helper.
var models_exports = {};
__export(models_exports, {
  AnthropicChatModel: () => AnthropicChatModel,
  ChatModel: () => ChatModel,
  MockChatModel: () => MockChatModel,
  OllamaChatModel: () => OllamaChatModel,
  OpenAIChatModel: () => OpenAIChatModel
});
|
|
200
|
+
|
|
201
|
+
// src/models/mock.ts
|
|
202
|
+
// src/models/mock.ts
// Deterministic in-memory model for tests: replays canned responses in
// order, falling back to echoing the request once the queue is empty.
var MockChatModel = class extends ChatModel {
  provider = "mock";
  model = "mock-1";
  /** @param responses - queue of canned replies, consumed front-to-back. */
  constructor(responses = []) {
    super();
    this.responses = responses;
  }
  // Pops the next canned reply; echoes the prompt when none remain.
  async complete(request) {
    const reply = this.responses.shift() || request.prompt;
    return { content: reply, raw: { mock: true } };
  }
  // Pops the next canned reply; otherwise echoes the messages as JSON.
  async chat(request) {
    const reply = this.responses.shift() || JSON.stringify({ echo: request.messages });
    const message = { role: "assistant", content: reply };
    return { message, raw: { mock: true } };
  }
  // Emits the whole reply as one content chunk followed by an end chunk.
  async *stream(request) {
    const reply = this.responses.shift() || request.prompt;
    yield { type: "content", content: reply, delta: reply };
    yield { type: "end", finish_reason: "stop" };
  }
};
|
|
226
|
+
|
|
227
|
+
// src/models/anthropic.ts
|
|
228
|
+
// src/models/anthropic.ts
// Chat-model adapter for the Anthropic Messages API.
var AnthropicChatModel = class extends ChatModel {
  provider = "anthropic";
  model;
  apiKey;
  baseUrl;
  version;
  /**
   * @param config.apiKey  Anthropic API key (sent as `x-api-key`).
   * @param config.model   Model id; defaults to "claude-3-opus-20240229".
   * @param config.baseUrl API root; defaults to the public endpoint.
   * @param config.version `anthropic-version` header; defaults to "2023-06-01".
   */
  constructor(config) {
    super();
    this.apiKey = config.apiKey;
    this.model = config.model || "claude-3-opus-20240229";
    this.baseUrl = config.baseUrl || "https://api.anthropic.com/v1";
    this.version = config.version || "2023-06-01";
  }
  // Wrapper over global fetch that injects auth/version headers and
  // converts non-2xx responses into thrown Errors.
  async fetch(path, options) {
    const response = await fetch(`${this.baseUrl}${path}`, {
      ...options,
      headers: {
        "x-api-key": this.apiKey,
        "anthropic-version": this.version,
        "content-type": "application/json",
        ...options.headers
      }
    });
    if (!response.ok) {
      const error = await response.text();
      throw new Error(`Anthropic API Error ${response.status}: ${error}`);
    }
    return response;
  }
  // Completion is implemented as a single-user-message chat.
  async complete(request) {
    const chatResp = await this.chat({
      messages: [{ role: "user", content: request.prompt }],
      metadata: request.metadata
    });
    return {
      content: chatResp.message.content,
      usage: chatResp.usage,
      raw: chatResp.raw,
      trace_id: chatResp.trace_id
    };
  }
  /**
   * Chat via POST /messages. Extracts the system message into Anthropic's
   * top-level `system` field, maps tool definitions, flattens the response's
   * content blocks into text + normalized tool_calls, and converts usage.
   */
  async chat(request) {
    // Anthropic takes the system prompt as a top-level field, not a message.
    let system;
    const messages = request.messages.filter((m) => {
      if (m.role === "system") {
        system = m.content;
        return false;
      }
      return true;
    });
    let tools;
    if (request.tools && request.tools.length > 0) {
      tools = request.tools.map((t) => ({
        name: t.name,
        description: t.description,
        input_schema: t.parameters
      }));
    }
    const body = {
      model: this.model,
      messages: messages.map((m) => ({
        role: m.role === "tool" ? "user" : m.role,
        // Mapping might need adjustment based on specific Anthropic tool use syntax
        content: m.content
      })),
      max_tokens: 4096,
      system,
      tools
    };
    if (request.tool_choice) {
      if (typeof request.tool_choice === "string" && request.tool_choice !== "auto") {
        // A bare tool name forces that specific tool.
        body.tool_choice = { type: "tool", name: request.tool_choice };
      } else if (typeof request.tool_choice === "object") {
        body.tool_choice = request.tool_choice;
      } else if (request.tool_choice === "auto") {
        body.tool_choice = { type: "auto" };
      }
    }
    const response = await this.fetch("/messages", {
      method: "POST",
      body: JSON.stringify(body)
    });
    const data = await response.json();
    // Flatten content blocks: text concatenates; tool_use becomes tool_calls.
    let content = "";
    const tool_calls = [];
    for (const block of data.content) {
      if (block.type === "text") {
        content += block.text;
      } else if (block.type === "tool_use") {
        tool_calls.push({
          id: block.id,
          name: block.name,
          arguments: block.input
        });
      }
    }
    const usage = data.usage ? {
      prompt_tokens: data.usage.input_tokens,
      completion_tokens: data.usage.output_tokens,
      total_tokens: data.usage.input_tokens + data.usage.output_tokens
    } : void 0;
    return {
      message: {
        role: "assistant",
        content
      },
      tool_calls: tool_calls.length > 0 ? tool_calls : void 0,
      usage,
      raw: data
    };
  }
  /**
   * Stream a single-prompt completion over SSE, yielding content deltas and
   * exactly one terminal "end" chunk.
   * Fix: previously a "message_stop" event yielded an "end" chunk and the
   * post-loop fallback then yielded a SECOND "end" chunk, so consumers saw
   * the terminator twice. We now return immediately on "message_stop"
   * (mirroring OllamaChatModel's handling of `done`); the trailing yield
   * remains only as a fallback for streams that close without message_stop.
   */
  async *stream(request) {
    const response = await this.fetch("/messages", {
      method: "POST",
      body: JSON.stringify({
        model: this.model,
        messages: [{ role: "user", content: request.prompt }],
        max_tokens: 4096,
        stream: true
      })
    });
    if (!response.body) throw new Error("No response body");
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = "";
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      // Keep the last (possibly partial) line in the buffer for the next read.
      const lines = buffer.split("\n");
      buffer = lines.pop() || "";
      for (const line of lines) {
        if (!line.startsWith("data: ")) continue;
        const dataStr = line.slice(6).trim();
        if (dataStr === "[DONE]") continue;
        try {
          const event = JSON.parse(dataStr);
          if (event.type === "content_block_delta" && event.delta?.type === "text_delta") {
            const delta = event.delta.text;
            yield {
              type: "content",
              content: delta,
              delta
            };
          } else if (event.type === "message_stop") {
            yield { type: "end", finish_reason: "stop" };
            return;
          }
        } catch (e) {
          // Ignore non-JSON / partial SSE payloads; the stream continues.
        }
      }
    }
    // Fallback for streams that close without an explicit message_stop.
    yield { type: "end", finish_reason: "stop" };
  }
};
|
|
382
|
+
|
|
383
|
+
// src/models/ollama.ts
|
|
384
|
+
// src/models/ollama.ts
// Chat-model adapter for a local Ollama server (default http://localhost:11434).
var OllamaChatModel = class extends ChatModel {
  provider = "ollama";
  model;
  baseUrl;
  /**
   * @param config.model   Model tag; defaults to "llama3".
   * @param config.baseUrl Server root; defaults to the local daemon port.
   */
  constructor(config = {}) {
    super();
    this.model = config.model || "llama3";
    this.baseUrl = config.baseUrl || "http://localhost:11434";
  }
  // Wrapper over global fetch that sets the JSON content type and converts
  // non-2xx responses into thrown Errors.
  async fetch(path, options) {
    const response = await fetch(`${this.baseUrl}${path}`, {
      ...options,
      headers: {
        "Content-Type": "application/json",
        ...options.headers
      }
    });
    if (!response.ok) {
      const error = await response.text();
      throw new Error(`Ollama API Error ${response.status}: ${error}`);
    }
    return response;
  }
  // Completion is implemented as a single-user-message chat.
  async complete(request) {
    const chatResp = await this.chat({
      messages: [{ role: "user", content: request.prompt }],
      metadata: request.metadata
    });
    return {
      content: chatResp.message.content,
      usage: chatResp.usage,
      raw: chatResp.raw,
      trace_id: chatResp.trace_id
    };
  }
  /**
   * Non-streaming chat via POST /api/chat.
   * Note: only model + messages are forwarded; request.tools/tool_choice
   * (if present) are not sent to Ollama by this adapter.
   */
  async chat(request) {
    const response = await this.fetch("/api/chat", {
      method: "POST",
      body: JSON.stringify({
        model: this.model,
        messages: request.messages,
        stream: false
      })
    });
    const data = await response.json();
    const message = data.message;
    // Ollama reports token counts as prompt_eval_count / eval_count.
    const usage = data.eval_count ? {
      prompt_tokens: data.prompt_eval_count || 0,
      completion_tokens: data.eval_count || 0,
      total_tokens: (data.prompt_eval_count || 0) + (data.eval_count || 0)
    } : void 0;
    return {
      message: {
        role: message.role,
        content: message.content || ""
      },
      usage,
      raw: data
    };
  }
  /**
   * Streaming single-prompt completion. Ollama streams newline-delimited
   * JSON objects; each carries a message delta until `done` is true.
   */
  async *stream(request) {
    const response = await this.fetch("/api/chat", {
      method: "POST",
      body: JSON.stringify({
        model: this.model,
        messages: [{ role: "user", content: request.prompt }],
        stream: true
      })
    });
    if (!response.body) throw new Error("No response body");
    const reader = response.body.getReader();
    const decoder = new TextDecoder();
    let buffer = "";
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      buffer += decoder.decode(value, { stream: true });
      // Keep the last (possibly partial) line buffered for the next read.
      const lines = buffer.split("\n");
      buffer = lines.pop() || "";
      for (const line of lines) {
        if (!line.trim()) continue;
        try {
          const chunk = JSON.parse(line);
          if (chunk.done) {
            yield { type: "end", finish_reason: "stop" };
            return;
          }
          const delta = chunk.message?.content;
          if (delta) {
            yield {
              type: "content",
              content: delta,
              delta
            };
          }
        } catch (e) {
          // Ignore partial/non-JSON lines; the stream continues.
        }
      }
    }
    // Fallback for streams that close without an explicit done marker.
    yield { type: "end", finish_reason: "stop" };
  }
};
|
|
486
|
+
|
|
487
|
+
// src/tools/index.ts
|
|
488
|
+
// Namespace bag for the `tools` sub-module, populated via the
// bundler-generated __export helper.
var tools_exports = {};
__export(tools_exports, {
  ApiCallTool: () => ApiCallTool,
  CalculatorTool: () => CalculatorTool,
  Tool: () => Tool
});
|
|
494
|
+
|
|
495
|
+
// src/tools/calculator.ts
|
|
496
|
+
import { z } from "zod";
|
|
497
|
+
// src/tools/calculator.ts
var CalculatorSchema = z.object({
  expression: z.string().describe("The mathematical expression to evaluate (e.g., '2 + 2 * 5')")
});
/**
 * Recursive-descent evaluator for +, -, *, /, unary +/-, parentheses and
 * decimal numbers, with conventional precedence.
 * Replaces the previous eval() call: even over a sanitized character set,
 * eval is an unnecessary code-execution primitive in library code.
 * @throws {Error} on malformed input (unbalanced parens, stray tokens).
 */
function evaluateArithmetic(expression) {
  let pos = 0;
  const skipSpaces = () => {
    while (expression[pos] === " ") pos++;
  };
  // expr := term (('+' | '-') term)*
  function parseExpr() {
    let value = parseTerm();
    skipSpaces();
    while (expression[pos] === "+" || expression[pos] === "-") {
      const op = expression[pos++];
      const rhs = parseTerm();
      value = op === "+" ? value + rhs : value - rhs;
      skipSpaces();
    }
    return value;
  }
  // term := factor (('*' | '/') factor)*
  function parseTerm() {
    let value = parseFactor();
    skipSpaces();
    while (expression[pos] === "*" || expression[pos] === "/") {
      const op = expression[pos++];
      const rhs = parseFactor();
      value = op === "*" ? value * rhs : value / rhs;
      skipSpaces();
    }
    return value;
  }
  // factor := '(' expr ')' | ('+' | '-') factor | number
  function parseFactor() {
    skipSpaces();
    if (expression[pos] === "(") {
      pos++;
      const value = parseExpr();
      skipSpaces();
      if (expression[pos++] !== ")") throw new Error("Unbalanced parentheses");
      return value;
    }
    if (expression[pos] === "-") {
      pos++;
      return -parseFactor();
    }
    if (expression[pos] === "+") {
      pos++;
      return parseFactor();
    }
    const start = pos;
    while (pos < expression.length && "0123456789.".includes(expression[pos])) pos++;
    if (start === pos) throw new Error("Invalid expression");
    return Number(expression.slice(start, pos));
  }
  const result = parseExpr();
  skipSpaces();
  if (pos < expression.length) throw new Error("Invalid expression");
  return result;
}
var CalculatorTool = new Tool({
  name: "calculator",
  description: "Perform basic mathematical calculations.",
  schema: CalculatorSchema,
  execute: async (args) => {
    try {
      // Strip anything outside the arithmetic alphabet before parsing.
      const safeExpression = args.expression.replace(/[^0-9+\-*/(). ]/g, "");
      return String(evaluateArithmetic(safeExpression));
    } catch (e) {
      return `Error: ${e.message}`;
    }
  }
});
|
|
513
|
+
|
|
514
|
+
// src/tools/api_call.ts
|
|
515
|
+
import { z as z2 } from "zod";
|
|
516
|
+
// src/tools/api_call.ts
// Generic HTTP tool: lets the model call an arbitrary URL and returns the
// parsed JSON body when the server says it is JSON, otherwise raw text.
var ApiCallSchema = z2.object({
  url: z2.string().describe("The URL to call"),
  method: z2.string().default("GET").describe("The HTTP method (GET, POST, etc.)"),
  headers: z2.record(z2.string()).optional().describe("HTTP headers"),
  body: z2.any().optional().describe("Request body")
});
var ApiCallTool = new Tool({
  name: "api_call",
  description: "Make an HTTP API call to a remote service.",
  schema: ApiCallSchema,
  execute: async ({ url, method, headers, body }) => {
    const payload = body ? JSON.stringify(body) : undefined;
    const response = await fetch(url, { method, headers, body: payload });
    const contentType = response.headers.get("content-type");
    const isJson = contentType?.includes("application/json");
    return isJson ? response.json() : response.text();
  }
});
|
|
539
|
+
|
|
540
|
+
// src/agents/index.ts
|
|
541
|
+
// Namespace bag for the `agents` sub-module, populated via the
// bundler-generated __export helper.
var agents_exports = {};
__export(agents_exports, {
  ReActAgent: () => ReActAgent,
  ToolAgent: () => ToolAgent
});
|
|
546
|
+
|
|
547
|
+
// src/graph/index.ts
|
|
548
|
+
// Namespace bag for the `graph` sub-module, populated via the
// bundler-generated __export helper.
var graph_exports = {};
__export(graph_exports, {
  CompiledGraph: () => CompiledGraph,
  END: () => END,
  StateGraph: () => StateGraph
});
|
|
554
|
+
|
|
555
|
+
// src/graph/state_graph.ts
|
|
556
|
+
// src/graph/state_graph.ts
// Sentinel node name that terminates graph execution.
var END = "__end__";
// Executable form of a StateGraph: walks nodes from the entrypoint,
// threading state through each node function until END (or maxSteps).
var CompiledGraph = class {
  constructor(config) {
    this.config = config;
  }
  /**
   * Run the graph. `state` is overlaid on the configured initial state.
   * Conditional routers take precedence over static edges; a missing edge
   * or an END target returns the current state.
   * @throws {Error} on an unknown node or when maxSteps (default 100) is hit.
   */
  async invoke(state, options = {}) {
    const { initialState, nodes, edges, conditionalEdges, entrypoint } = this.config;
    const maxSteps = options.maxSteps ?? 100;
    let node = entrypoint;
    let snapshot = { ...initialState, ...state };
    for (let step = 0; step < maxSteps; step++) {
      const handler = nodes.get(node);
      if (!handler) throw new Error(`Unknown node: ${node}`);
      snapshot = await handler(snapshot);
      const route = conditionalEdges.get(node);
      if (route) {
        const target = route(snapshot);
        if (target === END) return snapshot;
        node = target;
      } else {
        const target = edges.get(node);
        if (!target || target === END) return snapshot;
        node = target;
      }
    }
    throw new Error("Graph exceeded max steps");
  }
};
|
|
583
|
+
// Fluent builder for a CompiledGraph: collect nodes and edges, then compile.
var StateGraph = class {
  nodes = new Map();
  edges = new Map();
  conditionalEdges = new Map();
  entrypoint;
  constructor(initialState) {
    this.initialState = initialState;
  }
  /** Register a named node function. Chainable. */
  addNode(name, fn) {
    this.nodes.set(name, fn);
    return this;
  }
  /** Register a static edge from one node to the next. Chainable. */
  addEdge(from, to) {
    this.edges.set(from, to);
    return this;
  }
  /** Register a state-dependent router for a node. Chainable. */
  addConditionalEdge(from, router) {
    this.conditionalEdges.set(from, router);
    return this;
  }
  /** Set the node execution starts from. Chainable. */
  setEntryPoint(name) {
    this.entrypoint = name;
    return this;
  }
  /**
   * Freeze the builder into an executable CompiledGraph.
   * @throws {Error} if no entrypoint was set.
   */
  compile() {
    if (!this.entrypoint) throw new Error("Entrypoint not set");
    const { initialState, nodes, edges, conditionalEdges, entrypoint } = this;
    return new CompiledGraph({ initialState, nodes, edges, conditionalEdges, entrypoint });
  }
};
|
|
618
|
+
|
|
619
|
+
// src/prompts/index.ts
|
|
620
|
+
// Namespace bag for the `prompts` sub-module, populated via the
// bundler-generated __export helper.
var prompts_exports = {};
__export(prompts_exports, {
  ChatPromptTemplate: () => ChatPromptTemplate,
  FewShotPrompt: () => FewShotPrompt,
  PromptRegistry: () => PromptRegistry,
  PromptTemplate: () => PromptTemplate
});
|
|
627
|
+
|
|
628
|
+
// src/prompts/template.ts
|
|
629
|
+
// src/prompts/template.ts
// A string template with `{key}` placeholders substituted at format() time.
var PromptTemplate = class {
  /** @param config - { template: string, variables?: string[] } */
  constructor(config) {
    this.config = config;
  }
  /**
   * Substitute every `{key}` occurrence with the corresponding value.
   * Unknown placeholders are left intact.
   * Fix: the previous `new RegExp(\`{${key}}\`, "g")` approach broke when a
   * key contained regex metacharacters, and expanded `$&`/`$1` patterns
   * appearing in values. `replaceAll` with a literal token and a function
   * replacement avoids both.
   */
  format(variables) {
    let result = this.config.template;
    for (const [key, value] of Object.entries(variables)) {
      result = result.replaceAll(`{${key}}`, () => String(value));
    }
    return result;
  }
};
|
|
641
|
+
// A list-of-messages template; each message's content carries `{key}`
// placeholders substituted at format() time.
var ChatPromptTemplate = class {
  /** @param config - { messages: Array<{ role, content }> } */
  constructor(config) {
    this.config = config;
  }
  /**
   * Return a new message list with placeholders substituted in each content.
   * Fix: same as PromptTemplate — replaced the RegExp-based substitution,
   * which broke on regex metacharacters in keys and expanded `$&`/`$1`
   * sequences in values, with literal `replaceAll` + function replacement.
   */
  format(variables) {
    return this.config.messages.map((msg) => {
      let content = msg.content;
      for (const [key, value] of Object.entries(variables)) {
        content = content.replaceAll(`{${key}}`, () => String(value));
      }
      return { role: msg.role, content };
    });
  }
};
|
|
655
|
+
|
|
656
|
+
// src/prompts/registry.ts
|
|
657
|
+
// src/prompts/registry.ts
// Versioned prompt storage backed by a generic document-style db adapter.
var PromptRegistry = class {
  /** @param db - adapter exposing upsert/findOne/findMany({ model, where, ... }). */
  constructor(db) {
    this.db = db;
  }
  tableName = "langfn_prompts";
  /**
   * Persist a PromptTemplate under `name:version` (idempotent upsert).
   * @param options - { version, description?, tags? }
   */
  async save(name, prompt, options) {
    const record = {
      id: `${name}:${options.version}`,
      name,
      version: options.version,
      template: prompt.config.template,
      variables: prompt.config.variables || [],
      description: options.description,
      tags: options.tags || [],
      createdAt: Date.now(),
      updatedAt: Date.now()
    };
    await this.db.upsert({
      model: this.tableName,
      where: [{ field: "id", operator: "eq", value: record.id }],
      create: record,
      update: record
    });
  }
  /**
   * Load a prompt by name, optionally pinned to a version.
   * Returns null when nothing matches.
   * Fix: removed the dead `version` local (computed but never used).
   * NOTE(review): without an explicit version this returns whichever record
   * findOne yields first — there is no "latest" ordering here; confirm
   * whether callers expect newest-version semantics.
   */
  async load(name, options = {}) {
    const where = options.version ? [{ field: "id", operator: "eq", value: `${name}:${options.version}` }] : [{ field: "name", operator: "eq", value: name }];
    const record = await this.db.findOne({
      model: this.tableName,
      where
    });
    if (!record) return null;
    return new PromptTemplate({
      template: record.template,
      variables: record.variables
    });
  }
  /**
   * List stored prompt records, optionally keeping only those carrying at
   * least one of the given tags (filtering happens client-side).
   */
  async list(tags) {
    const where = [];
    const records = await this.db.findMany({
      model: this.tableName,
      where
    });
    if (!tags || tags.length === 0) return records;
    return records.filter(
      (record) => tags.some((tag) => record.tags.includes(tag))
    );
  }
};
|
|
706
|
+
|
|
707
|
+
// src/prompts/few-shot.ts
|
|
708
|
+
// src/prompts/few-shot.ts
// Builds a prefix + worked-examples + suffix prompt, then substitutes
// `{key}` placeholders across the assembled text.
var FewShotPrompt = class {
  /** @param config - { prefix, suffix, examples: [{input, output}], exampleSeparator? } */
  constructor(config) {
    this.config = config;
  }
  /**
   * Render the few-shot prompt with the given variables substituted.
   * Fix: same as PromptTemplate — replaced the RegExp-based substitution,
   * which broke on regex metacharacters in keys and expanded `$&`/`$1`
   * sequences in values, with literal `replaceAll` + function replacement.
   */
  format(variables) {
    const separator = this.config.exampleSeparator ?? "\n\n";
    const exampleStrs = this.config.examples.map(
      (ex) => `Input: ${ex.input}
Output: ${ex.output}`
    );
    let res = `${this.config.prefix}${separator}${exampleStrs.join(separator)}${separator}${this.config.suffix}`;
    for (const [key, value] of Object.entries(variables)) {
      res = res.replaceAll(`{${key}}`, () => String(value));
    }
    return res;
  }
};
|
|
725
|
+
|
|
726
|
+
// src/memory/index.ts
|
|
727
|
+
// Namespace bag for the `memory` sub-module, populated via the
// bundler-generated __export helper.
var memory_exports = {};
__export(memory_exports, {
  BufferMemory: () => BufferMemory,
  SummaryMemory: () => SummaryMemory
});
|
|
732
|
+
|
|
733
|
+
// src/memory/buffer.ts
|
|
734
|
+
// src/memory/buffer.ts
// Sliding-window conversation memory: keeps only the most recent
// `maxMessages` entries (default 10), dropping the oldest on overflow.
var BufferMemory = class {
  messages = [];
  maxMessages;
  /** @param options.maxMessages - window size; defaults to 10. */
  constructor(options = {}) {
    this.maxMessages = options.maxMessages ?? 10;
  }
  /** Append one message, trimming the front when over capacity. */
  async add(message) {
    this.messages.push(message);
    const overflow = this.messages.length - this.maxMessages;
    if (overflow > 0) {
      this.messages = this.messages.slice(-this.maxMessages);
    }
  }
  /** Append several messages in order, applying the same trimming. */
  async extend(messages) {
    for (const entry of messages) {
      await this.add(entry);
    }
  }
  /** Snapshot of the current window (a defensive copy). */
  async get() {
    return this.messages.slice();
  }
  /** Drop everything. */
  async clear() {
    this.messages = [];
  }
};
|
|
758
|
+
|
|
759
|
+
// src/memory/summary.ts
|
|
760
|
+
// src/memory/summary.ts
// Conversation memory that compresses history: once more than 10 messages
// accumulate, they are summarized via the LangFn client and cleared.
var SummaryMemory = class {
  /**
   * @param lang - LangFn client used to generate summaries.
   * @param options.maxTokens - stored but not otherwise used in this class;
   *   presumably intended as a summarization budget — TODO confirm.
   * @param options.summaryPrompt - instruction prepended to the history.
   */
  constructor(lang, options = {}) {
    this.lang = lang;
    this.maxTokens = options.maxTokens ?? 2e3;
    this.summaryPrompt = options.summaryPrompt ?? "Summarize the following conversation history concisely, retaining all key information:";
  }
  // Unsummarized recent messages.
  messages = [];
  // Rolling summary of everything already compacted; undefined until first run.
  summary;
  maxTokens;
  summaryPrompt;
  /** Append a message; triggers summarization past 10 buffered messages. */
  async add(message) {
    this.messages.push(message);
    if (this.messages.length > 10) {
      await this.summarize();
    }
  }
  /**
   * Fold the buffered messages (and any prior summary) into a new summary
   * via one completion call, then empty the buffer.
   */
  async summarize() {
    const history = this.messages.map((m) => `${m.role}: ${m.content}`).join("\n");
    const prompt = `${this.summaryPrompt}

Existing Summary: ${this.summary || "None"}

New Messages:
${history}`;
    const response = await this.lang.complete(prompt);
    this.summary = response.content;
    this.messages = [];
  }
  /**
   * Current context: a system message carrying the summary (when one
   * exists) followed by the unsummarized messages.
   */
  async get() {
    const history = [];
    if (this.summary) {
      history.push({ role: "system", content: `Previous conversation summary: ${this.summary}` });
    }
    history.push(...this.messages);
    return history;
  }
  /** Reset both the buffer and the rolling summary. */
  async clear() {
    this.messages = [];
    this.summary = void 0;
  }
};
|
|
801
|
+
|
|
802
|
+
// src/rag/index.ts
|
|
803
|
+
// Namespace bag for the `rag` sub-module, populated via the
// bundler-generated __export helper.
var rag_exports = {};
__export(rag_exports, {
  DbVectorStore: () => DbVectorStore,
  Embeddings: () => Embeddings,
  InMemoryVectorStore: () => InMemoryVectorStore,
  OpenAIEmbeddings: () => OpenAIEmbeddings,
  Retriever: () => Retriever,
  VectorStore: () => VectorStore
});
|
|
812
|
+
|
|
813
|
+
// src/rag/memory.ts
|
|
814
|
+
// src/rag/memory.ts
// Brute-force in-memory vector store: parallel arrays of documents and
// their embedding vectors, searched by cosine similarity.
var InMemoryVectorStore = class extends VectorStore {
  /** @param embeddings - Embeddings implementation used for docs and queries. */
  constructor(embeddings) {
    super();
    this.embeddings = embeddings;
  }
  documents = [];
  // vectors[i] is the embedding of documents[i].
  vectors = [];
  /** Embed and append documents (no deduplication). */
  async addDocuments(documents) {
    const texts = documents.map((doc) => doc.content);
    const vectors = await this.embeddings.embedDocuments(texts);
    this.documents.push(...documents);
    this.vectors.push(...vectors);
  }
  /**
   * Top-k (default 4) cosine-similarity search over all stored documents.
   * `options.filter` keeps only docs whose metadata matches every key/value
   * exactly; results carry a `score` field.
   */
  async search(query, options = {}) {
    const k = options.k ?? 4;
    const queryVector = await this.embeddings.embedQuery(query);
    const scores = [];
    for (let i = 0; i < this.vectors.length; i++) {
      const docVector = this.vectors[i];
      const doc = this.documents[i];
      if (options.filter) {
        let match = true;
        for (const [key, value] of Object.entries(options.filter)) {
          if (doc.metadata[key] !== value) {
            match = false;
            break;
          }
        }
        if (!match) continue;
      }
      const score = this.cosineSimilarity(queryVector, docVector);
      scores.push({ score, doc });
    }
    // Sort descending by similarity and keep the best k.
    return scores.sort((a, b) => b.score - a.score).slice(0, k).map((item) => ({ ...item.doc, score: item.score }));
  }
  // Standard cosine similarity; returns 0 for zero-magnitude vectors.
  // Assumes v1 and v2 have equal length — TODO confirm upstream guarantees.
  cosineSimilarity(v1, v2) {
    let dotProduct = 0;
    let magnitude1 = 0;
    let magnitude2 = 0;
    for (let i = 0; i < v1.length; i++) {
      dotProduct += v1[i] * v2[i];
      magnitude1 += v1[i] * v1[i];
      magnitude2 += v2[i] * v2[i];
    }
    magnitude1 = Math.sqrt(magnitude1);
    magnitude2 = Math.sqrt(magnitude2);
    if (magnitude1 === 0 || magnitude2 === 0) return 0;
    return dotProduct / (magnitude1 * magnitude2);
  }
};
|
|
864
|
+
|
|
865
|
+
// src/rag/db-vector-store.ts
|
|
866
|
+
var DbVectorStore = class extends VectorStore {
  /**
   * Vector store backed by a generic database adapter.
   * @param {object} db - adapter exposing `createMany` and `findMany`.
   * @param {Embeddings} embeddings - embedding backend.
   * @param {string} [namespace="default"] - logical partition for records.
   */
  constructor(db, embeddings, namespace = "default") {
    super();
    this.db = db;
    this.embeddings = embeddings;
    this.namespace = namespace;
  }
  tableName = "langfn_documents";
  /**
   * Embed documents and persist them as rows in `tableName`.
   * @param {Array<{content: string, metadata: object}>} documents
   * @returns {Promise<void>}
   */
  async addDocuments(documents) {
    const texts = documents.map((doc) => doc.content);
    const vectors = await this.embeddings.embedDocuments(texts);
    const records = documents.map((doc, i) => ({
      // crypto.randomUUID() is collision-resistant, unlike the previous
      // Math.random()-derived short ids.
      id: crypto.randomUUID(),
      content: doc.content,
      embedding: vectors[i],
      metadata: doc.metadata,
      namespace: this.namespace,
      createdAt: Date.now()
      // NOTE(review): createdAt here is milliseconds while ResponseCache
      // stores seconds — confirm consumers expect this difference.
    }));
    await this.db.createMany({
      model: this.tableName,
      data: records
    });
  }
  /**
   * Similarity search: fetch up to 100 candidate rows from the db
   * (namespace plus optional equality filters), then rank them in-process
   * by cosine similarity and return the top `options.k` (default 4).
   * NOTE(review): filter keys are matched against top-level record
   * fields, not `record.metadata` — confirm the adapter resolves them.
   * @param {string} query
   * @param {{k?: number, filter?: object}} [options]
   * @returns {Promise<Array<{content, metadata, score}>>}
   */
  async search(query, options = {}) {
    const k = options.k ?? 4;
    const where = [{ field: "namespace", operator: "eq", value: this.namespace }];
    if (options.filter) {
      for (const [key, value] of Object.entries(options.filter)) {
        where.push({ field: key, operator: "eq", value });
      }
    }
    const records = await this.db.findMany({
      model: this.tableName,
      where,
      limit: 100
      // Get some candidates; ranking happens in-process below.
    });
    if (records.length === 0) return [];
    const queryVector = await this.embeddings.embedQuery(query);
    const scored = records.map((record) => ({
      doc: { content: record.content, metadata: record.metadata },
      score: this.cosineSimilarity(queryVector, record.embedding)
    }));
    return scored.sort((a, b) => b.score - a.score).slice(0, k).map((item) => ({ ...item.doc, score: item.score }));
  }
  /**
   * Cosine similarity of two equal-length vectors; 0 when either vector
   * has zero magnitude.
   * @param {number[]} v1
   * @param {number[]} v2
   * @returns {number}
   */
  cosineSimilarity(v1, v2) {
    let dotProduct = 0;
    let magnitude1 = 0;
    let magnitude2 = 0;
    for (let i = 0; i < v1.length; i++) {
      dotProduct += v1[i] * v2[i];
      magnitude1 += v1[i] * v1[i];
      magnitude2 += v2[i] * v2[i];
    }
    magnitude1 = Math.sqrt(magnitude1);
    magnitude2 = Math.sqrt(magnitude2);
    if (magnitude1 === 0 || magnitude2 === 0) return 0;
    return dotProduct / (magnitude1 * magnitude2);
  }
};
|
|
927
|
+
|
|
928
|
+
// src/storage/index.ts
|
|
929
|
+
// Lazy re-export surface for the storage submodule (names resolved at
// access time via __export getters).
var storage_exports = {};
__export(storage_exports, {
  ResponseCache: () => ResponseCache
});
|
|
933
|
+
|
|
934
|
+
// src/storage/cache.ts
|
|
935
|
+
var ResponseCache = class {
|
|
936
|
+
constructor(db, ttl = 3600) {
|
|
937
|
+
this.db = db;
|
|
938
|
+
this.ttl = ttl;
|
|
939
|
+
}
|
|
940
|
+
tableName = "langfn_cache";
|
|
941
|
+
async generateKey(prompt, model, provider, metadata) {
|
|
942
|
+
const keyContent = `${provider}:${model}:${prompt}:${JSON.stringify(metadata || {})}`;
|
|
943
|
+
const msgUint8 = new TextEncoder().encode(keyContent);
|
|
944
|
+
const hashBuffer = await crypto.subtle.digest("SHA-256", msgUint8);
|
|
945
|
+
const hashArray = Array.from(new Uint8Array(hashBuffer));
|
|
946
|
+
return hashArray.map((b) => b.toString(16).padStart(2, "0")).join("");
|
|
947
|
+
}
|
|
948
|
+
async get(prompt, model, provider, metadata) {
|
|
949
|
+
const key = await this.generateKey(prompt, model, provider, metadata);
|
|
950
|
+
const where = [{ field: "key", operator: "eq", value: key }];
|
|
951
|
+
const record = await this.db.findOne({
|
|
952
|
+
model: this.tableName,
|
|
953
|
+
where
|
|
954
|
+
});
|
|
955
|
+
if (!record) return null;
|
|
956
|
+
if (record.expiresAt < Math.floor(Date.now() / 1e3)) {
|
|
957
|
+
await this.db.delete({ model: this.tableName, where });
|
|
958
|
+
return null;
|
|
959
|
+
}
|
|
960
|
+
return record.value;
|
|
961
|
+
}
|
|
962
|
+
async set(prompt, model, provider, value, metadata) {
|
|
963
|
+
const key = await this.generateKey(prompt, model, provider, metadata);
|
|
964
|
+
const expiresAt = Math.floor(Date.now() / 1e3) + this.ttl;
|
|
965
|
+
const data = {
|
|
966
|
+
key,
|
|
967
|
+
value,
|
|
968
|
+
expiresAt,
|
|
969
|
+
createdAt: Math.floor(Date.now() / 1e3)
|
|
970
|
+
};
|
|
971
|
+
await this.db.upsert({
|
|
972
|
+
model: this.tableName,
|
|
973
|
+
where: [{ field: "key", operator: "eq", value: key }],
|
|
974
|
+
create: data,
|
|
975
|
+
update: data
|
|
976
|
+
});
|
|
977
|
+
}
|
|
978
|
+
};
|
|
979
|
+
|
|
980
|
+
// src/utils/index.ts
|
|
981
|
+
// Lazy re-export surface for the utils submodule (names resolved at
// access time via __export getters).
var utils_exports = {};
__export(utils_exports, {
  countMessagesTokens: () => countMessagesTokens,
  countTokens: () => countTokens
});
|
|
986
|
+
|
|
987
|
+
// src/utils/token-counter.ts
|
|
988
|
+
/**
 * Rough token-count heuristic: about one token per 4 characters,
 * rounded up. The `model` parameter is currently unused and kept only
 * for API compatibility.
 * @param {string} text
 * @param {string} [model="gpt-4"]
 * @returns {number} estimated token count.
 */
function countTokens(text, model = "gpt-4") {
  const CHARS_PER_TOKEN = 4;
  return Math.ceil(text.length / CHARS_PER_TOKEN);
}
|
|
991
|
+
/**
 * Approximate token count for a chat transcript: the per-message content
 * estimate plus a fixed 4-token overhead per message and 3 trailing
 * tokens. (Presumably mirrors OpenAI-style chat token accounting —
 * TODO confirm against the provider's tokenizer.)
 * @param {Array<{content?: string}>} messages
 * @param {string} [model="gpt-4"]
 * @returns {number} estimated token count.
 */
function countMessagesTokens(messages, model = "gpt-4") {
  const PER_MESSAGE_OVERHEAD = 4;
  const REPLY_PRIMING = 3;
  const total = messages.reduce(
    (sum, msg) => sum + countTokens(msg.content || "", model) + PER_MESSAGE_OVERHEAD,
    0
  );
  return total + REPLY_PRIMING;
}
|
|
999
|
+
|
|
1000
|
+
// src/observability/index.ts
|
|
1001
|
+
// Lazy re-export surface for the observability submodule (names resolved
// at access time via __export getters).
var observability_exports = {};
__export(observability_exports, {
  TraceStorage: () => TraceStorage,
  Tracer: () => Tracer
});
|
|
1006
|
+
|
|
1007
|
+
// src/observability/tracer.ts
|
|
1008
|
+
var Tracer = class {
|
|
1009
|
+
watch;
|
|
1010
|
+
constructor(options = {}) {
|
|
1011
|
+
this.watch = options.watch;
|
|
1012
|
+
}
|
|
1013
|
+
async trace(name, metadata, fn) {
|
|
1014
|
+
const traceId = Math.random().toString(36).substring(7);
|
|
1015
|
+
if (this.watch) {
|
|
1016
|
+
this.watch.track(name, { phase: "start", traceId, ...metadata });
|
|
1017
|
+
}
|
|
1018
|
+
try {
|
|
1019
|
+
const result = await fn();
|
|
1020
|
+
if (this.watch) {
|
|
1021
|
+
this.watch.track(name, { phase: "end", traceId, ...metadata });
|
|
1022
|
+
}
|
|
1023
|
+
return result;
|
|
1024
|
+
} catch (error) {
|
|
1025
|
+
if (this.watch) {
|
|
1026
|
+
this.watch.track(name, { phase: "error", traceId, error: error.message, ...metadata });
|
|
1027
|
+
}
|
|
1028
|
+
throw error;
|
|
1029
|
+
}
|
|
1030
|
+
}
|
|
1031
|
+
};
|
|
1032
|
+
|
|
1033
|
+
// src/observability/storage.ts
|
|
1034
|
+
/**
 * Persistence layer for trace records, backed by a generic db adapter.
 */
var TraceStorage = class {
  tableName = "langfn_traces";
  /** @param {object} db - adapter exposing `create`, `findMany`, `findOne`. */
  constructor(db) {
    this.db = db;
  }
  /** Persist one trace record as-is. */
  async save(trace) {
    await this.db.create({
      model: this.tableName,
      data: trace
    });
  }
  /**
   * List traces, newest first (by `startTime`), optionally filtered by
   * model and/or provider.
   * @param {{model?: string, provider?: string, limit?: number, offset?: number}} [options]
   */
  async findMany(options = {}) {
    const where = [];
    for (const field of ["model", "provider"]) {
      if (options[field]) {
        where.push({ field, operator: "eq", value: options[field] });
      }
    }
    return this.db.findMany({
      model: this.tableName,
      where,
      limit: options.limit,
      offset: options.offset,
      orderBy: [{ field: "startTime", direction: "desc" }]
    });
  }
  /** Fetch a single trace by id (adapter decides the miss value). */
  async findOne(id) {
    return this.db.findOne({
      model: this.tableName,
      where: [{ field: "id", operator: "eq", value: id }]
    });
  }
};
|
|
1068
|
+
|
|
1069
|
+
// src/http/index.ts
|
|
1070
|
+
// Lazy re-export surface for the http submodule (names resolved at
// access time via __export getters).
var http_exports = {};
__export(http_exports, {
  createLangFnRouter: () => createLangFnRouter
});
|
|
1074
|
+
|
|
1075
|
+
// src/http/routes.ts
|
|
1076
|
+
/**
 * Build framework-agnostic HTTP route definitions for a LangFn instance.
 * Routes: GET /health, POST /complete, POST /chat. Every handler returns
 * a standard `Response` with a JSON body and status 200.
 * @param {LangFn} lang - client whose `complete`/`chat` back the routes.
 * @returns {Array<{method: string, path: string, handler: Function}>}
 */
function createLangFnRouter(lang) {
  // Shared JSON response helper (all routes answer 200 + application/json).
  const json = (payload) => new Response(JSON.stringify(payload), {
    status: 200,
    headers: { "Content-Type": "application/json" }
  });
  const health = async () => json({ status: "ok", name: "langfn", version: "0.1.0" });
  const complete = async (request) => {
    const body = await request.json();
    const result = await lang.complete(body.prompt, { metadata: body.metadata });
    return json({
      content: result.content,
      trace_id: result.trace_id,
      usage: result.usage
    });
  };
  const chat = async (request) => {
    const body = await request.json();
    const result = await lang.chat(body.messages, {
      tools: body.tools,
      tool_choice: body.tool_choice,
      metadata: body.metadata
    });
    return json({
      message: result.message,
      tool_calls: result.tool_calls,
      trace_id: result.trace_id,
      usage: result.usage
    });
  };
  return [
    { method: "GET", path: "/health", handler: health },
    { method: "POST", path: "/complete", handler: complete },
    { method: "POST", path: "/chat", handler: chat }
  ];
}
|
|
1127
|
+
|
|
1128
|
+
// src/structured/index.ts
|
|
1129
|
+
// Lazy re-export surface for the structured-output submodule (names
// resolved at access time via __export getters).
var structured_exports = {};
__export(structured_exports, {
  StructuredOutput: () => StructuredOutput
});
|
|
1133
|
+
|
|
1134
|
+
// src/structured/output.ts
|
|
1135
|
+
var StructuredOutput = class {
  /**
   * Parses free-form model output into a schema-validated object.
   * @param {{parse: (data: any) => any}} schema - zod-style schema whose
   *   `parse` validates (and may transform) the decoded JSON.
   */
  constructor(schema) {
    this.schema = schema;
  }
  /**
   * Extract the first-brace-to-last-brace span from `text`, JSON-decode
   * it, and validate it against the schema.
   * NOTE: the regex is greedy, so text containing multiple top-level
   * objects is captured as one span and may fail to decode.
   * @param {string} text - raw model output.
   * @returns {any} schema-validated data.
   * @throws {Error} when no object is found, the JSON is invalid, or the
   *   schema rejects the data (original error preserved as `cause`).
   */
  parse(text) {
    const jsonMatch = text.match(/\{[\s\S]*\}/);
    if (!jsonMatch) {
      throw new Error("No JSON object found in output");
    }
    try {
      const data = JSON.parse(jsonMatch[0]);
      return this.schema.parse(data);
    } catch (e) {
      // Keep the original failure reachable for debugging via `cause`.
      throw new Error(`Failed to parse structured output: ${e.message}`, { cause: e });
    }
  }
  /** @returns the underlying schema instance. */
  getSchema() {
    return this.schema;
  }
};
|
|
1155
|
+
export {
|
|
1156
|
+
Chain,
|
|
1157
|
+
ChatModel,
|
|
1158
|
+
LangFn,
|
|
1159
|
+
Tool,
|
|
1160
|
+
agents_exports as agents,
|
|
1161
|
+
graph_exports as graph,
|
|
1162
|
+
http_exports as http,
|
|
1163
|
+
langfn,
|
|
1164
|
+
memory_exports as memory,
|
|
1165
|
+
models_exports as models,
|
|
1166
|
+
observability_exports as observability,
|
|
1167
|
+
orchestration_exports as orchestration,
|
|
1168
|
+
prompts_exports as prompts,
|
|
1169
|
+
rag_exports as rag,
|
|
1170
|
+
storage_exports as storage,
|
|
1171
|
+
structured_exports as structured,
|
|
1172
|
+
tools_exports as tools,
|
|
1173
|
+
utils_exports as utils
|
|
1174
|
+
};
|