extrait 0.4.0 → 0.5.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -70,6 +70,7 @@ const llm = createLLM({
70
70
  mode: "loose" | "strict", // loose allows repair
71
71
  selfHeal: 0 | 1 | 2, // retry attempts
72
72
  debug: false, // show repair logs
73
+ timeout: { request: 30_000 }, // optional default timeouts
73
74
  },
74
75
  });
75
76
  ```
@@ -128,6 +129,16 @@ const result = await llm.structured(
128
129
  .user`Analyze: """${input}"""`
129
130
  );
130
131
 
132
+ // Multi-turn conversation
133
+ const conversationResult = await llm.structured(
134
+ Schema,
135
+ prompt()
136
+ .system`You are an expert assistant.`
137
+ .user`Hello`
138
+ .assistant`Hi, how can I help?`
139
+ .user`Analyze: """${input}"""`
140
+ );
141
+
131
142
  // With options
132
143
  const result = await llm.structured(
133
144
  Schema,
@@ -136,6 +147,7 @@ const result = await llm.structured(
136
147
  mode: "loose",
137
148
  selfHeal: 1,
138
149
  debug: true,
150
+ systemPrompt: "You are a helpful assistant.",
139
151
  stream: {
140
152
  to: "stdout",
141
153
  onData: (event) => {
@@ -145,10 +157,54 @@ const result = await llm.structured(
145
157
  }
146
158
  },
147
159
  },
160
+ request: {
161
+ signal: abortController.signal, // optional AbortSignal
162
+ },
163
+ timeout: {
164
+ request: 30_000, // ms per LLM HTTP request
165
+ tool: 10_000, // ms per MCP tool call
166
+ },
148
167
  }
149
168
  );
150
169
  ```
151
170
 
171
+ `prompt()` builds an ordered `messages` payload. Use ``prompt`...` `` for a single string prompt, or the fluent builder for multi-turn conversations. The `LLMMessage` type is exported if you need to type your own message arrays.
172
+
173
+ ### Images (multimodal)
174
+
175
+ Use `images()` to build base64 image content blocks for vision-capable models.
176
+
177
+ ```typescript
178
+ import { images } from "extrait";
179
+ import { readFileSync } from "fs";
180
+
181
+ const base64 = readFileSync("photo.png").toString("base64");
182
+
183
+ // Single image
184
+ const result = await llm.structured(Schema, {
185
+ messages: [
186
+ {
187
+ role: "user",
188
+ content: [
189
+ { type: "text", text: "Describe this image." },
190
+ ...images({ base64, mimeType: "image/png" }),
191
+ ],
192
+ },
193
+ ],
194
+ });
195
+
196
+ // Multiple images
197
+ const content = [
198
+ { type: "text", text: "Compare these two images." },
199
+ ...images([
200
+ { base64: base64A, mimeType: "image/png" },
201
+ { base64: base64B, mimeType: "image/jpeg" },
202
+ ]),
203
+ ];
204
+ ```
205
+
206
+ `images()` accepts a single `{ base64, mimeType }` object or an array, and always returns an `LLMImageContent[]` that spreads directly into a content array.
207
+
152
208
  ### Result Object
153
209
 
154
210
  ```typescript
@@ -219,6 +275,10 @@ const result = await llm.structured(
219
275
  transformToolOutput: (output, execution) => {
220
276
  return { ...output, source: execution.name };
221
277
  },
278
+ // Optional: transform tool arguments before the tool is called
279
+ transformToolArguments: (args, call) => args,
280
+ // Optional: custom error message when an unknown tool is called
281
+ unknownToolError: (toolName) => `Tool "${toolName}" is not available.`,
222
282
  },
223
283
  }
224
284
  );
@@ -226,6 +286,34 @@ const result = await llm.structured(
226
286
  await mcpClient.close?.();
227
287
  ```
228
288
 
289
+ ### Timeouts
290
+
291
+ Use `timeout` to set per-request and per-tool-call time limits without managing `AbortSignal` manually.
292
+
293
+ ```typescript
294
+ const result = await llm.structured(Schema, prompt`...`, {
295
+ timeout: {
296
+ request: 30_000, // abort the LLM HTTP request after 30s
297
+ tool: 5_000, // abort each MCP tool call after 5s
298
+ },
299
+ });
300
+ ```
301
+
302
+ Both fields are optional. `timeout.request` creates an internal signal via `AbortSignal.timeout()`; it is ignored if you also pass `request.signal` (your signal takes precedence). `timeout.tool` wraps each MCP client transparently.
303
+
304
+ You can also set defaults on the client:
305
+
306
+ ```typescript
307
+ const llm = createLLM({
308
+ provider: "openai-compatible",
309
+ model: "gpt-5-nano",
310
+ transport: { apiKey: process.env.LLM_API_KEY },
311
+ defaults: {
312
+ timeout: { request: 60_000 },
313
+ },
314
+ });
315
+ ```
316
+
229
317
  ## Examples
230
318
 
231
319
  Run examples with: `bun run dev <example-name>`
@@ -234,17 +322,20 @@ Available examples:
234
322
  - `streaming` - Real LLM streaming + snapshot self-check ([streaming.ts](examples/streaming.ts))
235
323
  - `streaming-with-tools` - Real text streaming with MCP tools + self-check ([streaming-with-tools.ts](examples/streaming-with-tools.ts))
236
324
  - `abort-signal` - Start a generation then cancel quickly with `AbortSignal` ([abort-signal.ts](examples/abort-signal.ts))
325
+ - `timeout` - Set per-request and per-tool timeouts via the `timeout` option ([timeout.ts](examples/timeout.ts))
237
326
  - `simple` - Basic structured output with streaming ([simple.ts](examples/simple.ts))
238
327
  - `sentiment-analysis` - Enum validation, strict mode ([sentiment-analysis.ts](examples/sentiment-analysis.ts))
239
328
  - `data-extraction` - Complex nested schemas, self-healing ([data-extraction.ts](examples/data-extraction.ts))
240
329
  - `multi-step-reasoning` - Chained structured calls ([multi-step-reasoning.ts](examples/multi-step-reasoning.ts))
241
330
  - `calculator-tool` - MCP tool integration ([calculator-tool.ts](examples/calculator-tool.ts))
331
+ - `image-analysis` - Multimodal structured extraction from an image file ([image-analysis.ts](examples/image-analysis.ts))
242
332
 
243
333
  Pass arguments after the example name:
244
334
  ```bash
245
335
  bun run dev streaming
246
336
  bun run dev streaming-with-tools
247
337
  bun run dev abort-signal 120 "JSON cancellation demo"
338
+ bun run dev timeout 5000
248
339
  bun run dev simple "Bun.js runtime"
249
340
  bun run dev sentiment-analysis "I love this product."
250
341
  bun run dev multi-step-reasoning "Why is the sky blue?"
@@ -0,0 +1,8 @@
1
+ import type { LLMImageContent } from "./types";
2
+ export interface ImageInput {
3
+ base64: string;
4
+ mimeType: string;
5
+ }
6
+ export type ImageSize = "low" | "mid" | "high" | "xhigh" | "raw" | number;
7
+ export declare function images(input: ImageInput | ImageInput[]): LLMImageContent[];
8
+ export declare function resizeImage(source: string | Uint8Array | ArrayBuffer, size: ImageSize, mimeType?: string): Promise<ImageInput>;