extrait 0.3.1 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -128,6 +128,16 @@ const result = await llm.structured(
128
128
  .user`Analyze: """${input}"""`
129
129
  );
130
130
 
131
+ // Multi-turn conversation
132
+ const conversationResult = await llm.structured(
133
+ Schema,
134
+ prompt()
135
+ .system`You are an expert assistant.`
136
+ .user`Hello`
137
+ .assistant`Hi, how can I help?`
138
+ .user`Analyze: """${input}"""`
139
+ );
140
+
131
141
  // With options
132
142
  const result = await llm.structured(
133
143
  Schema,
@@ -136,6 +146,7 @@ const result = await llm.structured(
136
146
  mode: "loose",
137
147
  selfHeal: 1,
138
148
  debug: true,
149
+ systemPrompt: "You are a helpful assistant.",
139
150
  stream: {
140
151
  to: "stdout",
141
152
  onData: (event) => {
@@ -145,10 +156,15 @@ const result = await llm.structured(
145
156
  }
146
157
  },
147
158
  },
159
+ request: {
160
+ signal: abortController.signal, // optional AbortSignal
161
+ },
148
162
  }
149
163
  );
150
164
  ```
151
165
 
166
+ `prompt()` builds an ordered `messages` payload. Use `prompt\`...\`` for a single string prompt, or the fluent builder for multi-turn conversations. The `LLMMessage` type is exported if you need to type your own message arrays.
167
+
152
168
  ### Result Object
153
169
 
154
170
  ```typescript
@@ -219,6 +235,10 @@ const result = await llm.structured(
219
235
  transformToolOutput: (output, execution) => {
220
236
  return { ...output, source: execution.name };
221
237
  },
238
+ // Optional: transform tool arguments before the tool is called
239
+ transformToolArguments: (args, call) => args,
240
+ // Optional: custom error message when an unknown tool is called
241
+ unknownToolError: (toolName) => `Tool "${toolName}" is not available.`,
222
242
  },
223
243
  }
224
244
  );
@@ -233,6 +253,7 @@ Run examples with: `bun run dev <example-name>`
233
253
  Available examples:
234
254
  - `streaming` - Real LLM streaming + snapshot self-check ([streaming.ts](examples/streaming.ts))
235
255
  - `streaming-with-tools` - Real text streaming with MCP tools + self-check ([streaming-with-tools.ts](examples/streaming-with-tools.ts))
256
+ - `abort-signal` - Start a generation, then cancel it quickly with `AbortSignal` ([abort-signal.ts](examples/abort-signal.ts))
236
257
  - `simple` - Basic structured output with streaming ([simple.ts](examples/simple.ts))
237
258
  - `sentiment-analysis` - Enum validation, strict mode ([sentiment-analysis.ts](examples/sentiment-analysis.ts))
238
259
  - `data-extraction` - Complex nested schemas, self-healing ([data-extraction.ts](examples/data-extraction.ts))
@@ -243,6 +264,7 @@ Pass arguments after the example name:
243
264
  ```bash
244
265
  bun run dev streaming
245
266
  bun run dev streaming-with-tools
267
+ bun run dev abort-signal 120 "JSON cancellation demo"
246
268
  bun run dev simple "Bun.js runtime"
247
269
  bun run dev sentiment-analysis "I love this product."
248
270
  bun run dev multi-step-reasoning "Why is the sky blue?"