@recombine-ai/engine 0.4.0 → 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/build/lib/ai.d.ts +29 -16
- package/build/lib/ai.d.ts.map +1 -1
- package/build/lib/ai.js +32 -21
- package/changelog.md +18 -5
- package/package.json +1 -1
package/build/lib/ai.d.ts
CHANGED
@@ -5,8 +5,8 @@ export declare namespace AIEngine {
     /**
      * Represents a basic model name for LLMs.
      */
-    type BasicModel = 'o3-mini-2025-01-31' | 'o1-preview-2024-09-12' | 'gpt-4o-2024-11-20' | 'o1-2024-12-17' | (string & {});
-    interface ProgrammaticStep {
+    export type BasicModel = 'o3-mini-2025-01-31' | 'o1-preview-2024-09-12' | 'gpt-4o-2024-11-20' | 'o1-2024-12-17' | (string & {});
+    export interface ProgrammaticStep {
         /** Step name for debugging */
         name: string;
         /** Determines if the step should be run or not */
@@ -16,7 +16,7 @@ export declare namespace AIEngine {
         /** Error handler called if an error occurred during in `execute` function */
         onError: (error: string) => Promise<unknown>;
     }
-    interface LLMStep {
+    export interface LLMStep {
         /** Step name for debugging */
         name: string;
         /** Determines if the step should be run or not */
@@ -29,10 +29,12 @@ export declare namespace AIEngine {
          */
         prompt: string | File;
         /**
-         *
-         *
+         * Defines the expected structure of the LLM's output.
+         * Accepts either a boolean (for plain text or JSON responses) or a ZodSchema, which is automatically
+         * converted to a JSON schema. When provided, the LLM's response is validated and parsed according
+         * to this schema ensuring reliable structured output.
          */
-
+        json: boolean | ZodSchema;
         /** Exclude directives from message history passed to the LLM for this step */
         ignoreDirectives?: boolean;
         /**
@@ -78,15 +80,16 @@ export declare namespace AIEngine {
    /**
     * A useful trace of a step execution. It's properties are filled during the execution. There is no guarantee that any of them will be filled.
     */
-    type StepTrace = {
+    export type StepTrace = {
        renderedPrompt?: string;
        receivedContext?: Record<string, unknown>;
        receivedPrompt?: string;
+       stringifiedConversation?: string;
    };
    /**
     * An AI workflow composed of steps.
     */
-    interface Workflow {
+    export interface Workflow {
        /**
         * Terminates the workflow, preventing further steps from being executed.
         */
@@ -165,7 +168,7 @@ export declare namespace AIEngine {
     * console.log(response)
     * ```
     */
-    interface AIEngine {
+    export interface AIEngine {
        /**
         * Creates a workflow from a sequence of steps.
         * @param steps - An array of LLM or programmatic steps to be executed in order.
@@ -190,6 +193,13 @@ export declare namespace AIEngine {
         * @returns A new Conversation object.
         */
        createConversation: (messages?: Message[]) => Conversation;
+       /**
+        * Renders a prompt string using Nunjucks templating engine.
+        * @param prompt - The prompt string to render.
+        * @param context - Optional context object to use for rendering the prompt.
+        * @returns The rendered prompt string.
+        */
+       renderPrompt: typeof renderPrompt;
    }
    /**
     * Represents a conversation between a user and an AI agent.
@@ -215,7 +225,7 @@ export declare namespace AIEngine {
     * // System: Ask for account details
     * ```
     */
-    interface Conversation {
+    export interface Conversation {
        /**
         * Sets the name of the user in the conversation to be used in {@link toString}.
         * @param name - The name to set for the user.
@@ -245,7 +255,7 @@ export declare namespace AIEngine {
         * // System: Ask the user for their preferred date and time for the reservation
         * ```
         */
-       addDirective: (message: string) => void;
+       addDirective: (message: string, formatter?: (message: Message) => string) => void;
        /**
         * Adds a message from a specified sender to the conversation.
         * @param name - The sender of the message.
@@ -256,7 +266,7 @@ export declare namespace AIEngine {
         * Sets a custom formatter for directive messages.
         * @param formatter - A function that takes a Message and returns a formatted string.
         */
-
+       setDefaultDirectiveFormatter: (formatter: (message: Message) => string) => void;
        /**
         * Sets a custom formatter for proposed messages.
         * @param formatter - A function that takes a message string and returns a formatted string.
@@ -282,21 +292,22 @@ export declare namespace AIEngine {
     * Represents a message in a conversation between a user and an agent, or a system message.
     * Messages can contain text and optionally an image URL. To be used in the {@link Conversation} interface.
     */
-    interface Message {
+    export interface Message {
        /** The sender of the message, which can be one of the following: 'user', 'agent', or 'system' */
        sender: 'user' | 'agent' | 'system';
        /** The text content of the message */
        text: string;
        /** Optional URL of an image associated with the message */
        imageUrl?: string;
+       formatter?: (message: Message) => string;
    }
-    interface File {
+    export interface File {
        content: () => Promise<string>;
    }
    /**
     * Configuration options for the Engine.
     */
-    interface EngineConfig {
+    export interface EngineConfig {
        /**
         * Optional token storage object that provides access to authentication tokens.
         * @property {object} tokenStorage - Object containing method to retrieve token.
@@ -344,6 +355,8 @@ export declare namespace AIEngine {
     * const reply = await workflow.run(conversation);
     * ```
     */
-    function createAIEngine(cfg?: EngineConfig): AIEngine;
+    export function createAIEngine(cfg?: EngineConfig): AIEngine;
+    function renderPrompt(prompt: string, context?: Record<string, unknown>): string;
+    export {};
 }
 //# sourceMappingURL=ai.d.ts.map
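The new `json` field replaces the `schema` field from 0.4.0 (see the changelog below) and now also accepts a boolean. A minimal sketch of the two modes described in the JSDoc above; the import specifier and the step values are assumptions for illustration, not taken from this diff:

import { z } from 'zod';
// Entry point assumed for illustration; the declarations above live in build/lib/ai.d.ts.
import { AIEngine } from '@recombine-ai/engine';

// ZodSchema mode: the schema is converted to a JSON schema and the LLM's
// response is validated and parsed against it.
const classifySchema = z.object({
    message: z.string(),
    reasons: z.array(z.string()),
});

const structuredStep: Pick<AIEngine.LLMStep, 'name' | 'prompt' | 'json'> = {
    name: 'classify-intent',
    prompt: 'Classify the intent of the last user message.',
    json: classifySchema,
};

// Boolean mode: true requests a JSON object, false requests plain text.
const plainTextStep: Pick<AIEngine.LLMStep, 'name' | 'prompt' | 'json'> = {
    name: 'small-talk',
    prompt: 'Reply conversationally.',
    json: false,
};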
package/build/lib/ai.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"ai.d.ts","sourceRoot":"","sources":["../../src/lib/ai.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,SAAS,EAAE,MAAM,KAAK,CAAA;AAE/B,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAA;AACrC,OAAO,EAAc,UAAU,EAAE,MAAM,gBAAgB,CAAA;AAGvD,yBAAiB,QAAQ,CAAC;IACtB;;OAEG;IACH,
+
{"version":3,"file":"ai.d.ts","sourceRoot":"","sources":["../../src/lib/ai.ts"],"names":[],"mappings":"AAMA,OAAO,EAAE,SAAS,EAAE,MAAM,KAAK,CAAA;AAE/B,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAA;AACrC,OAAO,EAAc,UAAU,EAAE,MAAM,gBAAgB,CAAA;AAGvD,yBAAiB,QAAQ,CAAC;IACtB;;OAEG;IACH,MAAM,MAAM,UAAU,GAChB,oBAAoB,GACpB,uBAAuB,GACvB,mBAAmB,GACnB,eAAe,GACf,CAAC,MAAM,GAAG,EAAE,CAAC,CAAA;IAEnB,MAAM,WAAW,gBAAgB;QAC7B,8BAA8B;QAC9B,IAAI,EAAE,MAAM,CAAA;QAEZ,kDAAkD;QAClD,KAAK,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,KAAK,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;QAE9D,0BAA0B;QAC1B,OAAO,EAAE,MAAM,OAAO,CAAC,OAAO,CAAC,CAAA;QAE/B,6EAA6E;QAC7E,OAAO,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,CAAC,CAAA;KAC/C;IAED,MAAM,WAAW,OAAO;QACpB,8BAA8B;QAC9B,IAAI,EAAE,MAAM,CAAA;QAEZ,kDAAkD;QAClD,KAAK,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,KAAK,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;QAE9D,qCAAqC;QACrC,KAAK,CAAC,EAAE,UAAU,CAAA;QAElB;;;WAGG;QACH,MAAM,EAAE,MAAM,GAAG,IAAI,CAAA;QAErB;;;;;WAKG;QACH,IAAI,EAAE,OAAO,GAAG,SAAS,CAAA;QAEzB,8EAA8E;QAC9E,gBAAgB,CAAC,EAAE,OAAO,CAAA;QAE1B;;;;;;;WAOG;QACH,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAEjC;;;;;;;;;;;;;;WAcG;QACH,OAAO,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,CAAC,CAAA;QAE5C;;;;YAII;QACJ,aAAa,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;QAE7D;;;;WAIG;QACH,WAAW,CAAC,EAAE,MAAM,CAAA;QAEpB,6FAA6F;QAC7F,OAAO,EAAE,CAAC,KAAK,EAAE,MAAM,KAAK,OAAO,CAAC,OAAO,CAAC,CAAA;KAC/C;IAED;;OAEG;IACH,MAAM,MAAM,SAAS,GAAG;QACpB,cAAc,CAAC,EAAE,MAAM,CAAC;QACxB,eAAe,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC1C,cAAc,CAAC,EAAE,MAAM,CAAC;QACxB,uBAAuB,CAAC,EAAE,MAAM,CAAA;KACnC,CAAA;IAED;;OAEG;IACH,MAAM,WAAW,QAAQ;QACrB;;WAEG;QACH,SAAS,EAAE,MAAM,IAAI,CAAA;QAErB;;;;;WAKG;QACH,GAAG,EAAE,CAAC,QAAQ,EAAE,YAAY,KAAK,OAAO,CAAC;YAAE,KAAK,EAAE,MAAM,GAAG,IAAI,CAAC;YAAC,KAAK,EAAE;gBAAE,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;aAAE,CAAA;SAAE,CAAC,CAAA;QAEhH;;;WAGG;QACH,QAAQ,EAAE,CAAC,IAAI,EAAE,OAAO,GAAG,gBAAgB,KAAK,IAAI,CAAA;QAEpD;;;WAGG;QACH,UAAU,EAAE,CAAC,QAAQ,EAAE,MAAM,OAAO,CAAC,OAAO,CAAC,KAAK,IAAI,CAAA;KACzD;IAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;OAkDG;IACH,MAAM,WAAW,QAAQ;QACrB;;;;WAIG;QACH,cAAc,EAAE,CAAC,GAAG,KAAK,EAAE,KAAK,CAAC,OAAO,GAAG,gBAAgB,CAAC,KAAK,OAAO,CAAC,QAAQ,CAAC,CAAC;QAEnF;;;;WAIG;QACH,UAAU,EAAE,CAAC,CAAC,SAAS,OAAO,GAAG,gBAAgB,EAAE,IAAI,EAAE,CAAC,KAAK,CAAC,CAAC;QAEjE;;;;WAIG;QACH,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAC;QAEjC;;;;WAIG;QACH,kBAAkB,EAAE,CAAC,QAAQ,CAAC,EAAE,OAAO,EAAE,KAAK,YAAY,CAAC;QAE3D;;;;;WAKG;QACH,YAAY,EAAE,OAAO,YAAY,CAAA;KACpC;IAED;;;;;;;;;;;;;;;;;;;;;;;OAuBG;IACH,MAAM,WAAW,YAAY;QACzB;;;WAGG;QACH,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,CAAA;QAE/B;;;WAGG;QACH,YAAY,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,CAAA;QAEhC;;;;WAIG;QACH,QAAQ,EAAE,CAAC,gBAAgB,CAAC,EAAE,OAAO,KAAK,MAAM,CAAA;QAEhD;;;;;;;;;;;;WAYG;QACH,YAAY,EAAE,CAAC,OAAO,EAAE,MAAM,EAAE,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,MAAM,KAAK,IAAI,CAAA;QAEjF;;;;WAIG;QACH,UAAU,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,QAAQ,CAAC,EAAE,OAAO,EAAE,MAAM,KAAK,IAAI,CAAA;QAE9D;;;WAGG;QACH,4BAA4B,EAAE,CAAC,SAAS,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,MAAM,KAAK,IAAI,CAAA;QAE/E;;;WAGG;QACH,2BAA2B,EAAE,CAAC,SAAS,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,MAAM,KAAK,IAAI,CAAA;QAE7E;;;WAGG;QACH,gBAAgB,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAA;QAE3C;;;WAGG;QACH,gBAAgB,EAAE,MAAM,MAAM,GAAG,IAAI,CAAA;QAErC;;;WAGG;QACH,UAAU,EAAE,MAAM,OAAO,EAAE,CAAA;KAC9B;IAED;;;OAGG;IACH,MAAM,WAAW,OAAO;QACpB,iGAAiG;QACjG,MAAM,EAAE,MAAM,GAAG,OAAO,GAAG,QAAQ,CAAA;QACnC,sCAAsC;QACtC,IAAI,EAAE,MAAM,CAAA;QACZ,2DAA2D;QAC3D,QAAQ,CAAC,EAAE,MAAM,CAAA;QACjB,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,MAAM,CAAA;KAC3C;IAED,MAAM,W
AAW,IAAI;QACjB,OAAO,EAAE,MAAM,OAAO,CAAC,MAAM,CAAC,CAAA;KACjC;IAED;;OAEG;IACH,MAAM,WAAW,YAAY;QACzB;;;;WAIG;QACH,YAAY,CAAC,EAAE;YAAE,QAAQ,EAAE,MAAM,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAA;SAAE,CAAA;QACzD;;WAEG;QACH,QAAQ,CAAC,EAAE,MAAM,CAAA;QACjB;;WAEG;QACH,MAAM,CAAC,EAAE,MAAM,CAAA;QACf;;WAEG;QACH,UAAU,CAAC,EAAE,UAAU,CAAA;KAC1B;IAED;;;;;;;;;;;;;;;;;;;;;;;;;OAyBG;IACH,MAAM,UAAU,cAAc,CAAC,GAAG,GAAE,YAAiB,GAAG,QAAQ,CAqQ/D;IAgCD,SAAS,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM,CAO/E;;CACJ"}
package/build/lib/ai.js
CHANGED
@@ -55,7 +55,7 @@ var AIEngine;
         return step;
     }
     function getConversation(messages = []) {
-        let
+        let defaultDirectivesFormatter = (message) => `${message.sender}: ${message.text}`;
         let proposedFormatter = (message) => `Proposed reply: ${message}`;
         let proposedReply = null;
         const names = {
@@ -67,7 +67,8 @@ var AIEngine;
             toString: (ignoreDirectives = false) => messages
                 .map((msg) => {
                 if (msg.sender === 'system') {
-
+                    logger.debug('formatter', msg.formatter);
+                    return ignoreDirectives ? null : (msg.formatter ? msg.formatter(msg) : defaultDirectivesFormatter(msg));
                 }
                 return `${names[msg.sender]}: ${msg.text}`;
             })
@@ -75,12 +76,12 @@ var AIEngine;
                 .join('\n') +
                 (proposedReply ? `\n${proposedFormatter(proposedReply)}` : ''),
             addMessage: (sender, text) => messages.push({ sender, text }),
-            addDirective: (message) => {
+            addDirective: (message, formatter) => {
                 logger.debug(`AI Engine: add directive: ${message}`);
-                messages.push({ sender: 'system', text: message });
+                messages.push({ sender: 'system', text: message, formatter });
             },
-
-
+            setDefaultDirectiveFormatter: (formatter) => {
+                defaultDirectivesFormatter = formatter;
             },
             setProposedMessageFormatter: (formatter) => {
                 proposedFormatter = formatter;
@@ -165,14 +166,11 @@ var AIEngine;
            stepTrace.receivedPrompt = prompt;
            logger.debug('AI Engine: context', step.context);
            logger.debug('AI Engine: messages', messages.toString(step.ignoreDirectives || false));
-
-                autoescape: true,
-                trimBlocks: true,
-                lstripBlocks: true,
-            });
-            prompt = nunjucks_1.default.renderString(prompt, step.context || {});
+            prompt = renderPrompt(prompt, step.context);
            stepTrace.renderedPrompt = prompt;
-
+            const stringifiedMessages = messages.toString(step.ignoreDirectives || false);
+            stepTrace.stringifiedConversation = stringifiedMessages;
+            response = await runLLM(apiKey, prompt, stringifiedMessages, step.json, step.model);
            if (!response) {
                throw new Error('No response from OpenAI');
            }
@@ -228,16 +226,17 @@ var AIEngine;
                attempts.set(step, 0);
            }
        }
-    async function runLLM(apiKey, systemPrompt, messages,
+    async function runLLM(apiKey, systemPrompt, messages, json, model = 'gpt-4o-2024-08-06') {
        logger.debug('AI Engine: model:', model);
        logger.debug('----------- RENDERED PROMPT ---------------');
        logger.debug(systemPrompt);
        logger.debug('-------------------------------------------');
        if (apiKey === '__TESTING__') {
            await (0, core_1.sleep)(100);
-
-                ? JSON.stringify({ message: 'canned response', reasons: [] })
-
+            if (typeof json === 'boolean') {
+                return json ? JSON.stringify({ message: 'canned response', reasons: [] }) : 'canned response';
+            }
+            return JSON.stringify({ message: 'canned response', reasons: [] });
        }
        const client = new openai_1.default({ apiKey });
        const response = await client.chat.completions.create({
@@ -245,7 +244,7 @@ var AIEngine;
                { role: 'system', content: systemPrompt },
                { role: 'user', content: messages },
            ],
-            ...getOpenAiOptions(model,
+            ...getOpenAiOptions(model, json),
        });
        if (!response.choices[0].message.content) {
            throw new Error('No response from OpenAI');
@@ -266,10 +265,11 @@ var AIEngine;
            createStep,
            loadFile,
            createConversation: getConversation,
+            renderPrompt
        };
    }
    AIEngine.createAIEngine = createAIEngine;
-    function getOpenAiOptions(model,
+    function getOpenAiOptions(model, json) {
        const options = {
            model,
        };
@@ -282,18 +282,29 @@ var AIEngine;
        else {
            options.temperature = 0.1;
        }
-        if (
+        if (typeof json !== 'boolean') {
            options.response_format = {
                type: 'json_schema',
                json_schema: {
                    name: 'detector_response',
-                    schema: (0, zod_to_json_schema_1.zodToJsonSchema)(
+                    schema: (0, zod_to_json_schema_1.zodToJsonSchema)(json),
                },
            };
        }
+        else if (json) {
+            options.response_format = { type: 'json_object' };
+        }
        else {
            options.response_format = { type: 'text' };
        }
        return options;
    }
+    function renderPrompt(prompt, context) {
+        nunjucks_1.default.configure({
+            autoescape: false,
+            trimBlocks: true,
+            lstripBlocks: true,
+        });
+        return nunjucks_1.default.renderString(prompt, context || {});
+    }
 })(AIEngine || (exports.AIEngine = AIEngine = {}));
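Taken together, these changes add per-directive formatting, a configurable default directive formatter, and expose the Nunjucks rendering step on the engine. A hedged usage sketch; the entry point, message text, and template are illustrative, while the method names come from the diff above:

// Entry point assumed for illustration.
import { AIEngine } from '@recombine-ai/engine';

const engine = AIEngine.createAIEngine();
const conversation = engine.createConversation();

conversation.addMessage('user', 'I want to move my reservation.');

// Directives are stored as system messages; the optional second argument
// controls how this particular directive is rendered by toString().
conversation.addDirective('Ask for the reservation number', (msg) => `<<${msg.text}>>`);

// Directives added without their own formatter fall back to this default
// (initially `${sender}: ${text}`).
conversation.setDefaultDirectiveFormatter((msg) => `System note: ${msg.text}`);

// renderPrompt is now part of the engine surface and runs the prompt through
// Nunjucks with autoescape off and trimBlocks/lstripBlocks on.
const rendered = engine.renderPrompt('Hello, {{ name }}!', { name: 'Ada' });
console.log(rendered); // "Hello, Ada!"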
package/changelog.md
CHANGED
@@ -1,6 +1,19 @@
 # Changelog

-### 0.
+### 0.4.0 → 0.5.0 (unstable)
+
+Breaking changes:
+
+- `schema` property replaced with `json` which can be boolean,
+- `setDirectiveFormatter` →
+
+Other changes:
+
+- `renderPrompt` method added
+- `addDirective` accepts optional formatter function
+- `formatter` optional method added to `Message`
+
+### 0.3.2 → 0.4.0 (unstable)

 Breaking changes:

@@ -22,7 +35,7 @@ Breaking changes:
 Breaking changes:

 - Break down the library into namespace: AIEngine, Scheduler
-- Models → BasicModel
-- Step → LLMStep & ProgrammaticStep
-- makeMessagesList → getConversation
-- Deprecation of shouldExecute (discouraged to use if there's no `maxAttempts` in a step)
+- `Models` → `BasicModel`
+- `Step` → `LLMStep` & `ProgrammaticStep`
+- `makeMessagesList` → `getConversation`
+- Deprecation of `shouldExecute` (discouraged to use if there's no `maxAttempts` in a step)