@recombine-ai/engine 0.3.2 → 0.5.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/build/lib/ai.d.ts CHANGED
@@ -5,8 +5,8 @@ export declare namespace AIEngine {
  /**
  * Represents a basic model name for LLMs.
  */
- type BasicModel = 'o3-mini-2025-01-31' | 'o1-preview-2024-09-12' | 'gpt-4o-2024-11-20' | 'o1-2024-12-17' | (string & {});
- interface ProgrammaticStep {
+ export type BasicModel = 'o3-mini-2025-01-31' | 'o1-preview-2024-09-12' | 'gpt-4o-2024-11-20' | 'o1-2024-12-17' | (string & {});
+ export interface ProgrammaticStep {
  /** Step name for debugging */
  name: string;
  /** Determines if the step should be run or not */
@@ -16,7 +16,7 @@ export declare namespace AIEngine {
  /** Error handler called if an error occurred during in `execute` function */
  onError: (error: string) => Promise<unknown>;
  }
- interface LLMStep {
+ export interface LLMStep {
  /** Step name for debugging */
  name: string;
  /** Determines if the step should be run or not */
@@ -29,10 +29,12 @@ export declare namespace AIEngine {
  */
  prompt: string | File;
  /**
- * Schema for structured LLM output using {@link zod https://zod.dev/}
- * library.
+ * Defines the expected structure of the LLM's output.
+ * Accepts either a boolean (for plain text or JSON responses) or a ZodSchema, which is automatically
+ * converted to a JSON schema. When provided, the LLM's response is validated and parsed according
+ * to this schema ensuring reliable structured output.
  */
- schema?: ZodSchema;
+ json: boolean | ZodSchema;
  /** Exclude directives from message history passed to the LLM for this step */
  ignoreDirectives?: boolean;
  /**
@@ -75,10 +77,19 @@ export declare namespace AIEngine {
  /** Error handler called if an error occurred during LLM API call or in `execute` function */
  onError: (error: string) => Promise<unknown>;
  }
+ /**
+ * A useful trace of a step execution. It's properties are filled during the execution. There is no guarantee that any of them will be filled.
+ */
+ export type StepTrace = {
+ renderedPrompt?: string;
+ receivedContext?: Record<string, unknown>;
+ receivedPrompt?: string;
+ stringifiedConversation?: string;
+ };
  /**
  * An AI workflow composed of steps.
  */
- interface Workflow {
+ export interface Workflow {
  /**
  * Terminates the workflow, preventing further steps from being executed.
  */
@@ -89,7 +100,12 @@ export declare namespace AIEngine {
  * @param messages - The conversation context for the workflow
  * @returns The proposed reply if workflow completes, or null if terminated
  */
- run: (messages: Conversation) => Promise<string | null>;
+ run: (messages: Conversation) => Promise<{
+ reply: string | null;
+ trace: {
+ steps: Record<string, StepTrace>;
+ };
+ }>;
  /**
  * Rewinds the workflow execution to a specific step.
  * @param step - The step to rewind to
@@ -152,7 +168,7 @@ export declare namespace AIEngine {
  * console.log(response)
  * ```
  */
- interface AIEngine {
+ export interface AIEngine {
  /**
  * Creates a workflow from a sequence of steps.
  * @param steps - An array of LLM or programmatic steps to be executed in order.
@@ -177,6 +193,13 @@ export declare namespace AIEngine {
  * @returns A new Conversation object.
  */
  createConversation: (messages?: Message[]) => Conversation;
+ /**
+ * Renders a prompt string using Nunjucks templating engine.
+ * @param prompt - The prompt string to render.
+ * @param context - Optional context object to use for rendering the prompt.
+ * @returns The rendered prompt string.
+ */
+ renderPrompt: typeof renderPrompt;
  }
  /**
  * Represents a conversation between a user and an AI agent.
@@ -202,7 +225,7 @@ export declare namespace AIEngine {
  * // System: Ask for account details
  * ```
  */
- interface Conversation {
+ export interface Conversation {
  /**
  * Sets the name of the user in the conversation to be used in {@link toString}.
  * @param name - The name to set for the user.
@@ -232,7 +255,7 @@ export declare namespace AIEngine {
  * // System: Ask the user for their preferred date and time for the reservation
  * ```
  */
- addDirective: (message: string) => void;
+ addDirective: (message: string, formatter?: (message: Message) => string) => void;
  /**
  * Adds a message from a specified sender to the conversation.
  * @param name - The sender of the message.
@@ -243,7 +266,7 @@ export declare namespace AIEngine {
  * Sets a custom formatter for directive messages.
  * @param formatter - A function that takes a Message and returns a formatted string.
  */
- setDirectiveFormatter: (formatter: (message: Message) => string) => void;
+ setDefaultDirectiveFormatter: (formatter: (message: Message) => string) => void;
  /**
  * Sets a custom formatter for proposed messages.
  * @param formatter - A function that takes a message string and returns a formatted string.
@@ -269,21 +292,22 @@ export declare namespace AIEngine {
  * Represents a message in a conversation between a user and an agent, or a system message.
  * Messages can contain text and optionally an image URL. To be used in the {@link Conversation} interface.
  */
- interface Message {
+ export interface Message {
  /** The sender of the message, which can be one of the following: 'user', 'agent', or 'system' */
  sender: 'user' | 'agent' | 'system';
  /** The text content of the message */
  text: string;
  /** Optional URL of an image associated with the message */
  imageUrl?: string;
+ formatter?: (message: Message) => string;
  }
- interface File {
+ export interface File {
  content: () => Promise<string>;
  }
  /**
  * Configuration options for the Engine.
  */
- interface EngineConfig {
+ export interface EngineConfig {
  /**
  * Optional token storage object that provides access to authentication tokens.
  * @property {object} tokenStorage - Object containing method to retrieve token.
@@ -331,6 +355,8 @@ export declare namespace AIEngine {
  * const reply = await workflow.run(conversation);
  * ```
  */
- function createAIEngine(cfg?: EngineConfig): AIEngine;
+ export function createAIEngine(cfg?: EngineConfig): AIEngine;
+ function renderPrompt(prompt: string, context?: Record<string, unknown>): string;
+ export {};
  }
  //# sourceMappingURL=ai.d.ts.map
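
The main API shifts in this file: every public member of the `AIEngine` namespace is now explicitly exported, the optional `schema?: ZodSchema` field becomes a required `json: boolean | ZodSchema`, and `Workflow.run` resolves to `{ reply, trace }` with one `StepTrace` per step. A minimal sketch of a step written against the new typings, assuming the namespace is re-exported from the package root (the step name, prompt, and `ReviewSchema` are illustrative):

```ts
import { z } from 'zod';
import { AIEngine } from '@recombine-ai/engine';

const engine = AIEngine.createAIEngine();

// Illustrative schema; when a ZodSchema is passed, the compiled JS below
// requests structured output via a json_schema response_format.
const ReviewSchema = z.object({
  message: z.string(),
  reasons: z.array(z.string()),
});

const reviewStep = engine.createStep({
  name: 'review',
  prompt: 'Review the proposed reply and list any problems.',
  // 0.5.0: `json` replaces `schema`. Pass a ZodSchema for structured output,
  // `true` for a plain JSON object, or `false` for free-form text.
  json: ReviewSchema,
  execute: async (reply: string) => {
    console.log('model output:', reply);
  },
  onError: async (error: string) => {
    console.error('review step failed:', error);
  },
});
```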
package/build/lib/ai.js CHANGED
@@ -55,7 +55,7 @@ var AIEngine;
  return step;
  }
  function getConversation(messages = []) {
- let directivesFormatter = (message) => `${message.sender}: ${message.text}`;
+ let defaultDirectivesFormatter = (message) => `${message.sender}: ${message.text}`;
  let proposedFormatter = (message) => `Proposed reply: ${message}`;
  let proposedReply = null;
  const names = {
@@ -67,7 +67,8 @@ var AIEngine;
  toString: (ignoreDirectives = false) => messages
  .map((msg) => {
  if (msg.sender === 'system') {
- return ignoreDirectives ? null : directivesFormatter(msg);
+ logger.debug('formatter', msg.formatter);
+ return ignoreDirectives ? null : (msg.formatter ? msg.formatter(msg) : defaultDirectivesFormatter(msg));
  }
  return `${names[msg.sender]}: ${msg.text}`;
  })
@@ -75,12 +76,12 @@ var AIEngine;
  .join('\n') +
  (proposedReply ? `\n${proposedFormatter(proposedReply)}` : ''),
  addMessage: (sender, text) => messages.push({ sender, text }),
- addDirective: (message) => {
+ addDirective: (message, formatter) => {
  logger.debug(`AI Engine: add directive: ${message}`);
- messages.push({ sender: 'system', text: message });
+ messages.push({ sender: 'system', text: message, formatter });
  },
- setDirectiveFormatter: (formatter) => {
- directivesFormatter = formatter;
+ setDefaultDirectiveFormatter: (formatter) => {
+ defaultDirectivesFormatter = formatter;
  },
  setProposedMessageFormatter: (formatter) => {
  proposedFormatter = formatter;
@@ -102,6 +103,12 @@ var AIEngine;
  let currentStep = 0;
  let beforeEachCallback = async () => Promise.resolve(null);
  const attempts = new Map();
+ const trace = {
+ steps: steps.reduce((acc, step) => {
+ acc[step.name] = {};
+ return acc;
+ }, {})
+ };
  return {
  terminate: () => {
  logger.debug('AI Engine: Terminating conversation...');
@@ -119,7 +126,8 @@ var AIEngine;
  await action('started');
  logger.debug(`AI Engine: Step: ${step.name}`);
  if ('prompt' in step) {
- await runStep(step, messages);
+ const stepTrace = await runStep(step, messages);
+ trace.steps[step.name] = stepTrace;
  }
  else {
  await runDumbStep(step, messages);
@@ -127,7 +135,10 @@ var AIEngine;
  await action('completed');
  }
  }
- return shouldRun ? messages.getProposedReply() : null;
+ return {
+ reply: shouldRun ? messages.getProposedReply() : null,
+ trace
+ };
  },
  rewindTo: (step) => {
  const index = steps.indexOf(step);
@@ -147,20 +158,19 @@ var AIEngine;
  if (!apiKey) {
  throw new Error('OpenAI API key is not set');
  }
+ const stepTrace = {};
  try {
+ stepTrace.receivedContext = step.context;
  let response = null;
  let prompt = typeof step.prompt === 'string' ? step.prompt : await step.prompt.content();
+ stepTrace.receivedPrompt = prompt;
  logger.debug('AI Engine: context', step.context);
  logger.debug('AI Engine: messages', messages.toString(step.ignoreDirectives || false));
- if (step.context) {
- nunjucks_1.default.configure({
- autoescape: true,
- trimBlocks: true,
- lstripBlocks: true,
- });
- prompt = nunjucks_1.default.renderString(prompt, step.context);
- }
- response = await runLLM(apiKey, prompt, messages.toString(step.ignoreDirectives || false), step.schema, step.model);
+ prompt = renderPrompt(prompt, step.context);
+ stepTrace.renderedPrompt = prompt;
+ const stringifiedMessages = messages.toString(step.ignoreDirectives || false);
+ stepTrace.stringifiedConversation = stringifiedMessages;
+ response = await runLLM(apiKey, prompt, stringifiedMessages, step.json, step.model);
  if (!response) {
  throw new Error('No response from OpenAI');
  }
@@ -180,11 +190,13 @@ var AIEngine;
  logger.debug(`AI Engine: replying`);
  await step.execute(response);
  }
+ return stepTrace;
  }
  catch (error) {
  // FIXME: this doesn't terminate the workflow
  await step.onError(error.message);
  shouldRun = false;
+ return stepTrace;
  }
  }
  async function runDumbStep(step, messages) {
@@ -214,16 +226,17 @@ var AIEngine;
  attempts.set(step, 0);
  }
  }
- async function runLLM(apiKey, systemPrompt, messages, schema, model = 'gpt-4o-2024-08-06') {
+ async function runLLM(apiKey, systemPrompt, messages, json, model = 'gpt-4o-2024-08-06') {
  logger.debug('AI Engine: model:', model);
  logger.debug('----------- RENDERED PROMPT ---------------');
  logger.debug(systemPrompt);
  logger.debug('-------------------------------------------');
  if (apiKey === '__TESTING__') {
  await (0, core_1.sleep)(100);
- return schema
- ? JSON.stringify({ message: 'canned response', reasons: [] })
- : 'canned response';
+ if (typeof json === 'boolean') {
+ return json ? JSON.stringify({ message: 'canned response', reasons: [] }) : 'canned response';
+ }
+ return JSON.stringify({ message: 'canned response', reasons: [] });
  }
  const client = new openai_1.default({ apiKey });
  const response = await client.chat.completions.create({
@@ -231,7 +244,7 @@ var AIEngine;
  { role: 'system', content: systemPrompt },
  { role: 'user', content: messages },
  ],
- ...getOpenAiOptions(model, schema),
+ ...getOpenAiOptions(model, json),
  });
  if (!response.choices[0].message.content) {
  throw new Error('No response from OpenAI');
@@ -252,10 +265,11 @@ var AIEngine;
  createStep,
  loadFile,
  createConversation: getConversation,
+ renderPrompt
  };
  }
  AIEngine.createAIEngine = createAIEngine;
- function getOpenAiOptions(model, schema) {
+ function getOpenAiOptions(model, json) {
  const options = {
  model,
  };
@@ -268,18 +282,29 @@ var AIEngine;
  else {
  options.temperature = 0.1;
  }
- if (schema) {
+ if (typeof json !== 'boolean') {
  options.response_format = {
  type: 'json_schema',
  json_schema: {
  name: 'detector_response',
- schema: (0, zod_to_json_schema_1.zodToJsonSchema)(schema),
+ schema: (0, zod_to_json_schema_1.zodToJsonSchema)(json),
  },
  };
  }
+ else if (json) {
+ options.response_format = { type: 'json_object' };
+ }
  else {
  options.response_format = { type: 'text' };
  }
  return options;
  }
+ function renderPrompt(prompt, context) {
+ nunjucks_1.default.configure({
+ autoescape: false,
+ trimBlocks: true,
+ lstripBlocks: true,
+ });
+ return nunjucks_1.default.renderString(prompt, context || {});
+ }
  })(AIEngine || (exports.AIEngine = AIEngine = {}));
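
Prompt templating now goes through a single `renderPrompt` helper (Nunjucks with `autoescape: false`, `trimBlocks`, and `lstripBlocks`), which is also exposed on the engine object and runs even when a step has no `context`. A small usage sketch, with the import path assumed and a made-up template:

```ts
import { AIEngine } from '@recombine-ai/engine';

const engine = AIEngine.createAIEngine();

// renderPrompt(prompt, context?) renders the string through Nunjucks.
const rendered = engine.renderPrompt(
  'Hello {{ name }}!{% if vip %} Thanks for being a VIP.{% endif %}',
  { name: 'Ada', vip: true },
);

console.log(rendered); // "Hello Ada! Thanks for being a VIP."
```

Note that `autoescape` flipped from `true` to `false` relative to the old inline Nunjucks call, so HTML-like characters in context values are no longer entity-escaped into the rendered prompt.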
package/changelog.md CHANGED
@@ -1,5 +1,24 @@
  # Changelog

+ ### 0.4.0 → 0.5.0 (unstable)
+
+ Breaking changes:
+
+ - `schema` property replaced with `json` which can be boolean,
+ - `setDirectiveFormatter` → `setDefaultDirectiveFormatter`
+
+ Other changes:
+
+ - `renderPrompt` method added
+ - `addDirective` accepts optional formatter function
+ - `formatter` optional method added to `Message`
+
+ ### 0.3.2 → 0.4.0 (unstable)
+
+ Breaking changes:
+
+ - `await workflow.run()` now returns an object with `reply: string` and `trace` object
+
  ### 0.3.1 → 0.3.2 (unstable)

  - add `ScheduleQuery` class that provides three additional capabilities compared to `delayFactory`:
@@ -16,7 +35,7 @@
  Breaking changes:

  - Break down the library into namespace: AIEngine, Scheduler
- - Models → BasicModel
- - Step → LLMStep & ProgrammaticStep
- - makeMessagesList → getConversation
- - Deprecation of shouldExecute (discouraged to use if there's no `maxAttempts` in a step)
+ - `Models` → `BasicModel`
+ - `Step` → `LLMStep` & `ProgrammaticStep`
+ - `makeMessagesList` → `getConversation`
+ - Deprecation of `shouldExecute` (discouraged to use if there's no `maxAttempts` in a step)
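
In code, the directive-formatter changes listed for 0.5.0 look roughly like this; a sketch against the new typings, with the import path assumed and the formatters purely illustrative:

```ts
import { AIEngine } from '@recombine-ai/engine';

const engine = AIEngine.createAIEngine();
const conversation = engine.createConversation();

// Renamed from setDirectiveFormatter: sets the fallback used for every directive.
conversation.setDefaultDirectiveFormatter((msg) => `System: ${msg.text}`);

// New: addDirective takes an optional per-directive formatter that overrides
// the default (stored on the Message via its new `formatter` field).
conversation.addDirective(
  'Ask the user for their preferred date and time',
  (msg) => `<directive>${msg.text}</directive>`,
);
```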
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@recombine-ai/engine",
- "version": "0.3.2",
+ "version": "0.5.0",
  "description": "Recombine AI engine for creating conversational AI agents",
  "main": "build/index.js",
  "types": "build/index.d.ts",
package/readme.md CHANGED
@@ -60,7 +60,7 @@ const myCoordinator = engine.createStep({

  // Create and run workflow
  const workflow = await engine.createWorkflow(mainStep, myReviewer, myCoordinator)
- const response = await workflow.run(messages)
+ const { reply, trace } = await workflow.run(messages)
  ```

  ## Main concepts
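
Since `run()` no longer resolves to a bare string, callers destructure the result and can inspect the per-step trace. A self-contained sketch under the same assumptions as above (the engine still needs an API key configured before `run()` will succeed, and the step is illustrative):

```ts
import { AIEngine } from '@recombine-ai/engine';

const engine = AIEngine.createAIEngine();

const conversation = engine.createConversation([
  { sender: 'user', text: 'Hi, I would like to book a table.' },
]);

const greeter = engine.createStep({
  name: 'greeter',
  prompt: 'Propose a short, friendly reply to the user.',
  json: false,
  execute: async (reply: string) => console.log(reply),
  onError: async (error: string) => console.error(error),
});

const workflow = await engine.createWorkflow(greeter);

// 0.4.0+: run() resolves to { reply, trace } instead of string | null.
const { reply, trace } = await workflow.run(conversation);
console.log(reply ?? '(workflow terminated)');

// Every StepTrace field is optional, so guard before reading it.
for (const [name, stepTrace] of Object.entries(trace.steps)) {
  if (stepTrace.renderedPrompt) {
    console.log(`${name} rendered prompt:`, stepTrace.renderedPrompt);
  }
}
```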