@recombine-ai/engine 0.8.1 → 0.8.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.github/workflows/linting.yml +3 -0
- package/build/index.d.ts +1 -0
- package/build/index.d.ts.map +1 -1
- package/build/index.js +1 -0
- package/build/lib/ai.d.ts +9 -3
- package/build/lib/ai.d.ts.map +1 -1
- package/build/lib/ai.js +24 -43
- package/build/lib/llm-adapters/index.d.ts +2 -0
- package/build/lib/llm-adapters/index.d.ts.map +1 -0
- package/build/lib/llm-adapters/index.js +17 -0
- package/build/lib/llm-adapters/openai.d.ts +10 -0
- package/build/lib/llm-adapters/openai.d.ts.map +1 -0
- package/build/lib/llm-adapters/openai.js +36 -0
- package/changelog.md +54 -45
- package/eslint.config.mjs +0 -1
- package/package.json +2 -2
- package/readme.md +20 -21
package/build/index.d.ts
CHANGED
package/build/index.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,UAAU,CAAA;AAExB,cAAc,gBAAgB,CAAA;AAE9B,cAAc,kBAAkB,CAAA;AAEhC,cAAc,aAAa,CAAA;AAE3B,cAAc,iBAAiB,CAAA"}
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../src/index.ts"],"names":[],"mappings":"AAAA,cAAc,UAAU,CAAA;AAExB,cAAc,gBAAgB,CAAA;AAE9B,cAAc,kBAAkB,CAAA;AAEhC,cAAc,aAAa,CAAA;AAE3B,cAAc,iBAAiB,CAAA;AAE/B,cAAc,oBAAoB,CAAA"}
package/build/index.js
CHANGED
package/build/lib/ai.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import { ZodTypeAny } from 'zod';
+import { ZodSchema, ZodTypeAny } from 'zod';
 import { Logger } from './interfaces';
 import { SendAction } from './bosun/action';
 import { PromptFile } from './prompt-fs';
@@ -8,6 +8,11 @@ import { Tracer } from './bosun';
  * Represents a basic model name for LLMs.
  */
 export type BasicModel = 'o3-mini-2025-01-31' | 'o1-preview-2024-09-12' | 'gpt-4o-2024-08-06' | 'gpt-4o-2024-11-20' | 'gpt-4.1-2025-04-14' | 'o1-2024-12-17' | (string & {});
+export interface LlmAdapter {
+    generateResponse: (systemPrompt: string, messages: string, schema?: ZodSchema) => Promise<string>;
+    /** Returns adapter's configuration/options for tracing */
+    getOptions: () => unknown;
+}
 export interface BasicStep<CTX> {
     /** Step name */
     name: string;
@@ -28,8 +33,8 @@ export interface ProgrammaticStep<CTX> extends BasicStep<CTX> {
 export interface LLMStep<CTX> extends BasicStep<CTX> {
     /** Determines if the step should be run or not */
     runIf?: (messages: Conversation, ctx: CTX) => boolean | Promise<boolean>;
-    /** LLM to use. Defaults to gpt-4o */
-    model?: BasicModel;
+    /** LLM to use. Can be a model name or an adapter. Defaults to gpt-4o */
+    model?: BasicModel | LlmAdapter;
     /**
      * Prompt can be a simple string or a link to a file, loaded with `loadFile` function which
      * takes a path to the file relative to `src/use-cases` directory. Should be Nunjucks-compatible.
@@ -300,6 +305,7 @@ export interface EngineConfig {
      * Optional token storage object that provides access to authentication tokens.
      * @property {object} tokenStorage - Object containing method to retrieve token.
      * @property {() => Promise<string | null>} tokenStorage.getToken - Function that returns a promise resolving to an authentication token or null.
+     * @deprecated
      */
     tokenStorage?: {
         getToken: () => Promise<string | null>;
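The new `LlmAdapter` interface above is the extension point introduced in this release: anything with a `generateResponse` and `getOptions` can be handed to a step's `model` field. A minimal sketch of a hand-rolled adapter follows; the `callMyModel` helper, its endpoint, and the assumption that `LlmAdapter` is re-exported from the package root are illustrative, not part of the API surface shown in the diff.

```ts
import type { ZodSchema } from 'zod';
import type { LlmAdapter } from '@recombine-ai/engine';

// Hypothetical call to an in-house completion endpoint; not part of the package.
async function callMyModel(prompt: string, input: string): Promise<string> {
    const res = await fetch('https://llm.example.internal/generate', {
        method: 'POST',
        headers: { 'content-type': 'application/json' },
        body: JSON.stringify({ prompt, input }),
    });
    return (await res.json()).text;
}

export const myAdapter: LlmAdapter = {
    // The engine passes the rendered system prompt, the stringified conversation,
    // and, for JSON steps, the Zod schema the reply is expected to satisfy.
    generateResponse: async (systemPrompt: string, messages: string, _schema?: ZodSchema) =>
        callMyModel(systemPrompt, messages),
    // Whatever this returns is stringified into step traces as the step's "model".
    getOptions: () => ({ provider: 'in-house', endpoint: 'https://llm.example.internal' }),
};
```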
package/build/lib/ai.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"ai.d.ts","sourceRoot":"","sources":["../../src/lib/ai.ts"],"names":[],"mappings":"
+
{"version":3,"file":"ai.d.ts","sourceRoot":"","sources":["../../src/lib/ai.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,KAAK,CAAA;AAC3C,OAAO,EAAE,MAAM,EAAE,MAAM,cAAc,CAAA;AACrC,OAAO,EAAc,UAAU,EAAE,MAAM,gBAAgB,CAAA;AACvD,OAAO,EAAE,UAAU,EAAE,MAAM,aAAa,CAAA;AACxC,OAAO,EAAa,UAAU,EAAE,MAAM,oBAAoB,CAAA;AAC1D,OAAO,EAAE,MAAM,EAAE,MAAM,SAAS,CAAA;AAKhC;;GAEG;AACH,MAAM,MAAM,UAAU,GAChB,oBAAoB,GACpB,uBAAuB,GACvB,mBAAmB,GACnB,mBAAmB,GACnB,oBAAoB,GACpB,eAAe,GACf,CAAC,MAAM,GAAG,EAAE,CAAC,CAAA;AAEnB,MAAM,WAAW,UAAU;IACvB,gBAAgB,EAAE,CACd,YAAY,EAAE,MAAM,EACpB,QAAQ,EAAE,MAAM,EAChB,MAAM,CAAC,EAAE,SAAS,KACjB,OAAO,CAAC,MAAM,CAAC,CAAA;IACpB,0DAA0D;IAC1D,UAAU,EAAE,MAAM,OAAO,CAAA;CAC5B;AAED,MAAM,WAAW,SAAS,CAAC,GAAG;IAC1B,iBAAiB;IACjB,IAAI,EAAE,MAAM,CAAA;IAEZ,kDAAkD;IAClD,KAAK,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,EAAE,GAAG,KAAK,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;IAExE;;;OAGG;IACH,WAAW,CAAC,EAAE,MAAM,CAAA;IAEpB,6EAA6E;IAC7E,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,EAAE,GAAG,KAAK,OAAO,CAAC,OAAO,CAAC,CAAA;CAC1D;AAED,MAAM,WAAW,gBAAgB,CAAC,GAAG,CAAE,SAAQ,SAAS,CAAC,GAAG,CAAC;IACzD,0BAA0B;IAC1B,OAAO,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,EAAE,GAAG,EAAE,QAAQ,EAAE,gBAAgB,KAAK,OAAO,CAAC,OAAO,CAAC,CAAA;CAC9F;AAED,MAAM,WAAW,OAAO,CAAC,GAAG,CAAE,SAAQ,SAAS,CAAC,GAAG,CAAC;IAChD,kDAAkD;IAClD,KAAK,CAAC,EAAE,CAAC,QAAQ,EAAE,YAAY,EAAE,GAAG,EAAE,GAAG,KAAK,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;IAExE,wEAAwE;IACxE,KAAK,CAAC,EAAE,UAAU,GAAG,UAAU,CAAA;IAE/B;;;OAGG;IACH,MAAM,EAAE,MAAM,GAAG,UAAU,CAAA;IAE3B;;OAEG;IACH,mBAAmB,CAAC,EAAE,OAAO,CAAA;CAChC;AAED,MAAM,WAAW,gBAAgB;IAC7B;;OAEG;IACH,SAAS,EAAE,MAAM,IAAI,CAAA;IAErB;;;OAGG;IACH,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,IAAI,CAAA;CACnC;AAED,MAAM,WAAW,WAAW,CAAC,GAAG,EAAE,MAAM,SAAS,UAAU,CAAE,SAAQ,OAAO,CAAC,GAAG,CAAC;IAC7E;;;;OAIG;IACH,MAAM,EAAE,MAAM,CAAA;IACd;;;;;;;;;;;;;;OAcG;IACH,OAAO,EAAE,CACL,KAAK,EAAE,GAAG,CAAC,KAAK,CAAC,MAAM,CAAC,EACxB,YAAY,EAAE,YAAY,EAC1B,GAAG,EAAE,GAAG,EACR,gBAAgB,EAAE,gBAAgB,KACjC,OAAO,CAAC,OAAO,CAAC,CAAA;CACxB;AAED,MAAM,WAAW,aAAa,CAAC,GAAG,CAAE,SAAQ,OAAO,CAAC,GAAG,CAAC;IACpD;;;;;;;;;;;;;;OAcG;IACH,OAAO,EAAE,CACL,KAAK,EAAE,MAAM,EACb,YAAY,EAAE,YAAY,EAC1B,GAAG,EAAE,GAAG,EACR,gBAAgB,CAAC,EAAE,gBAAgB,KAClC,OAAO,CAAC,OAAO,CAAC,CAAA;CACxB;AAED,KAAK,cAAc,CAAC,GAAG,IAAI,CACvB,YAAY,EAAE,YAAY,EAC1B,GAAG,EAAE,GAAG,EACR,gBAAgB,CAAC,EAAE,gBAAgB,KAClC,OAAO,CAAC,IAAI,CAAC,CAAA;AAElB;;GAEG;AACH,MAAM,WAAW,QAAQ,CAAC,GAAG;IACzB;;;;;;;OAOG;IACH,GAAG,EAAE,CACD,YAAY,EAAE,YAAY,EAC1B,OAAO,CAAC,EAAE,GAAG,EACb,UAAU,CAAC,EAAE,cAAc,CAAC,GAAG,CAAC,KAC/B,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAA;IAE3B;;OAEG;IACH,OAAO,CAAC,MAAM,SAAS,UAAU,EAAE,IAAI,EAAE,WAAW,CAAC,GAAG,EAAE,MAAM,CAAC,GAAG,IAAI,CAAA;IACxE,OAAO,CAAC,IAAI,EAAE,aAAa,CAAC,GAAG,CAAC,GAAG,IAAI,CAAA;IACvC,OAAO,CAAC,IAAI,EAAE,gBAAgB,CAAC,GAAG,CAAC,GAAG,IAAI,CAAA;CAC7C;AAED,KAAK,YAAY,CAAC,GAAG,IAAI,aAAa,CAAC,GAAG,CAAC,GAAG,WAAW,CAAC,GAAG,EAAE,GAAG,CAAC,GAAG,gBAAgB,CAAC,GAAG,CAAC,CAAA;AAE3F,MAAM,WAAW,cAAc,CAAC,GAAG;IAC/B,OAAO,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,GAAG,EAAE,GAAG,KAAK,OAAO,CAAC,OAAO,CAAC,CAAA;IACtD,KAAK,CAAC,EAAE,YAAY,CAAC,GAAG,CAAC,EAAE,CAAA;IAC3B,kBAAkB,CAAC,EAAE,MAAM,OAAO,CAAC,OAAO,CAAC,CAAA;CAC9C;AAED,UAAU,WAAW,CAAC,GAAG;IACrB,CAAC,MAAM,SAAS,UAAU,EAAE,IAAI,EAAE,WAAW,CAAC,GAAG,EAAE,MAAM,CAAC,GAAG,WAAW,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;IACrF,CAAC,IAAI,EAAE,aAAa,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAA;IAC9C,CAAC,IAAI,EAAE,gBAAgB,CAAC,GAAG,CAAC,GAAG,gBAAgB,CAAC,GAAG,CAAC,CAAA;CACvD;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAkDG;AACH,MAAM,WAAW,QAAQ;IACrB;;;;OAIG;IACH,cAAc,EAAE,CAAC,GAAG,SAAS,MAAM,EAAE,MAAM,EAAE,cAAc,CAA
C,GAAG,CAAC,KAAK,QAAQ,CAAC,GAAG,CAAC,CAAA;IAElF;;;;OAIG;IACH,kBAAkB,EAAE,CAAC,QAAQ,CAAC,EAAE,OAAO,EAAE,KAAK,YAAY,CAAA;IAE1D;;;OAGG;IACH,cAAc,CAAC,GAAG,KAAK,WAAW,CAAC,GAAG,CAAC,CAAA;IAEvC;;;;;OAKG;IACH,YAAY,EAAE,OAAO,YAAY,CAAA;CACpC;AAED;;;;;;;;;;;;;;;;;;;;;;;GAuBG;AACH,MAAM,WAAW,YAAY;IACzB;;;OAGG;IACH,WAAW,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,CAAA;IAE/B;;;OAGG;IACH,YAAY,CAAC,IAAI,EAAE,MAAM,GAAG,IAAI,CAAA;IAEhC;;;OAGG;IACH,mBAAmB,EAAE,CAAC,SAAS,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,MAAM,KAAK,IAAI,CAAA;IAEtE;;;;;OAKG;IACH,QAAQ,EAAE,CAAC,OAAO,CAAC,EAAE;QAAE,mBAAmB,CAAC,EAAE,OAAO,CAAA;KAAE,KAAK,MAAM,CAAA;IAEjE;;;OAGG;IACH,UAAU,EAAE,CAAC,OAAO,EAAE,OAAO,EAAE,IAAI,CAAC,EAAE;QAAE,SAAS,CAAC,EAAE,CAAC,OAAO,EAAE,OAAO,KAAK,MAAM,CAAA;KAAE,KAAK,IAAI,CAAA;IAE3F;;;OAGG;IACH,2BAA2B,EAAE,CAAC,SAAS,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,MAAM,KAAK,IAAI,CAAA;IAE7E;;;OAGG;IACH,gBAAgB,EAAE,CAAC,OAAO,EAAE,MAAM,KAAK,IAAI,CAAA;IAE3C;;;OAGG;IACH,gBAAgB,EAAE,MAAM,MAAM,GAAG,IAAI,CAAA;IAErC;;;;OAIG;IACH,UAAU,EAAE,MAAM,OAAO,EAAE,CAAA;CAC9B;AAED;;;GAGG;AACH,MAAM,WAAW,OAAO;IACpB,iGAAiG;IACjG,MAAM,EAAE,MAAM,GAAG,OAAO,GAAG,QAAQ,CAAA;IACnC,sCAAsC;IACtC,IAAI,EAAE,MAAM,CAAA;IACZ,2DAA2D;IAC3D,QAAQ,CAAC,EAAE,MAAM,CAAA;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,YAAY;IACzB;;;;;OAKG;IACH,YAAY,CAAC,EAAE;QAAE,QAAQ,EAAE,MAAM,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAA;KAAE,CAAA;IACzD;;OAEG;IACH,MAAM,CAAC,EAAE,MAAM,CAAA;IACf;;OAEG;IACH,UAAU,CAAC,EAAE,UAAU,CAAA;IACvB,iGAAiG;IACjG,UAAU,CAAC,EAAE,UAAU,CAAA;IACvB,kCAAkC;IAClC,MAAM,CAAC,EAAE,MAAM,CAAA;CAClB;AAED;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,wBAAgB,cAAc,CAAC,GAAG,GAAE,YAAiB,GAAG,QAAQ,CA8K/D;AA+ED,iBAAS,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,MAAM,CAO9D;AAED,wBAAgB,kBAAkB,CAAC,eAAe,GAAE,OAAO,EAAO,GAAG,YAAY,CAiDhF;AAED,wBAAgB,cAAc,CAAC,GAAG,GAAG,OAAO,KAAK,WAAW,CAAC,GAAG,CAAC,CAEhE"}
package/build/lib/ai.js
CHANGED
@@ -6,14 +6,12 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.createAIEngine = createAIEngine;
 exports.createConversation = createConversation;
 exports.getStepBuilder = getStepBuilder;
-// cspell:words lstripBlocks
-const openai_1 = require("openai");
 const nunjucks_1 = __importDefault(require("nunjucks"));
 const zod_1 = require("zod");
 const action_1 = require("./bosun/action");
-const core_1 = require("openai/core");
 const tracer_1 = require("./bosun/tracer");
 const zod_to_json_schema_1 = require("zod-to-json-schema");
+const openai_1 = require("./llm-adapters/openai");
 /**
  * Creates an AI Engine with the given configuration.
  *
@@ -43,15 +41,7 @@ function createAIEngine(cfg = {}) {
     const stepTracer = cfg.stepTracer || undefined;
     const logger = cfg.logger || globalThis.console;
     const tracer = cfg.tracer || (0, tracer_1.createConsoleTracer)(logger);
-
-    const tokenStorage = cfg.tokenStorage || {
-        async getToken() {
-            if (process.env.OPENAI_API_KEY) {
-                return process.env.OPENAI_API_KEY;
-            }
-            throw new Error('OpenAI API key is not set');
-        },
-    };
+    // tokenStorage is used by the default adapter to fetch API keys (backwards compatible)
     function createWorkflow({ onError, steps = [], }) {
         steps.forEach(addStepToTracer);
         return {
@@ -95,15 +85,13 @@ function createAIEngine(cfg = {}) {
         }
     }
     async function runStep(step, conversation, ctx, state) {
-        if (!apiKey) {
-            apiKey = await tokenStorage.getToken();
-        }
-        if (!apiKey) {
-            throw new Error('LLM API key is not provided');
-        }
         const stepTrace = {
             name: step.name,
-            model: step.model
+            model: typeof step.model === 'string'
+                ? step.model
+                : step.model
+                    ? JSON.stringify(step.model.getOptions())
+                    : 'default',
             schema: 'schema' in step
                 ? step.schema instanceof zod_1.ZodSchema
                     ? step.schema
@@ -126,11 +114,11 @@
         stepTrace.stringifiedConversation = stringifiedMessages;
         stepTracer?.addStepTrace(stepTrace);
         if ('schema' in step) {
-            response = await runLLM(
+            response = await runLLM(step.model, prompt, stringifiedMessages, step.schema);
             response = step.schema.parse(JSON.parse(response));
         }
         else {
-            response = await runLLM(
+            response = await runLLM(step.model, prompt, stringifiedMessages, undefined);
         }
         if (!response) {
             throw new Error('No response from OpenAI');
@@ -142,7 +130,6 @@
         await (step.onError
             ? step.onError(error.message, ctx)
            : onError(error.message, ctx));
-        // FIXME: this doesn't terminate the workflow
         stepTracer?.addStepTrace(stepTrace);
         state.terminate();
     }
@@ -162,30 +149,24 @@
     }
     }
     }
-    async function runLLM(
-        logger.debug('AI Engine: model:', model
+    async function runLLM(model, systemPrompt, messages, schema) {
+        logger.debug('AI Engine: model:', typeof model === 'string' || model === undefined
+            ? model || 'gpt-4o-2024-08-06'
+            : '[adapter]');
         logger.debug('----------- RENDERED PROMPT ---------------');
         logger.debug(systemPrompt);
        logger.debug('-------------------------------------------');
-
-
-
-
-
-
-
-
-
-
-            { role: 'user', content: messages },
-        ],
-        ...getOpenAiOptions(model, schema),
-        });
-        if (!response.choices[0].message.content) {
-            throw new Error('No response from OpenAI');
-        }
-        return response.choices[0].message.content;
+        const adapter = typeof model === 'string' || model === undefined
+            ? (0, openai_1.createOpenAIAdapter)(getOpenAiOptions(model || 'gpt-4o-2024-08-06', schema), {
+                tokenStorage: cfg.tokenStorage ||
+                    {
+                        async getToken() {
+                            return process.env.OPENAI_API_KEY ?? null;
+                        },
+                    },
+            })
+            : model;
+        return adapter.generateResponse(systemPrompt, messages, schema);
     }
     return {
         createWorkflow,
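The practical effect of this refactor is that a plain string `model` (or none at all) still resolves to the built-in OpenAI path: `runLLM` wraps it in `createOpenAIAdapter` and falls back to `OPENAI_API_KEY` when no `tokenStorage` is configured. A minimal sketch of the unchanged 0.8.1-style setup, assuming `createAIEngine` is imported from the package root; the env fallback mirrors the compiled code above.

```ts
import { createAIEngine } from '@recombine-ai/engine';

// Deprecated in this release, but still honored: the default OpenAI adapter
// calls tokenStorage.getToken() to obtain the API key, exactly as before.
const engine = createAIEngine({
    tokenStorage: {
        getToken: async () => process.env.OPENAI_API_KEY ?? null,
    },
});
```

Steps that set `model: 'gpt-4o-2024-08-06'` (or omit `model`) keep working against this engine without changes; only adapter-based steps bypass `tokenStorage`.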
package/build/lib/llm-adapters/index.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/lib/llm-adapters/index.ts"],"names":[],"mappings":"AAAA,cAAc,UAAU,CAAA"}
package/build/lib/llm-adapters/index.js
ADDED
@@ -0,0 +1,17 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+__exportStar(require("./openai"), exports);
package/build/lib/llm-adapters/openai.d.ts
ADDED
@@ -0,0 +1,10 @@
+import { ChatCompletionCreateParamsBase } from 'openai/resources/chat/completions';
+import type { LlmAdapter } from '../ai';
+export type OpenAIChatOptions = Omit<ChatCompletionCreateParamsBase, 'messages' | 'stream'>;
+export type OpenAIAdapterAuth = {
+    tokenStorage: {
+        getToken: () => Promise<string | null>;
+    };
+};
+export declare function createOpenAIAdapter(options: OpenAIChatOptions, auth: OpenAIAdapterAuth): LlmAdapter;
+//# sourceMappingURL=openai.d.ts.map
package/build/lib/llm-adapters/openai.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"openai.d.ts","sourceRoot":"","sources":["../../../src/lib/llm-adapters/openai.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,8BAA8B,EAAE,MAAM,mCAAmC,CAAA;AAElF,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,OAAO,CAAA;AAEvC,MAAM,MAAM,iBAAiB,GAAG,IAAI,CAAC,8BAA8B,EAAE,UAAU,GAAG,QAAQ,CAAC,CAAA;AAE3F,MAAM,MAAM,iBAAiB,GAAG;IAC5B,YAAY,EAAE;QAAE,QAAQ,EAAE,MAAM,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC,CAAA;KAAE,CAAA;CAC3D,CAAA;AAID,wBAAgB,mBAAmB,CAC/B,OAAO,EAAE,iBAAiB,EAC1B,IAAI,EAAE,iBAAiB,GACxB,UAAU,CAoCZ"}
package/build/lib/llm-adapters/openai.js
ADDED
@@ -0,0 +1,36 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.createOpenAIAdapter = createOpenAIAdapter;
+const openai_1 = require("openai");
+const delay = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+function createOpenAIAdapter(options, auth) {
+    return {
+        getOptions: () => options,
+        async generateResponse(systemPrompt, messages, _schema) {
+            const apiKey = await auth.tokenStorage.getToken();
+            if (!apiKey) {
+                throw new Error('OpenAI API key is not set');
+            }
+            if (apiKey === '__TESTING__') {
+                await delay(100);
+                if (!_schema) {
+                    return 'canned response';
+                }
+                return JSON.stringify({ message: 'canned response', reasons: [] });
+            }
+            const client = new openai_1.OpenAI({ apiKey });
+            const response = await client.chat.completions.create({
+                messages: [
+                    { role: 'system', content: systemPrompt },
+                    { role: 'user', content: messages },
+                ],
+                ...options,
+            });
+            const content = response.choices[0]?.message?.content;
+            if (!content) {
+                throw new Error('No response from OpenAI');
+            }
+            return content;
+        },
+    };
+}
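With the adapter module in place, an OpenAI-backed adapter can be built once and assigned to a step's `model` field instead of a model-name string. A rough usage sketch; it assumes `createOpenAIAdapter` is re-exported from the package root via the new `llm-adapters` barrel (otherwise import it from the build output path).

```ts
import { createOpenAIAdapter } from '@recombine-ai/engine';

// OpenAIChatOptions is ChatCompletionCreateParamsBase minus `messages`/`stream`,
// so the model name, temperature, etc. go here and surface in step traces via getOptions().
const adapter = createOpenAIAdapter(
    { model: 'gpt-4.1-2025-04-14', temperature: 0 },
    // Same tokenStorage shape the engine config used before 0.8.2.
    { tokenStorage: { getToken: async () => process.env.OPENAI_API_KEY ?? null } },
);

// In a workflow step, use it in place of a BasicModel string:
// { name: 'reply', model: adapter, prompt: 'You are a helpful assistant.', ... }
```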
package/changelog.md
CHANGED
@@ -1,100 +1,109 @@
 # Changelog
 
+### 0.8.1 → 0.8.3 (unstable)
+
+- Fixed missing tokenStorage in `LlmAdapter`.
+
+### 0.8.1 → 0.8.2 (unstable)
+
+- Model now accepts a string or an `LlmAdapter`. Old defaults are used when model is a string.
+- Added OpenAI adapter `createOpenAIAdapter(options)`.
+
 ### 0.8.0 → 0.8.1 (unstable)
 
-
-
-
-
-
+- Added prompt context mapping functionality to Context class:
+  - `setPromptContextMapper(promptMapper: (ctx: T) => object)` - how to map Bosun's context into
+    prompt context
+  - `getPromptContextMapper()` - retrieves the current prompt context mapper function
+  - Default mapper returns the context as-is
 
 ### 0.7.1 → 0.8.0 (unstable)
 
 Breaking changes:
 
-
-
-
-
-
-
-
-
-
+- `WorkflowControls` parameter added to step `execute` methods - all `ProgrammaticStep`,
+  `StringLLMStep`, and `JsonLLMStep` execute functions now receive workflow controls as the last
+  parameter,
+- `Workflow.terminate()` and `Workflow.rewindTo()` methods removed - use
+  `WorkflowControls.terminate()` and `WorkflowControls.rewindTo()` from within step execution
+  instead; note that `WorkflowControls.rewindTo()` accepts a step name rather than a link to a step,
+- `shouldExecute` property removed from `StringLLMStep` and `JsonLLMStep` interfaces - use
+  conditional logic within the `execute` function.
+- `beforeEach` callback also moved into `Workflow.run` as third parameter
 
 Other changes:
 
-
+- Added support for additional OpenAI models: `gpt-4o-2024-08-06` and `gpt-4.1-2025-04-14`.
 
 ### 0.7.0 → 0.7.1 (unstable)
 
-
+- fixed multiple runs on static workflows
 
 ### 0.6.0 → 0.7.0 (unstable)
 
 Breaking changes:
 
-
-
-
-
-
+- namespaces removed
+- `TestAgentFactory` now returns a promise
+- `TesAgentFactoryProps` now requires `Tracer` and doesn't require `AIEngine`
+- `loadFile` method was removed in favor of `PromptFS`
+- `workflow.run` now returns a string instead of an object as a response
 
 Other changes
 
-
-
-
-
-
+- interface `TestVoiceAgent` added
+- interface `Tracer` added
+- interface `PromptFS` and function `createLocalFS` were added
+- added `StepTracer` interface
+- Engine config now accepts `tracer` and `stepTracer`
 
 ### 0.5.0 → 0.6.0 (unstable)
 
 Breaking changes:
 
-
-
+- `addDirective` is removed. Use `addMessage` with role: 'system' instead.
+- `ignoreDirectives` → `ignoreAddedMessages`
 
 Other changes:
 
-
+- `AIEngine.sendMessage` now accepts `Message` rather than a string
 
 ### 0.4.0 → 0.5.0 (unstable)
 
 Breaking changes:
 
-
-
+- `schema` property replaced with `json` which can be boolean,
+- `setDirectiveFormatter` removed
 
 Other changes:
 
-
-
-
+- `renderPrompt` method added
+- `addDirective` accepts optional formatter function
+- `formatter` optional method added to `Message`
 
 ### 0.3.2 → 0.4.0 (unstable)
 
 Breaking changes:
 
-
+- `await workflow.run()` now returns an object with `reply: string` and `trace` object
 
 ### 0.3.1 → 0.3.2 (unstable)
 
-
-
-
-
+- add `ScheduleQuery` class that provides three additional capabilities compared to `delayFactory`:
+  - `query.nextValidDate(date: Date)` – to find the next closest date after a specific date (rather than a delay)
+  - `query.next()` – gets the closest date within the provided schedule
+  - `query.isValid(date)` – checks if the provided date is within the schedule
 
 ### 0.3.0 → 0.3.1 (unstable)
 
-
+- Fix: allow other models (as a string)
 
 ### 0.2.0 → 0.3.0 (unstable)
 
 Breaking changes:
 
-
-
-
-
-
+- Break down the library into namespaces: AIEngine, Scheduler
+- `Models` → `BasicModel`
+- `Step` → `LLMStep` & `ProgrammaticStep`
+- `makeMessagesList` → `getConversation`
+- Deprecation of `shouldExecute` (its use is discouraged if there's no `maxAttempts` in a step)
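As an illustration of the 0.8.1 entry above, the prompt-context mapper might be used along these lines. This is only a sketch built from the signatures listed in the changelog; the context shape and the way the `Context` instance is obtained are assumptions, not package API.

```ts
// Hypothetical application context; only the two method signatures below come
// from the changelog entry itself.
type SupportCtx = { userId: string; tier: 'free' | 'pro'; internalNotes: string[] };

// Stand-in for the engine's Context instance described in the changelog.
declare const ctx: {
    setPromptContextMapper: (promptMapper: (ctx: SupportCtx) => object) => void;
    getPromptContextMapper: () => (ctx: SupportCtx) => object;
};

// Expose only the fields the Nunjucks prompt templates should see;
// by default the mapper returns the context as-is.
ctx.setPromptContextMapper(({ userId, tier }) => ({ userId, tier }));
```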
package/eslint.config.mjs
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@recombine-ai/engine",
-  "version": "0.8.
+  "version": "0.8.3",
   "description": "Recombine AI engine for creating conversational AI agents",
   "main": "build/index.js",
   "types": "build/index.d.ts",
@@ -35,7 +35,7 @@
   },
   "dependencies": {
     "nunjucks": "^3.2.4",
-    "openai": "^
+    "openai": "^6.8.1",
     "zod": "3.23.8",
     "zod-to-json-schema": "^3.24.6"
   },
package/readme.md
CHANGED
@@ -6,13 +6,13 @@ A TypeScript library for building agentic workflows for conversational AI.
 
 ## Features
 
-
-
-
-
-
-
-
+- 🔄 Multi-step agentic workflows
+- 🎯 Conditional execution and reviewers
+- 📝 Structured responses using Zod schemas
+- 🗂️ File-based prompts
+- ⚡ Message history management
+- 🌍 Context injection using Nunjucks templates
+- 👩💻 Ready to be integrated with the Recombine Bosun prompt-engineering IDE.
 
 ## Installation
 
@@ -123,28 +123,27 @@ To contribute to this project, you'll need to set up your local development environment:
 2. **Set up Git hooks with Husky:**
 
 Git hooks are automatically installed when you run `yarn install` (via the `prepare` script). The project uses Husky to manage the following Git hooks:
-
 - **`pre-commit`**: Runs `lint-staged` to format and lint only staged files using Prettier and ESLint
 - **`pre-push`**: Runs TypeScript type checking to ensure no type errors before pushing
 - **`post-merge`**: Automatically runs `yarn install` if `package.json` or `yarn.yaml` changed after a merge
 
 ### Available Scripts
 
-
-
-
-
-
-
-
-
+- **`yarn dev`** - Start TypeScript compiler in watch mode
+- **`yarn build`** - Build the project
+- **`yarn typecheck`** - Run TypeScript type checking without emitting files
+- **`yarn test`** - Run tests with Vitest
+- **`yarn lint`** - Lint code with ESLint
+- **`yarn lint:fix`** - Lint and automatically fix issues
+- **`yarn format`** - Format code with Prettier
+- **`yarn format:check`** - Check code formatting without making changes
 
 ### Code Quality
 
 The project maintains code quality through:
 
-
-
-
-
-
+- **TypeScript** for type safety
+- **ESLint** for code linting
+- **Prettier** for consistent code formatting
+- **Vitest** for testing
+- **Husky + lint-staged** for automated pre-commit checks