@recombine-ai/engine 0.6.0 → 0.7.0
- package/build/index.d.ts +4 -5
- package/build/index.d.ts.map +1 -1
- package/build/index.js +4 -8
- package/build/lib/ai.d.ts +336 -328
- package/build/lib/ai.d.ts.map +1 -1
- package/build/lib/ai.js +286 -275
- package/build/lib/bosun/agent.d.ts +34 -24
- package/build/lib/bosun/agent.d.ts.map +1 -1
- package/build/lib/bosun/agent.js +4 -27
- package/build/lib/bosun/context.d.ts.map +1 -1
- package/build/lib/bosun/context.js +1 -4
- package/build/lib/bosun/index.d.ts +2 -0
- package/build/lib/bosun/index.d.ts.map +1 -1
- package/build/lib/bosun/index.js +1 -0
- package/build/lib/bosun/stepTracer.d.ts +14 -0
- package/build/lib/bosun/stepTracer.d.ts.map +1 -0
- package/build/lib/bosun/stepTracer.js +2 -0
- package/build/lib/bosun/tracer.d.ts +19 -0
- package/build/lib/bosun/tracer.d.ts.map +1 -0
- package/build/lib/bosun/tracer.js +20 -0
- package/build/lib/interfaces.d.ts +17 -19
- package/build/lib/interfaces.d.ts.map +1 -1
- package/build/lib/prompt-fs.d.ts +17 -0
- package/build/lib/prompt-fs.d.ts.map +1 -0
- package/build/lib/prompt-fs.js +23 -0
- package/build/lib/rc-fs.d.ts +17 -0
- package/build/lib/rc-fs.d.ts.map +1 -0
- package/build/lib/rc-fs.js +22 -0
- package/changelog.md +22 -4
- package/eslint.config.mjs +52 -0
- package/package.json +9 -2
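
The headline change in 0.7.0 is a reworked step API in `ai.js`: workflow steps now receive an explicit context argument, an engine-level `tokenStorage` resolves the OpenAI key lazily, and a step may declare a zod `schema` to get validated structured output. The following is a consumer-side sketch of that surface, pieced together from the compiled diff below; the step-object shape and the root re-exports are assumptions inferred from `build/lib/ai.js`, not documented API.

```ts
import { z } from 'zod';
// Assumed root re-exports; the diff only shows them on build/lib/ai.js.
import { createAIEngine, createConversation } from '@recombine-ai/engine';

const engine = createAIEngine({}); // default tokenStorage reads process.env.OPENAI_API_KEY

// createWorkflow now takes an onError handler that receives (message, ctx).
const workflow = engine.createWorkflow({
  onError: async (message: string, ctx: unknown) => console.error(message, ctx),
});

// New in 0.7.0: a zod schema on the step requests JSON output and validates it.
const Verdict = z.object({ message: z.string(), reasons: z.array(z.string()) });

workflow.addStep({
  name: 'classify',
  prompt: 'Classify the conversation about {{ topic }}.', // rendered with nunjucks
  schema: Verdict,
  execute: async (verdict: z.infer<typeof Verdict>, conversation: any, ctx: unknown) => {
    conversation.setProposedReply(verdict.message);
  },
});

const conversation = createConversation([{ sender: 'user', text: 'Hi there!' }]);
// run() now takes the template context as a second argument and returns the
// proposed reply directly (0.6.0 returned a { reply, trace } object).
const reply = await workflow.run(conversation, { topic: 'billing' });
```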
package/build/lib/ai.d.ts.map
CHANGED

@@ -1 +1 @@
-{"version":3,"file":"ai.d.ts","sourceRoot":"","sources":["../../src/lib/ai.ts"],"names":[],"mappings":"…
+{"version":3,"file":"ai.d.ts","sourceRoot":"","sources":["../../src/lib/ai.ts"],"names":[],"mappings":"…
(generated sourcemap: the base64-VLQ "mappings" blob was regenerated for the updated ai.d.ts declarations and is omitted here)
package/build/lib/ai.js
CHANGED
(deleted lines whose content the diff view did not render are shown as bare "-" markers)

@@ -3,315 +3,326 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.createAIEngine = createAIEngine;
+exports.createConversation = createConversation;
+exports.getStepBuilder = getStepBuilder;
 // cspell:words lstripBlocks
-const fs_1 = __importDefault(require("fs"));
 const openai_1 = __importDefault(require("openai"));
-const path_1 = require("path");
 const nunjucks_1 = __importDefault(require("nunjucks"));
-const
+const zod_1 = require("zod");
 const action_1 = require("./bosun/action");
 const core_1 = require("openai/core");
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const tracer_1 = require("./bosun/tracer");
+const zod_to_json_schema_1 = require("zod-to-json-schema");
+/**
+ * Creates an AI Engine with the given configuration.
+ *
+ * The AI Engine provides utilities for creating and running conversational workflows
+ * with large language models, specifically OpenAI GPT models.
+ *
+ * @returns An AIEngine instance.
+ *
+ * @example
+ * ```ts
+ * const engine = createAIEngine({
+ *   logger: customLogger,
+ *   basePath: '/path/to/prompts'
+ * });
+ *
+ * const workflow = await engine.createWorkflow(
+ *   engine.createStep({
+ *     name: 'generate-response',
+ *     prompt: engine.loadFile('prompts/response.txt'),
+ *     execute: (response) => conversation.setProposedReply(response)
+ *   })
+ * );
+ *
+ * const reply = await workflow.run(conversation);
+ * ```
+ */
+function createAIEngine(cfg = {}) {
+    const stepTracer = cfg.stepTracer || undefined;
+    const logger = cfg.logger || globalThis.console;
+    const tracer = cfg.tracer || (0, tracer_1.createConsoleTracer)(logger);
+    let apiKey = null;
+    const tokenStorage = cfg.tokenStorage || {
+        async getToken() {
+            if (process.env.OPENAI_API_KEY) {
+                return process.env.OPENAI_API_KEY;
+            }
+            throw new Error('OpenAI API key is not set');
+        },
+    };
+    function createWorkflow({ onError }) {
+        let shouldRun = true;
+        let currentStep = 0;
+        let beforeEachCallback = async () => Promise.resolve(null);
+        const attempts = new Map();
+        const steps = [];
+        return {
+            terminate: () => {
+                logger.debug('AI Engine: Terminating conversation...');
+                shouldRun = false;
             },
-
-
-
-
-
-
-        let shouldRun = true;
-        let currentStep = 0;
-        let beforeEachCallback = async () => Promise.resolve(null);
-        const attempts = new Map();
-        const trace = {
-            steps: steps.reduce((acc, step) => {
-                acc[step.name] = {};
-                return acc;
-            }, {})
-        };
-        return {
-            terminate: () => {
-                logger.debug('AI Engine: Terminating conversation...');
-                shouldRun = false;
-            },
-            run: async (messages) => {
-                for (; currentStep < steps.length; currentStep++) {
-                    await beforeEachCallback();
-                    const step = steps[currentStep];
-                    if (!shouldRun) {
-                        break;
-                    }
-                    if (!step.runIf || (await step.runIf(messages))) {
-                        const action = (0, action_1.makeAction)(cfg.sendAction, 'AI', step.name);
-                        await action('started');
-                        logger.debug(`AI Engine: Step: ${step.name}`);
-                        if ('prompt' in step) {
-                            const stepTrace = await runStep(step, messages);
-                            trace.steps[step.name] = stepTrace;
-                        }
-                        else {
-                            await runDumbStep(step, messages);
-                        }
-                        await action('completed');
-                    }
-                }
-                return {
-                    reply: shouldRun ? messages.getProposedReply() : null,
-                    trace
-                };
-            },
-            rewindTo: (step) => {
-                const index = steps.indexOf(step);
-                if (index === -1) {
-                    throw new Error(`Step ${step.name} not found`);
-                }
-                if (index > currentStep) {
-                    throw new Error(`Cannot rewind to a step ahead of the current step`);
-                }
-                currentStep = index - 1; // -1 because it will be incremented in the loop definition
-            },
-            beforeEach(callback) {
-                beforeEachCallback = callback;
-            },
-        };
-        async function runStep(step, messages) {
-            if (!apiKey) {
-                throw new Error('OpenAI API key is not set');
-            }
-            const stepTrace = {};
-            try {
-                stepTrace.receivedContext = step.context;
-                let response = null;
-                let prompt = typeof step.prompt === 'string' ? step.prompt : await step.prompt.content();
-                stepTrace.receivedPrompt = prompt;
-                logger.debug('AI Engine: context', step.context);
-                logger.debug('AI Engine: messages', messages.toString({ ignoreAddedMessages: step.ignoreAddedMessages }));
-                prompt = renderPrompt(prompt, step.context);
-                stepTrace.renderedPrompt = prompt;
-                const stringifiedMessages = messages.toString({ ignoreAddedMessages: step.ignoreAddedMessages });
-                stepTrace.stringifiedConversation = stringifiedMessages;
-                response = await runLLM(apiKey, prompt, stringifiedMessages, step.json, step.model);
-                if (!response) {
-                    throw new Error('No response from OpenAI');
+            run: async (messages, ctx) => {
+                for (; currentStep < steps.length; currentStep++) {
+                    await beforeEachCallback();
+                    const step = steps[currentStep];
+                    if (!shouldRun) {
+                        break;
                    }
-
-
-
-
-
-                await step
+                    if (!step.runIf || (await step.runIf(messages, ctx))) {
+                        const action = (0, action_1.makeAction)(cfg.sendAction, 'AI', step.name);
+                        await action('started');
+                        logger.debug(`AI Engine: Step: ${step.name}`);
+                        if ('prompt' in step) {
+                            await runStep(step, messages, ctx, onError);
                        }
                        else {
-
-                            logger.debug(`AI Engine: skipping`);
+                            await runProgrammaticStep(step, messages, ctx);
                        }
+                        await action('completed');
                    }
-                else {
-                    logger.debug(`AI Engine: replying`);
-                    await step.execute(response);
-                }
-                return stepTrace;
                }
-
-
-
-
-
+                return shouldRun ? messages.getProposedReply() : null;
+            },
+            rewindTo: (step) => {
+                const index = steps.indexOf(step);
+                if (index === -1) {
+                    throw new Error(`Step ${step.name} not found`);
                }
-
-
-            try {
-                if (!step.runIf || (await step.runIf(messages))) {
-                    await step.execute();
-                }
+                if (index > currentStep) {
+                    throw new Error(`Cannot rewind to a step ahead of the current step`);
                }
-
-
-
-
+                currentStep = index - 1; // -1 because it will be incremented in the loop definition
+            },
+            beforeEach(callback) {
+                beforeEachCallback = callback;
+            },
+            addStep(step) {
+                if ('prompt' in step) {
+                    tracer.addStep({
+                        name: step.name,
+                        prompt: (0, tracer_1.stdPrompt)(step.prompt),
+                        type: 'text',
+                        schema: 'schema' in step ? step.schema : undefined,
+                    });
                }
+                steps.push(step);
+            },
+        };
+        async function runStep(step, conversation, ctx, onError) {
+            if (!apiKey) {
+                apiKey = await tokenStorage.getToken();
            }
-
-
-
-
+            if (!apiKey) {
+                throw new Error('LLM API key is not provided');
+            }
+            const stepTrace = {
+                name: step.name,
+                model: step.model,
+                schema: 'schema' in step
+                    ? step.schema instanceof zod_1.ZodSchema
+                        ? step.schema
+                        : undefined
+                    : undefined,
+            };
+            try {
+                stepTrace.receivedContext = ctx;
+                let response = null;
+                let prompt = typeof step.prompt === 'string' ? step.prompt : await step.prompt.content();
+                stepTrace.receivedPrompt = prompt;
+                logger.debug('AI Engine: context', ctx);
+                logger.debug('AI Engine: messages', conversation.toString({ ignoreAddedMessages: step.ignoreAddedMessages }));
+                prompt = renderPrompt(prompt, ctx);
+                stepTrace.renderedPrompt = prompt;
+                const stringifiedMessages = conversation.toString({
+                    ignoreAddedMessages: step.ignoreAddedMessages,
+                });
+                stepTrace.stringifiedConversation = stringifiedMessages;
+                stepTracer?.addStepTrace(stepTrace);
+                if ('schema' in step) {
+                    response = await runLLM(apiKey, prompt, stringifiedMessages, step.schema, step.model);
+                    response = step.schema.parse(JSON.parse(response));
+                }
+                else {
+                    response = await runLLM(apiKey, prompt, stringifiedMessages, undefined, step.model);
+                }
+                if (!response) {
+                    throw new Error('No response from OpenAI');
+                }
+                logger.debug(`AI Engine: response: ${response}`);
+                if (typeof step.shouldExecute === 'function') {
+                    if (await step.shouldExecute(response, ctx)) {
+                        logger.debug(`AI Engine: executing`);
+                        checkAttempts(step);
+                        await step.execute(response, conversation, ctx);
                    }
-
-
-
+                    else {
+                        resetAttempts(step);
+                        logger.debug(`AI Engine: skipping`);
                    }
                }
+                else {
+                    logger.debug(`AI Engine: replying`);
+                    await step.execute(response, conversation, ctx);
+                }
            }
-
-
+            catch (error) {
+                await (step.onError
+                    ? step.onError(error.message, ctx)
+                    : onError(error.message, ctx));
+                // FIXME: this doesn't terminate the workflow
+                stepTracer?.addStepTrace(stepTrace);
+                shouldRun = false;
            }
        }
-    async function
-
-
-
-        logger.debug('-------------------------------------------');
-        if (apiKey === '__TESTING__') {
-            await (0, core_1.sleep)(100);
-            if (typeof json === 'boolean') {
-                return json ? JSON.stringify({ message: 'canned response', reasons: [] }) : 'canned response';
+        async function runProgrammaticStep(step, messages, ctx) {
+            try {
+                if (!step.runIf || (await step.runIf(messages, ctx))) {
+                    await step.execute(messages, ctx);
                }
-            return JSON.stringify({ message: 'canned response', reasons: [] });
            }
-
-
-
-
-
-
-            ...getOpenAiOptions(model, json),
-        });
-        if (!response.choices[0].message.content) {
-            throw new Error('No response from OpenAI');
+            catch (error) {
+                console.error(`AI Engine: error in dumb step ${step.name}: ${error.message}`);
+                await (step.onError
+                    ? step.onError(error.message, ctx)
+                    : onError(error.message, ctx));
+                shouldRun = false;
            }
-        return response.choices[0].message.content;
        }
-    function
-
-
-
-
-
-
-
-
-        return {
-            createWorkflow: createWorkflow,
-            createStep,
-            loadFile,
-            createConversation,
-            renderPrompt
-        };
-    }
-}
-AIEngine.createAIEngine = createAIEngine;
-function getOpenAiOptions(model, json) {
-    const options = {
-        model,
-    };
-    const isReasoningModel = ['o3-', 'o1-', 'o1-preview-'].some((m) => model.startsWith(m));
-    if (isReasoningModel) {
-        if (!model.startsWith('o1-preview-')) {
-            options.reasoning_effort = 'high';
+        function checkAttempts(step) {
+            if (step.maxAttempts) {
+                if (!attempts.has(step)) {
+                    attempts.set(step, 0);
+                }
+                attempts.set(step, attempts.get(step) + 1);
+                if (attempts.get(step) > step.maxAttempts) {
+                    throw new Error(`Max attempts reached for step ${step.name}`);
+                }
            }
        }
-
-
+        function resetAttempts(step) {
+            attempts.set(step, 0);
        }
-
-
-
-
-
-
-
-
+    }
+    async function runLLM(apiKey, systemPrompt, messages, schema, model = 'gpt-4o-2024-08-06') {
+        logger.debug('AI Engine: model:', model);
+        logger.debug('----------- RENDERED PROMPT ---------------');
+        logger.debug(systemPrompt);
+        logger.debug('-------------------------------------------');
+        if (apiKey === '__TESTING__') {
+            await (0, core_1.sleep)(100);
+            if (!schema) {
+                return 'canned response';
+            }
+            return JSON.stringify({ message: 'canned response', reasons: [] });
        }
-
-
+        const client = new openai_1.default({ apiKey });
+        const response = await client.chat.completions.create({
+            messages: [
+                { role: 'system', content: systemPrompt },
+                { role: 'user', content: messages },
+            ],
+            ...getOpenAiOptions(model, schema),
+        });
+        if (!response.choices[0].message.content) {
+            throw new Error('No response from OpenAI');
        }
-
-
+        return response.choices[0].message.content;
+    }
+    return {
+        createWorkflow,
+        createConversation,
+        renderPrompt,
+        getStepBuilder() {
+            return (step) => step;
+        },
+    };
+}
+function getOpenAiOptions(model, schema) {
+    const options = {
+        model,
+    };
+    const isReasoningModel = ['o3-', 'o1-', 'o1-preview-'].some((m) => model.startsWith(m));
+    if (isReasoningModel) {
+        if (!model.startsWith('o1-preview-')) {
+            options.reasoning_effort = 'high';
        }
-    return options;
    }
-
-
-        autoescape: false,
-        trimBlocks: true,
-        lstripBlocks: true,
-    });
-    return nunjucks_1.default.renderString(prompt, context || {});
+    else {
+        options.temperature = 0.1;
    }
-
-
-
-
-
-
-    const names = {
-        agent: 'Agent',
-        user: 'User',
-        system: 'System',
-    };
-    let defaultFormatter = (message) => `${names[message.sender]}: ${message.text}`;
-    let proposedFormatter = (message) => `Proposed reply: ${message}`;
-    let proposedReply = null;
-    return {
-        toString: (options) => {
-            return messages
-                .filter((msg) => !options?.ignoreAddedMessages || !msg.isAddedMessage)
-                .map((msg) => {
-                return msg.formatter ? msg.formatter(msg) : defaultFormatter(msg);
-            })
-                .filter((msg) => msg !== null)
-                .join('\n') +
-                (proposedReply ? `\n${proposedFormatter(proposedReply)}` : '');
-        },
-        addMessage: (message, opts) => {
-            messages.push({
-                ...message,
-                isAddedMessage: true,
-                formatter: opts?.formatter ?? defaultFormatter,
-            });
-        },
-        setDefaultFormatter: (formatter) => {
-            defaultFormatter = formatter;
-        },
-        setProposedMessageFormatter: (formatter) => {
-            proposedFormatter = formatter;
-        },
-        setProposedReply: (message) => (proposedReply = message),
-        getProposedReply: () => proposedReply,
-        getHistory: () => messages,
-        setUserName: (name) => {
-            names.user = name;
+    if (schema) {
+        options.response_format = {
+            type: 'json_schema',
+            json_schema: {
+                name: 'detector_response',
+                schema: (0, zod_to_json_schema_1.zodToJsonSchema)(schema),
            },
-        setAgentName: (name) => {
-            names.agent = name;
-        }
        };
    }
-
-
+    else {
+        options.response_format = { type: 'text' };
+    }
+    return options;
+}
+function renderPrompt(prompt, context) {
+    nunjucks_1.default.configure({
+        autoescape: false,
+        trimBlocks: true,
+        lstripBlocks: true,
+    });
+    return nunjucks_1.default.renderString(prompt, context || {});
+}
+function createConversation(initialMessages = []) {
+    const messages = initialMessages.map((msg) => ({
+        ...msg,
+        isAddedMessage: false,
+        formatter: undefined,
+    }));
+    const names = {
+        agent: 'Agent',
+        user: 'User',
+        system: 'System',
+    };
+    let defaultFormatter = (message) => `${names[message.sender]}: ${message.text}`;
+    let proposedFormatter = (message) => `Proposed reply: ${message}`;
+    let proposedReply = null;
+    return {
+        toString: (options) => {
+            return (messages
+                .filter((msg) => !options?.ignoreAddedMessages || !msg.isAddedMessage)
+                .map((msg) => {
+                return msg.formatter ? msg.formatter(msg) : defaultFormatter(msg);
+            })
+                .filter((msg) => msg !== null)
+                .join('\n') + (proposedReply ? `\n${proposedFormatter(proposedReply)}` : ''));
+        },
+        addMessage: (message, opts) => {
+            messages.push({
+                ...message,
+                isAddedMessage: true,
+                formatter: opts?.formatter ?? defaultFormatter,
+            });
+        },
+        setDefaultFormatter: (formatter) => {
+            defaultFormatter = formatter;
+        },
+        setProposedMessageFormatter: (formatter) => {
+            proposedFormatter = formatter;
+        },
+        setProposedReply: (message) => (proposedReply = message),
+        getProposedReply: () => proposedReply,
+        getHistory: () => messages,
+        setUserName: (name) => {
+            names.user = name;
+        },
+        setAgentName: (name) => {
+            names.agent = name;
+        },
+    };
+}
+function getStepBuilder() {
+    return (step) => step;
+}