@proteinjs/conversation 2.4.2 → 2.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +27 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1 -0
- package/dist/index.js.map +1 -1
- package/dist/src/Conversation.d.ts +4 -2
- package/dist/src/Conversation.d.ts.map +1 -1
- package/dist/src/Conversation.js +4 -14
- package/dist/src/Conversation.js.map +1 -1
- package/dist/src/OpenAiResponses.d.ts +121 -0
- package/dist/src/OpenAiResponses.d.ts.map +1 -0
- package/dist/src/OpenAiResponses.js +941 -0
- package/dist/src/OpenAiResponses.js.map +1 -0
- package/dist/src/fs/keyword_to_files_index/KeywordToFilesIndexFunctions.d.ts.map +1 -1
- package/dist/src/fs/keyword_to_files_index/KeywordToFilesIndexFunctions.js +1 -1
- package/dist/src/fs/keyword_to_files_index/KeywordToFilesIndexFunctions.js.map +1 -1
- package/index.ts +1 -0
- package/package.json +2 -2
- package/src/Conversation.ts +6 -0
- package/src/OpenAiResponses.ts +1076 -0
- package/src/fs/keyword_to_files_index/KeywordToFilesIndexFunctions.ts +2 -1
|
@@ -0,0 +1,941 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
+
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
+
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
+
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
+
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
+
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
+
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
+
});
|
|
10
|
+
};
|
|
11
|
+
// TypeScript-emitted helper (downlevel async/await): implements the label-based
// state machine that compiled async bodies step through. `_` holds the current
// label, the value last sent in, and the active try/op stacks. Generated code —
// the opcode dispatch is order-sensitive; logic left byte-identical.
var __generator = (this && this.__generator) || function (thisArg, body) {
    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
    // Build the next/throw/return entry points; n selects the opcode.
    function verb(n) { return function (v) { return step([n, v]); }; }
    function step(op) {
        // f is a re-entrancy guard: the machine must never be stepped while running.
        if (f) throw new TypeError("Generator is already executing.");
        while (g && (g = 0, op[0] && (_ = 0)), _) try {
            // If delegating to an inner iterator (y), forward the operation to it first.
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [op[0] & 2, t.value];
            switch (op[0]) {
                case 0: case 1: t = op; break;
                case 4: _.label++; return { value: op[1], done: false };
                case 5: _.label++; y = op[1]; op = [0]; continue;
                case 7: op = _.ops.pop(); _.trys.pop(); continue;
                default:
                    // Unwind through the try-range table (_.trys) for break/return/throw.
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
                    if (t[2]) _.ops.pop();
                    _.trys.pop(); continue;
            }
            op = body.call(thisArg, _);
        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
    }
};
|
|
38
|
+
// Spread helper: concatenates `from` onto `to`. When `pack` is set (or the
// two-argument form is used), array holes are materialized as explicit
// `undefined` entries, matching native `[...to, ...from]` semantics.
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
    var materialized = null;
    if (pack || arguments.length === 2) {
        var length = from.length;
        var index = 0;
        while (index < length) {
            // Copy lazily: only allocate the intermediate array once a hole is seen.
            if (materialized !== null || !(index in from)) {
                if (materialized === null) {
                    materialized = Array.prototype.slice.call(from, 0, index);
                }
                materialized[index] = from[index];
            }
            index += 1;
        }
    }
    return to.concat(materialized !== null ? materialized : Array.prototype.slice.call(from));
};
|
|
47
|
+
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAiResponses = exports.DEFAULT_MAX_TOOL_CALLS = exports.DEFAULT_RESPONSES_MODEL = void 0;
var openai_1 = require("openai");
var logger_1 = require("@proteinjs/logger");
var UsageData_1 = require("./UsageData");
var ChatCompletionMessageParamFactory_1 = require("./ChatCompletionMessageParamFactory");
var OpenAi_1 = require("./OpenAi");
// Default model for Responses API calls when the caller supplies none.
exports.DEFAULT_RESPONSES_MODEL = 'gpt-5.2';
// Safety cap on tool-call executions per run; exceeding it aborts the loop.
exports.DEFAULT_MAX_TOOL_CALLS = 50;
|
|
56
|
+
/**
|
|
57
|
+
* OpenAI Responses API wrapper (tool-loop + usage tracking + ConversationModules).
|
|
58
|
+
* - Uses Responses API directly
|
|
59
|
+
* - Supports custom function tools (tool calling loop)
|
|
60
|
+
* - Supports structured outputs (JSON schema / Zod)
|
|
61
|
+
* - Tracks usage + tool calls using existing types
|
|
62
|
+
* - Supports background mode (polling)
|
|
63
|
+
* - Supports ConversationModules (system messages + tool registration)
|
|
64
|
+
*/
|
|
65
|
+
var OpenAiResponses = /** @class */ (function () {
|
|
66
|
+
/**
 * Construct an OpenAiResponses wrapper.
 * All options are optional; defaults come from the module-level constants.
 * Note: `new openai_1.OpenAI()` reads its API key from the environment.
 */
function OpenAiResponses(opts = {}) {
    this.modulesProcessed = false;
    this.processingModulesPromise = null;
    this.systemMessages = [];
    this.functions = [];
    this.client = new openai_1.OpenAI();
    this.logger = new logger_1.Logger({ name: 'OpenAiResponses', logLevel: opts.logLevel });
    this.modules = opts.modules ?? [];
    this.allowedFunctionNames = opts.allowedFunctionNames;
    this.defaultModel = (opts.defaultModel ?? exports.DEFAULT_RESPONSES_MODEL).trim();
    this.defaultMaxToolCalls = typeof opts.maxToolCalls === 'number' ? opts.maxToolCalls : exports.DEFAULT_MAX_TOOL_CALLS;
}
|
|
80
|
+
/**
 * Plain text generation (supports tool calling).
 * Resolves model/background-mode defaults, runs the tool loop, then reports
 * usage to `args.onUsageData` (if provided) before returning the result.
 */
OpenAiResponses.prototype.generateText = async function (args) {
    await this.ensureModulesProcessed();
    const model = this.resolveModel(args.model);
    const backgroundMode = this.resolveBackgroundMode({
        requested: args.backgroundMode,
        model,
        reasoningEffort: args.reasoningEffort,
    });
    const maxToolCalls = typeof args.maxToolCalls === 'number' ? args.maxToolCalls : this.defaultMaxToolCalls;
    const result = await this.run({
        model,
        messages: args.messages,
        temperature: args.temperature,
        topP: args.topP,
        maxTokens: args.maxTokens,
        abortSignal: args.abortSignal,
        onToolInvocation: args.onToolInvocation,
        reasoningEffort: args.reasoningEffort,
        maxToolCalls,
        backgroundMode,
        textFormat: undefined,
    });
    if (args.onUsageData) {
        // NOTE: `run` returns its usage under the key `usagedata` (lower-case d).
        await args.onUsageData(result.usagedata);
    }
    return result;
};
|
|
121
|
+
/** Back-compat alias for callers that use `generateResponse`. */
OpenAiResponses.prototype.generateResponse = async function (args) {
    return this.generateText(args);
};
|
|
129
|
+
/**
 * Structured object generation (supports tool calling).
 * Builds a structured-output text format from `args.schema`, runs the tool
 * loop, parses/validates the final message, and reports usage.
 */
OpenAiResponses.prototype.generateObject = async function (args) {
    await this.ensureModulesProcessed();
    const model = this.resolveModel(args.model);
    const backgroundMode = this.resolveBackgroundMode({
        requested: args.backgroundMode,
        model,
        reasoningEffort: args.reasoningEffort,
    });
    const maxToolCalls = typeof args.maxToolCalls === 'number' ? args.maxToolCalls : this.defaultMaxToolCalls;
    const textFormat = this.buildTextFormat(args.schema);
    const result = await this.run({
        model,
        messages: args.messages,
        temperature: args.temperature,
        topP: args.topP,
        maxTokens: args.maxTokens,
        abortSignal: args.abortSignal,
        onToolInvocation: args.onToolInvocation,
        reasoningEffort: args.reasoningEffort,
        maxToolCalls,
        backgroundMode,
        textFormat,
    });
    const object = this.parseAndValidateStructuredOutput(result.message, args.schema);
    // `run` returns the key `usagedata` (lower-case d); re-exposed here as `usageData`.
    const outcome = {
        object,
        usageData: result.usagedata,
    };
    if (args.onUsageData) {
        await args.onUsageData(outcome.usageData);
    }
    return outcome;
};
|
|
176
|
+
// -----------------------------------------
// Core runner (tool loop)
// -----------------------------------------
/**
 * Core Responses API loop: create a response, execute any requested function
 * calls, feed the outputs back via `previous_response_id`, and repeat until
 * the model produces a final assistant message or `args.maxToolCalls` is hit.
 *
 * FIX: usage was previously accumulated against `OpenAi_1.DEFAULT_MODEL`
 * regardless of the model actually used for the request; it is now attributed
 * to `args.model` so usage/cost reporting matches the real calls.
 *
 * Returns `{ message, usagedata, toolInvocations }` (key is `usagedata`,
 * lower-case d — preserved for compatibility with existing callers).
 */
OpenAiResponses.prototype.run = async function (args) {
    const usage = new UsageData_1.UsageDataAccumulator({ model: args.model });
    const toolInvocations = [];
    const tools = this.buildResponseTools(this.functions);
    const { instructions, input } = this.buildInstructionsAndInput(args.messages);
    let toolCallsExecuted = 0;
    let previousResponseId;
    let nextInput = input;
    for (;;) {
        const response = await this.createResponseAndMaybeWait({
            model: args.model,
            // Instructions are only sent on the first turn; follow-ups inherit
            // them through previous_response_id.
            instructions: previousResponseId ? undefined : instructions,
            input: nextInput,
            previousResponseId,
            tools,
            temperature: args.temperature,
            topP: args.topP,
            maxTokens: args.maxTokens,
            reasoningEffort: args.reasoningEffort,
            textFormat: args.textFormat,
            backgroundMode: args.backgroundMode,
            abortSignal: args.abortSignal,
        });
        this.addUsageFromResponse(response, usage);
        const functionCalls = this.extractFunctionCalls(response);
        if (functionCalls.length < 1) {
            const message = this.extractAssistantText(response);
            if (!message) {
                throw new Error(`Response was empty`);
            }
            return { message, usagedata: usage.usageData, toolInvocations };
        }
        if (toolCallsExecuted + functionCalls.length > args.maxToolCalls) {
            throw new Error(`Max tool calls (${args.maxToolCalls}) reached. Stopping execution.`);
        }
        if (!response.id) {
            throw new Error(`Responses API did not return an id for a tool-calling response.`);
        }
        const toolOutputs = await this.executeFunctionCalls({
            calls: functionCalls,
            functions: this.functions,
            usage,
            toolInvocations,
            onToolInvocation: args.onToolInvocation,
        });
        toolCallsExecuted += functionCalls.length;
        previousResponseId = response.id;
        nextInput = toolOutputs;
        this.logger.debug({
            message: `Tool loop continuing`,
            obj: { toolCallsExecuted, lastToolCallCount: functionCalls.length, responseId: previousResponseId },
        });
    }
};
|
|
246
|
+
/**
 * Build and send one `responses.create` request. Optional fields are only
 * included when set so the API applies its own defaults. In background mode
 * the response is stored server-side and polled until terminal.
 */
OpenAiResponses.prototype.createResponseAndMaybeWait = async function (args) {
    const body = {
        model: args.model,
        input: args.input,
    };
    if (args.instructions) {
        body.instructions = args.instructions;
    }
    if (args.previousResponseId) {
        body.previous_response_id = args.previousResponseId;
    }
    if (args.tools.length > 0) {
        body.tools = args.tools;
    }
    if (typeof args.temperature === 'number') {
        body.temperature = args.temperature;
    }
    if (typeof args.topP === 'number') {
        body.top_p = args.topP;
    }
    if (typeof args.maxTokens === 'number') {
        body.max_output_tokens = args.maxTokens;
    }
    if (args.reasoningEffort) {
        body.reasoning = { effort: args.reasoningEffort };
    }
    if (args.textFormat) {
        body.text = { format: args.textFormat };
    }
    if (args.backgroundMode) {
        // Background responses must be stored so they can be retrieved by id.
        body.background = true;
        body.store = true;
    }
    const created = await this.client.responses.create(body, args.abortSignal ? { signal: args.abortSignal } : undefined);
    if (!args.backgroundMode) {
        return created;
    }
    if (!created?.id) {
        return created;
    }
    return this.waitForCompletion(created.id, args.abortSignal);
};
|
|
299
|
+
/**
 * Poll a background response until it reaches a terminal status, with
 * exponential backoff (500ms growing 1.5x, capped at 5s).
 * NOTE(review): there is no overall timeout — a response stuck in a
 * non-terminal state polls indefinitely unless the abort signal fires.
 */
OpenAiResponses.prototype.waitForCompletion = async function (responseId, abortSignal) {
    let delayMs = 500;
    for (;;) {
        if (abortSignal?.aborted) {
            throw new Error(`Request aborted`);
        }
        const resp = await this.client.responses.retrieve(responseId, undefined, abortSignal ? { signal: abortSignal } : undefined);
        const status = typeof resp?.status === 'string' ? String(resp.status).toLowerCase() : '';
        // Terminal statuses are returned as-is; the caller inspects the outcome.
        if (status === 'completed' || status === 'failed' || status === 'cancelled' || status === 'incomplete') {
            return resp;
        }
        this.logger.debug({ message: `Polling response`, obj: { responseId, status, delayMs } });
        await sleep(delayMs);
        delayMs = Math.min(5000, Math.floor(delayMs * 1.5));
    }
};
|
|
330
|
+
// -----------------------------------------
// Tool calls
// -----------------------------------------
/**
 * Map conversation functions to Responses API function-tool descriptors,
 * skipping any entry without a definition name.
 */
OpenAiResponses.prototype.buildResponseTools = function (functions) {
    if (!functions) {
        return [];
    }
    return functions
        .filter(function (f) { return Boolean(f.definition && f.definition.name); })
        .map(function (f) {
            return {
                type: 'function',
                name: f.definition.name,
                description: f.definition.description,
                parameters: f.definition.parameters,
            };
        });
};
|
|
354
|
+
/**
 * Collect the `function_call` items from a response's output array.
 * Items missing a string `call_id` or `name` are dropped; `arguments`
 * defaults to an empty string.
 */
OpenAiResponses.prototype.extractFunctionCalls = function (response) {
    const items = Array.isArray(response.output) ? response.output : [];
    const calls = [];
    for (const item of items) {
        if (!item || typeof item !== 'object' || item.type !== 'function_call') {
            continue;
        }
        const callId = typeof item.call_id === 'string' ? item.call_id : '';
        const callName = typeof item.name === 'string' ? item.name : '';
        const callArgs = typeof item.arguments === 'string' ? item.arguments : '';
        if (callId && callName) {
            calls.push({ type: 'function_call', call_id: callId, name: callName, arguments: callArgs });
        }
    }
    return calls;
};
|
|
376
|
+
/**
 * Execute each requested function call in order (sequentially, preserving the
 * original one-at-a-time behavior) and collect the function_call_output items
 * to feed back to the model.
 */
OpenAiResponses.prototype.executeFunctionCalls = async function (args) {
    const outputs = [];
    for (const call of args.calls) {
        const output = await this.executeFunctionCall({
            call,
            functions: args.functions,
            usage: args.usage,
            toolInvocations: args.toolInvocations,
            onToolInvocation: args.onToolInvocation,
        });
        outputs.push(output);
    }
    return outputs;
};
|
|
407
|
+
/**
 * Execute one function call requested by the model.
 * Lookup tries the exact definition name first, then a dot-suffix match.
 * Records a ToolInvocation (started/finished) for every path:
 *  - unknown function → error record + error payload returned to the model;
 *  - success → record with result data, formatted return sent to the model;
 *  - tool threw → record with error details, then the error is rethrown.
 */
OpenAiResponses.prototype.executeFunctionCall = async function (args) {
    const callId = args.call.call_id;
    const rawName = args.call.name;
    const shortName = rawName.split('.').pop() ?? rawName;
    const functionToCall =
        args.functions.find((fx) => fx.definition.name === rawName) ??
        args.functions.find((fx) => (fx.definition.name.split('.').pop() ?? fx.definition.name) === shortName);
    const startedAt = new Date();
    // Best-effort parse of the arguments for the "started" event; falls back to
    // the raw string when the model emitted malformed JSON.
    let parsedArgs;
    try {
        parsedArgs = JSON.parse(args.call.arguments ?? '{}');
    } catch {
        parsedArgs = args.call.arguments;
    }
    args.onToolInvocation?.({
        type: 'started',
        id: callId,
        name: functionToCall?.definition?.name ?? shortName,
        startedAt,
        input: parsedArgs,
    });
    if (!functionToCall) {
        const finishedAt = new Date();
        const rec = {
            id: callId,
            name: shortName,
            startedAt,
            finishedAt,
            input: parsedArgs,
            ok: false,
            error: { message: `Assistant attempted to call nonexistent function` },
        };
        args.toolInvocations.push(rec);
        args.onToolInvocation?.({ type: 'finished', result: rec });
        return {
            type: 'function_call_output',
            call_id: callId,
            output: JSON.stringify({ error: rec.error?.message, functionName: shortName }),
        };
    }
    try {
        // Re-parse for the actual invocation; malformed JSON becomes {} here
        // (the tool still runs), unlike the event payload above.
        let argsObj;
        try {
            argsObj = JSON.parse(args.call.arguments ?? '{}');
        } catch {
            argsObj = {};
        }
        args.usage.recordToolCall(functionToCall.definition.name);
        const returnObject = await functionToCall.call(argsObj);
        const finishedAt = new Date();
        const rec = {
            id: callId,
            name: functionToCall.definition.name,
            startedAt,
            finishedAt,
            input: argsObj,
            ok: true,
            data: returnObject,
        };
        args.toolInvocations.push(rec);
        args.onToolInvocation?.({ type: 'finished', result: rec });
        const output = await this.formatToolReturn(returnObject);
        return {
            type: 'function_call_output',
            call_id: callId,
            output,
        };
    } catch (error) {
        const finishedAt = new Date();
        const errMessage = error instanceof Error ? error.message : String(error);
        const errStack = error instanceof Error ? error.stack : undefined;
        const rec = {
            id: callId,
            name: functionToCall.definition.name,
            startedAt,
            finishedAt,
            input: parsedArgs,
            ok: false,
            error: { message: errMessage, stack: errStack },
        };
        args.toolInvocations.push(rec);
        args.onToolInvocation?.({ type: 'finished', result: rec });
        throw error;
    }
};
|
|
507
|
+
/**
 * Serialize a tool's return value for the function_call_output payload.
 * `undefined` → success sentinel; a ChatCompletionMessageParamFactory is
 * expanded into normalized {role, content} messages; anything else is
 * JSON.stringify'd directly.
 */
OpenAiResponses.prototype.formatToolReturn = async function (returnObject) {
    if (typeof returnObject === 'undefined') {
        return JSON.stringify({ result: 'Function with no return value executed successfully' });
    }
    if (returnObject instanceof ChatCompletionMessageParamFactory_1.ChatCompletionMessageParamFactory) {
        const messageParams = (await returnObject.create()) ?? [];
        const normalized = messageParams
            .map((m) => ({
                role: m.role,
                content: this.extractTextContent(m.content),
            }))
            .filter((m) => typeof m.content === 'string' && m.content.trim().length > 0);
        return JSON.stringify({ messages: normalized });
    }
    return JSON.stringify(returnObject);
};
|
|
533
|
+
// -----------------------------------------
// Usage + text extraction
// -----------------------------------------
/**
 * Fold a response's `usage` block into the accumulator. Missing numeric
 * fields count as 0; `total_tokens` falls back to input + output.
 */
OpenAiResponses.prototype.addUsageFromResponse = function (response, usage) {
    const raw = response.usage;
    if (!raw || typeof raw !== 'object') {
        return;
    }
    const num = (v) => (typeof v === 'number' ? v : 0);
    const input = num(raw.input_tokens);
    const output = num(raw.output_tokens);
    const total = typeof raw.total_tokens === 'number' ? raw.total_tokens : input + output;
    const inputDetails = raw.input_tokens_details;
    const cached = inputDetails && typeof inputDetails === 'object' ? num(inputDetails.cached_tokens) : 0;
    const outputDetails = raw.output_tokens_details;
    const reasoning = outputDetails && typeof outputDetails === 'object' ? num(outputDetails.reasoning_tokens) : 0;
    usage.addTokenUsage({
        promptTokens: input,
        cachedPromptTokens: cached,
        completionTokens: output,
        reasoningTokens: reasoning,
        totalTokens: total,
    });
};
|
|
565
|
+
/**
 * Extract the assistant's text from a response. Prefers the convenience
 * `output_text` field; otherwise scans output items for the first assistant
 * `message` whose `output_text` parts join (with '\n') to a non-empty string.
 * Returns '' when no text is found.
 */
OpenAiResponses.prototype.extractAssistantText = function (response) {
    const direct = typeof response.output_text === 'string' ? response.output_text.trim() : '';
    if (direct) {
        return direct;
    }
    const items = Array.isArray(response.output) ? response.output : [];
    for (const item of items) {
        if (!item || typeof item !== 'object') {
            continue;
        }
        if (item.type !== 'message' || item.role !== 'assistant') {
            continue;
        }
        if (!Array.isArray(item.content)) {
            continue;
        }
        const joined = item.content
            .filter((part) =>
                part &&
                typeof part === 'object' &&
                part.type === 'output_text' &&
                typeof part.text === 'string' &&
                part.text.trim())
            .map((part) => part.text)
            .join('\n')
            .trim();
        if (joined) {
            return joined;
        }
    }
    return '';
};
|
|
609
|
+
// -----------------------------------------
// Structured outputs (JSON schema / Zod)
// -----------------------------------------
/**
 * Build the Responses API `text.format` descriptor for a schema: Zod schemas
 * go through the official zodTextFormat helper; plain JSON Schemas are
 * strictified and wrapped in a strict json_schema format.
 */
OpenAiResponses.prototype.buildTextFormat = function (schema) {
    if (this.isZodSchema(schema)) {
        // Prefer the official helper when schema is Zod.
        // eslint-disable-next-line @typescript-eslint/no-var-requires
        const zodHelpers = require('openai/helpers/zod');
        return zodHelpers.zodTextFormat(schema, 'output');
    }
    return {
        type: 'json_schema',
        name: 'output',
        strict: true,
        schema: this.strictifyJsonSchema(schema),
    };
};
|
|
626
|
+
/**
 * Parse the model's final message as JSON and, for Zod schemas, validate it.
 * Plain JSON-Schema outputs are returned unvalidated (the API enforced the
 * schema server-side via strict mode).
 */
OpenAiResponses.prototype.parseAndValidateStructuredOutput = function (text, schema) {
    const parsed = this.parseJson(text);
    if (!this.isZodSchema(schema)) {
        return parsed;
    }
    const result = schema.safeParse(parsed);
    if (!result || !result.success) {
        throw new Error(`Structured output failed schema validation`);
    }
    return result.data;
};
|
|
637
|
+
/** Duck-type check for a Zod schema: an object/function exposing safeParse. */
OpenAiResponses.prototype.isZodSchema = function (schema) {
    const kind = typeof schema;
    if (!schema || (kind !== 'object' && kind !== 'function')) {
        return false;
    }
    return typeof schema.safeParse === 'function';
};
|
|
643
|
+
/**
 * Parse model output as JSON. Strips a leading/trailing markdown code fence
 * first; on failure, retries on the widest substring between the first
 * '{'/'[' and the last '}'/']'. Throws when no parse succeeds.
 */
OpenAiResponses.prototype.parseJson = function (text) {
    const cleaned = String(text ?? '')
        .trim()
        .replace(/^```(?:json)?/i, '')
        .replace(/```$/i, '')
        .trim();
    try {
        return JSON.parse(cleaned);
    } catch {
        const candidateStarts = [cleaned.indexOf('{'), cleaned.indexOf('[')].filter((i) => i !== -1);
        const start = candidateStarts.length > 0 ? Math.min(...candidateStarts) : -1;
        const end = Math.max(cleaned.lastIndexOf('}'), cleaned.lastIndexOf(']'));
        if (start >= 0 && end > start) {
            return JSON.parse(cleaned.slice(start, end + 1));
        }
        throw new Error(`Failed to parse model output as JSON`);
    }
};
|
|
666
|
+
/**
 * Strictifies a plain JSON Schema for OpenAI Structured Outputs (strict mode):
 * - Ensures every object has `additionalProperties: false`
 * - Ensures every object has a `required` array that includes **all** keys in `properties`
 * - Adds missing `type: "object"` / `type: "array"` where implied by keywords
 */
OpenAiResponses.prototype.strictifyJsonSchema = function (schema) {
    // Work on a deep copy so the caller's schema is never mutated.
    var root = JSON.parse(JSON.stringify(schema !== null && schema !== void 0 ? schema : {}));
    var visit = function (node) {
        if (!node || typeof node !== 'object') {
            return;
        }
        // Infer a missing `type` from the structural keywords that are present.
        if (!node.type) {
            if (node.properties || node.additionalProperties || node.patternProperties) {
                node.type = 'object';
            }
            else if (node.items || node.prefixItems) {
                node.type = 'array';
            }
        }
        var types = Array.isArray(node.type) ? node.type : node.type ? [node.type] : [];
        if (types.indexOf('object') !== -1) {
            // Strict mode forbids unknown keys.
            if (node.additionalProperties !== false) {
                node.additionalProperties = false;
            }
            if (node.properties && typeof node.properties === 'object') {
                // Strict mode requires every declared property to be listed
                // in `required` (deduplicated, existing entries first).
                var propKeys = Object.keys(node.properties);
                var merged = (Array.isArray(node.required) ? node.required.slice() : []).concat(propKeys);
                node.required = Array.from(new Set(merged));
                propKeys.forEach(function (key) {
                    visit(node.properties[key]);
                });
            }
            if (node.patternProperties && typeof node.patternProperties === 'object') {
                Object.keys(node.patternProperties).forEach(function (key) {
                    visit(node.patternProperties[key]);
                });
            }
            // Recurse into shared definitions under either spelling.
            ['$defs', 'definitions'].forEach(function (defsKey) {
                if (node[defsKey] && typeof node[defsKey] === 'object') {
                    Object.keys(node[defsKey]).forEach(function (key) {
                        visit(node[defsKey][key]);
                    });
                }
            });
        }
        if (types.indexOf('array') !== -1) {
            if (node.items) {
                if (Array.isArray(node.items)) {
                    node.items.forEach(visit);
                }
                else {
                    visit(node.items);
                }
            }
            if (Array.isArray(node.prefixItems)) {
                node.prefixItems.forEach(visit);
            }
        }
        // Composition keywords can appear on any node type.
        ['oneOf', 'anyOf', 'allOf'].forEach(function (comboKey) {
            if (Array.isArray(node[comboKey])) {
                node[comboKey].forEach(visit);
            }
        });
        if (node.not) {
            visit(node.not);
        }
    };
    visit(root);
    return root;
};
|
|
742
|
+
// -----------------------------------------
// Messages + modules
// -----------------------------------------
/**
 * Splits chat-style messages into Responses API `instructions` (system text)
 * and `input` (user/assistant turns).
 * - this.systemMessages plus all `system`-role message text is joined into a
 *   single instructions string; `undefined` when no non-empty system text exists.
 * - `tool`-role messages are skipped entirely.
 * - Turns whose extracted text is empty are dropped.
 * - Plain-string entries are treated as user messages.
 */
OpenAiResponses.prototype.buildInstructionsAndInput = function (messages) {
    var instructionsParts = [];
    instructionsParts.push.apply(instructionsParts, this.systemMessages);
    var input = [];
    for (var _i = 0, messages_1 = messages; _i < messages_1.length; _i++) {
        var m = messages_1[_i];
        var msg = typeof m === 'string' ? { role: 'user', content: m } : m;
        if (msg.role === 'system') {
            var c = this.extractTextContent(msg.content).trim();
            if (c) {
                instructionsParts.push(c);
            }
            continue;
        }
        if (msg.role === 'tool') {
            // Tool output is not representable as a plain input turn here.
            continue;
        }
        // Any non-assistant, non-system, non-tool role is normalized to `user`.
        var role = msg.role === 'assistant' ? 'assistant' : 'user';
        var content = this.extractTextContent(msg.content).trim();
        if (!content) {
            continue;
        }
        input.push({ role: role, content: content });
    }
    // Fix: run the normalize/filter pipeline once. The original evaluated the
    // identical map(...).filter(Boolean) chain twice — once to test emptiness
    // and again to build the joined string.
    var normalizedParts = instructionsParts
        .map(function (s) { return String(s !== null && s !== void 0 ? s : '').trim(); })
        .filter(Boolean);
    var instructions = normalizedParts.length > 0 ? normalizedParts.join('\n\n') : undefined;
    return { instructions: instructions, input: input };
};
|
|
777
|
+
// Flattens message content to plain text: strings pass through, arrays of
// parts are reduced to their string entries and `{type:'text', text}` parts
// (joined with newlines); anything else yields ''.
OpenAiResponses.prototype.extractTextContent = function (content) {
    if (typeof content === 'string') {
        return content;
    }
    if (!content) {
        return '';
    }
    if (!Array.isArray(content)) {
        return '';
    }
    var pieces = content.map(function (part) {
        if (typeof part === 'string') {
            return part;
        }
        var isTextPart = (part === null || part === void 0 ? void 0 : part.type) === 'text' &&
            typeof (part === null || part === void 0 ? void 0 : part.text) === 'string';
        return isTextPart ? part.text : '';
    });
    return pieces.join('\n');
};
|
|
799
|
+
// Idempotent, concurrency-safe bootstrap for module processing (downleveled
// async method; the switch below is the __generator state machine).
// - Returns immediately once this.modulesProcessed is set.
// - Concurrent callers share the single in-flight processModules() promise.
// - On failure, the cached promise is cleared so a later call can retry,
//   and the error is rethrown to the caller.
OpenAiResponses.prototype.ensureModulesProcessed = function () {
    return __awaiter(this, void 0, void 0, function () {
        var error_2;
        return __generator(this, function (_a) {
            switch (_a.label) {
                case 0:
                    // Fast path: already done.
                    if (this.modulesProcessed) {
                        return [2 /*return*/];
                    }
                    // Another caller started processing; piggyback on its promise.
                    if (this.processingModulesPromise) {
                        return [2 /*return*/, this.processingModulesPromise];
                    }
                    this.processingModulesPromise = this.processModules();
                    _a.label = 1;
                case 1:
                    // Equivalent of: try { await this.processingModulesPromise; ... }
                    _a.trys.push([1, 3, , 4]);
                    return [4 /*yield*/, this.processingModulesPromise];
                case 2:
                    _a.sent();
                    // Mark success only after the awaited work completed.
                    this.modulesProcessed = true;
                    return [3 /*break*/, 4];
                case 3:
                    error_2 = _a.sent();
                    // Clear the cached promise so a subsequent call retries.
                    this.processingModulesPromise = null;
                    throw error_2;
                case 4: return [2 /*return*/];
            }
        });
    });
};
|
|
829
|
+
// Collects configuration from every registered module (downleveled async
// method; the switch below is the __generator state machine). For each module:
// - awaits getSystemMessages() (value or array; nullish -> []), trims and
//   joins the non-empty entries with '. ', and pushes an attributed system
//   message onto this.systemMessages;
// - filters the module's functions through filterFunctions() and appends
//   them to this.functions;
// - pushes a per-function instructions message when one was built.
OpenAiResponses.prototype.processModules = function () {
    return __awaiter(this, void 0, void 0, function () {
        var _i, _a, module_1, moduleName, rawSystem, sysArr, trimmed, formatted, moduleFunctions, filtered, fnInstructions;
        var _b;
        return __generator(this, function (_c) {
            switch (_c.label) {
                case 0:
                    // Nothing to do without modules.
                    if (!this.modules || this.modules.length < 1) {
                        return [2 /*return*/];
                    }
                    _i = 0, _a = this.modules;
                    _c.label = 1;
                case 1:
                    // Loop head: one iteration per module, in order.
                    if (!(_i < _a.length)) return [3 /*break*/, 4];
                    module_1 = _a[_i];
                    moduleName = module_1.getName();
                    // getSystemMessages() may be sync or async; normalize via Promise.resolve.
                    return [4 /*yield*/, Promise.resolve(module_1.getSystemMessages())];
                case 2:
                    rawSystem = _c.sent();
                    // Accept a single value or an array; nullish becomes [].
                    sysArr = Array.isArray(rawSystem) ? rawSystem : rawSystem ? [rawSystem] : [];
                    trimmed = sysArr.map(function (s) { return String(s !== null && s !== void 0 ? s : '').trim(); }).filter(Boolean);
                    if (trimmed.length > 0) {
                        formatted = trimmed.join('. ');
                        this.systemMessages.push("The following are instructions from the ".concat(moduleName, " module:\n").concat(formatted));
                    }
                    moduleFunctions = module_1.getFunctions();
                    filtered = this.filterFunctions(moduleFunctions);
                    (_b = this.functions).push.apply(_b, filtered);
                    fnInstructions = this.buildFunctionInstructionsMessage(moduleName, filtered);
                    if (fnInstructions) {
                        this.systemMessages.push(fnInstructions);
                    }
                    _c.label = 3;
                case 3:
                    _i++;
                    return [3 /*break*/, 1];
                case 4: return [2 /*return*/];
            }
        });
    });
};
|
|
870
|
+
// Restricts a function list to this.allowedFunctionNames (no-op when the
// allow-list is absent/empty). A function is kept when either its full
// definition name or its last dot-separated segment appears in the list.
OpenAiResponses.prototype.filterFunctions = function (functions) {
    var allowedNames = this.allowedFunctionNames;
    if (!allowedNames || allowedNames.length < 1) {
        return functions;
    }
    var allow = new Set();
    allowedNames.forEach(function (n) {
        var normalized = String(n).trim();
        if (normalized) {
            allow.add(normalized);
        }
    });
    return functions.filter(function (f) {
        var def = f.definition;
        var rawName = def === null || def === void 0 ? void 0 : def.name;
        var fullName = String(rawName !== null && rawName !== void 0 ? rawName : '').trim();
        if (!fullName) {
            return false;
        }
        var segments = fullName.split('.');
        var shortName = segments[segments.length - 1];
        if (shortName === undefined) {
            shortName = fullName;
        }
        return allow.has(fullName) || allow.has(shortName);
    });
};
|
|
885
|
+
// Builds a single system message aggregating per-function instructions for a
// module, or null when no function contributed any non-empty instructions.
OpenAiResponses.prototype.buildFunctionInstructionsMessage = function (moduleName, functions) {
    var entries = [];
    functions.forEach(function (f) {
        var def = f.definition;
        var rawName = def === null || def === void 0 ? void 0 : def.name;
        var name = String(rawName !== null && rawName !== void 0 ? rawName : '').trim();
        var instructions = f.instructions;
        if (!name || !instructions || instructions.length < 1) {
            return;
        }
        // Collapse the instruction list into one sentence-joined paragraph.
        var paragraph = instructions
            .map(function (s) { return String(s !== null && s !== void 0 ? s : '').trim(); })
            .filter(Boolean)
            .join('. ');
        if (!paragraph) {
            return;
        }
        entries.push(" ".concat(name, ": ").concat(paragraph, "."));
    });
    if (entries.length < 1) {
        return null;
    }
    return "The following are instructions from functions in the ".concat(moduleName, " module:") + entries.join('');
};
|
|
908
|
+
// -----------------------------------------
// Model/background defaults
// -----------------------------------------
// Resolves the model name: explicit argument wins over this.defaultModel;
// a blank result falls back to DEFAULT_RESPONSES_MODEL.
OpenAiResponses.prototype.resolveModel = function (model) {
    var chosen = model !== null && model !== void 0 ? model : this.defaultModel;
    var normalized = chosen.trim();
    if (normalized.length > 0) {
        return normalized;
    }
    return exports.DEFAULT_RESPONSES_MODEL;
};
|
|
915
|
+
// Decides whether to run the request in background mode. An explicit boolean
// request always wins; otherwise background is used for "pro" models or
// high/xhigh reasoning effort.
OpenAiResponses.prototype.resolveBackgroundMode = function (args) {
    if (typeof args.requested === 'boolean') {
        return args.requested;
    }
    return this.isProModel(args.model) || this.isHighReasoningEffort(args.reasoningEffort);
};
|
|
927
|
+
// True when the model name contains "pro" as its own token, delimited by
// start/end of string or by '-', '_' or '.' (case-insensitive).
OpenAiResponses.prototype.isProModel = function (model) {
    var name = model !== null && model !== void 0 ? model : '';
    return /(^|[-_.])pro($|[-_.])/.test(String(name).toLowerCase());
};
|
|
931
|
+
// True for the 'high' / 'xhigh' reasoning-effort levels (case-insensitive;
// nullish input is treated as '').
OpenAiResponses.prototype.isHighReasoningEffort = function (effort) {
    var normalized = String(effort !== null && effort !== void 0 ? effort : '').toLowerCase();
    return ['high', 'xhigh'].indexOf(normalized) !== -1;
};
|
|
935
|
+
return OpenAiResponses;
|
|
936
|
+
}());
|
|
937
|
+
exports.OpenAiResponses = OpenAiResponses;
|
|
938
|
+
// Resolves (with undefined) after roughly `ms` milliseconds.
function sleep(ms) {
    return new Promise(function (resolve) {
        setTimeout(resolve, ms);
    });
}
|
|
941
|
+
//# sourceMappingURL=OpenAiResponses.js.map
|