@proteinjs/conversation 2.5.0 → 2.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/CHANGELOG.md +29 -0
  2. package/dist/index.d.ts +2 -1
  3. package/dist/index.d.ts.map +1 -1
  4. package/dist/index.js +2 -0
  5. package/dist/index.js.map +1 -1
  6. package/dist/src/Conversation.d.ts.map +1 -1
  7. package/dist/src/Conversation.js +12 -16
  8. package/dist/src/Conversation.js.map +1 -1
  9. package/dist/src/OpenAi.js +3 -3
  10. package/dist/src/OpenAi.js.map +1 -1
  11. package/dist/src/OpenAiResponses.d.ts +158 -0
  12. package/dist/src/OpenAiResponses.d.ts.map +1 -0
  13. package/dist/src/OpenAiResponses.js +1621 -0
  14. package/dist/src/OpenAiResponses.js.map +1 -0
  15. package/dist/src/OpenAiStreamProcessor.js +4 -4
  16. package/dist/src/OpenAiStreamProcessor.js.map +1 -1
  17. package/dist/src/UsageData.d.ts +39 -4
  18. package/dist/src/UsageData.d.ts.map +1 -1
  19. package/dist/src/UsageData.js +302 -11
  20. package/dist/src/UsageData.js.map +1 -1
  21. package/dist/src/fs/conversation_fs/ConversationFsModule.d.ts.map +1 -1
  22. package/dist/src/fs/conversation_fs/ConversationFsModule.js +1 -0
  23. package/dist/src/fs/conversation_fs/ConversationFsModule.js.map +1 -1
  24. package/dist/src/fs/conversation_fs/FsFunctions.d.ts +26 -0
  25. package/dist/src/fs/conversation_fs/FsFunctions.d.ts.map +1 -1
  26. package/dist/src/fs/conversation_fs/FsFunctions.js +68 -27
  27. package/dist/src/fs/conversation_fs/FsFunctions.js.map +1 -1
  28. package/index.ts +2 -1
  29. package/package.json +4 -4
  30. package/src/Conversation.ts +14 -17
  31. package/src/OpenAi.ts +3 -3
  32. package/src/OpenAiResponses.ts +1869 -0
  33. package/src/OpenAiStreamProcessor.ts +3 -3
  34. package/src/UsageData.ts +376 -13
  35. package/src/fs/conversation_fs/ConversationFsModule.ts +2 -0
  36. package/src/fs/conversation_fs/FsFunctions.ts +32 -2
@@ -0,0 +1,1621 @@
1
"use strict";
// --- TypeScript downlevel helpers emitted by tsc (standard tslib-equivalent shapes). ---
// Generated code: edit the .ts source (src/OpenAiResponses.ts), not this dist file.
// Prototype-chain `extends` shim for ES5 targets.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        if (typeof b !== "function" && b !== null)
            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
// async/await shim: drives a generator with Promise resolution.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Generator state-machine shim: opcodes are [0]=next, [1]=throw, [2]=return,
// [3]=break-to-label, [4]=yield, [5]=yield*, [6]=catch, [7]=endfinally.
var __generator = (this && this.__generator) || function (thisArg, body) {
    var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
    return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
    function verb(n) { return function (v) { return step([n, v]); }; }
    function step(op) {
        if (f) throw new TypeError("Generator is already executing.");
        while (g && (g = 0, op[0] && (_ = 0)), _) try {
            if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
            if (y = 0, t) op = [op[0] & 2, t.value];
            switch (op[0]) {
                case 0: case 1: t = op; break;
                case 4: _.label++; return { value: op[1], done: false };
                case 5: _.label++; y = op[1]; op = [0]; continue;
                case 7: op = _.ops.pop(); _.trys.pop(); continue;
                default:
                    if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
                    if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
                    if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
                    if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
                    if (t[2]) _.ops.pop();
                    _.trys.pop(); continue;
            }
            op = body.call(thisArg, _);
        } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
        if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
    }
};
// Spread (`[...a, ...b]`) shim for ES5 targets.
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
    if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
        if (ar || !(i in from)) {
            if (!ar) ar = Array.prototype.slice.call(from, 0, i);
            ar[i] = from[i];
        }
    }
    return to.concat(ar || Array.prototype.slice.call(from));
};
62
Object.defineProperty(exports, "__esModule", { value: true });
// Exported surface of this module (OpenAiResponsesError / OpenAiResponses are assigned later in the file).
exports.OpenAiResponsesError = exports.OpenAiResponses = exports.DEFAULT_MAX_BACKGROUND_WAIT_MS = exports.DEFAULT_MAX_TOOL_CALLS = exports.DEFAULT_RESPONSES_MODEL = void 0;
var openai_1 = require("openai");
var logger_1 = require("@proteinjs/logger");
var UsageData_1 = require("./UsageData");
// NOTE(review): not referenced in this visible chunk — presumably used further down the file; verify before removing.
var ChatCompletionMessageParamFactory_1 = require("./ChatCompletionMessageParamFactory");
/** Model used when neither the call nor the constructor specifies one. */
exports.DEFAULT_RESPONSES_MODEL = 'gpt-5.2';
/** Default cap on tool invocations per run (the tool loop throws when exceeded). */
exports.DEFAULT_MAX_TOOL_CALLS = 50;
/** Default hard cap for background-mode polling duration (ms): 1 hour. */
exports.DEFAULT_MAX_BACKGROUND_WAIT_MS = 60 * 60 * 1000;
/** Best-effort timeout for cancel calls (avoid hanging abort/timeout paths). */
var DEFAULT_CANCEL_TIMEOUT_MS = 10000;
/**
 * OpenAI Responses API wrapper (tool-loop + usage tracking + ConversationModules).
 * - Uses Responses API directly
 * - Supports custom function tools (tool calling loop)
 * - Supports structured outputs (JSON schema / Zod)
 * - Tracks usage + tool calls using existing types
 * - Supports background mode (polling)
 * - Supports ConversationModules (system messages + tool registration)
 */
83
var OpenAiResponses = /** @class */ (function () {
    /**
     * @param {object} [opts] - Optional configuration. Fields read here:
     *   logLevel, modules, allowedFunctionNames, defaultModel, maxToolCalls,
     *   maxBackgroundWaitMs.
     */
    function OpenAiResponses(opts) {
        if (opts === void 0) { opts = {}; }
        var _a, _b;
        // Module-processing state (lazy; driven by ensureModulesProcessed, defined elsewhere in this file).
        this.modulesProcessed = false;
        this.processingModulesPromise = null;
        this.systemMessages = [];
        this.functions = [];
        // NOTE(review): no client options passed — the SDK presumably reads credentials
        // from the environment (e.g. OPENAI_API_KEY); confirm against deployment config.
        this.client = new openai_1.OpenAI();
        this.logger = new logger_1.Logger({ name: 'OpenAiResponses', logLevel: opts.logLevel });
        this.modules = (_a = opts.modules) !== null && _a !== void 0 ? _a : [];
        this.allowedFunctionNames = opts.allowedFunctionNames;
        this.defaultModel = (_b = opts.defaultModel) !== null && _b !== void 0 ? _b : exports.DEFAULT_RESPONSES_MODEL;
        this.defaultMaxToolCalls = typeof opts.maxToolCalls === 'number' ? opts.maxToolCalls : exports.DEFAULT_MAX_TOOL_CALLS;
        // Accept only a finite, positive wait cap; otherwise fall back to the 1-hour default.
        this.defaultMaxBackgroundWaitMs =
            typeof opts.maxBackgroundWaitMs === 'number' &&
                Number.isFinite(opts.maxBackgroundWaitMs) &&
                opts.maxBackgroundWaitMs > 0
                ? Math.floor(opts.maxBackgroundWaitMs)
                : exports.DEFAULT_MAX_BACKGROUND_WAIT_MS;
    }
104
    /** Plain text generation (supports tool calling). */
    // Compiled async method. Flow: ensure modules are processed, resolve model /
    // background-mode / limits, delegate to run() with no textFormat (plain text),
    // then invoke args.onUsageData (if given) with result.usagedata before returning
    // the run() result unchanged.
    OpenAiResponses.prototype.generateText = function (args) {
        return __awaiter(this, void 0, void 0, function () {
            var model, backgroundMode, maxToolCalls, maxBackgroundWaitMs, result;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, this.ensureModulesProcessed()];
                    case 1:
                        _a.sent();
                        model = this.resolveModel(args.model);
                        backgroundMode = this.resolveBackgroundMode({
                            requested: args.backgroundMode,
                            model: model,
                            reasoningEffort: args.reasoningEffort,
                        });
                        // Per-call maxToolCalls wins over the instance default only when it is a number.
                        maxToolCalls = typeof args.maxToolCalls === 'number' ? args.maxToolCalls : this.defaultMaxToolCalls;
                        maxBackgroundWaitMs = this.resolveMaxBackgroundWaitMs(args.maxBackgroundWaitMs);
                        return [4 /*yield*/, this.run({
                                model: model,
                                messages: args.messages,
                                temperature: args.temperature,
                                topP: args.topP,
                                maxTokens: args.maxTokens,
                                abortSignal: args.abortSignal,
                                onToolInvocation: args.onToolInvocation,
                                reasoningEffort: args.reasoningEffort,
                                maxToolCalls: maxToolCalls,
                                backgroundMode: backgroundMode,
                                maxBackgroundWaitMs: maxBackgroundWaitMs,
                                textFormat: undefined,
                                serviceTier: args.serviceTier,
                            })];
                    case 2:
                        result = _a.sent();
                        if (!args.onUsageData) return [3 /*break*/, 4];
                        // NOTE: the run() result key is `usagedata` (lower-case d) — matches run()'s return shape below.
                        return [4 /*yield*/, args.onUsageData(result.usagedata)];
                    case 3:
                        _a.sent();
                        _a.label = 4;
                    case 4: return [2 /*return*/, result];
                }
            });
        });
    };
148
    /** Back-compat alias for callers that use `generateResponse`. */
    // Thin delegating wrapper: forwards args to generateText and returns its promise.
    OpenAiResponses.prototype.generateResponse = function (args) {
        return __awaiter(this, void 0, void 0, function () {
            return __generator(this, function (_a) {
                return [2 /*return*/, this.generateText(args)];
            });
        });
    };
156
    /** Structured object generation (supports tool calling). */
    // Compiled async method. Like generateText, but builds a text format from
    // args.schema, parses/validates the model's text into an object, and returns
    // { object, usageData } (note the camelCase key here, mapped from run()'s `usagedata`).
    OpenAiResponses.prototype.generateObject = function (args) {
        return __awaiter(this, void 0, void 0, function () {
            var model, backgroundMode, maxToolCalls, maxBackgroundWaitMs, textFormat, result, object, outcome;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0: return [4 /*yield*/, this.ensureModulesProcessed()];
                    case 1:
                        _a.sent();
                        model = this.resolveModel(args.model);
                        backgroundMode = this.resolveBackgroundMode({
                            requested: args.backgroundMode,
                            model: model,
                            reasoningEffort: args.reasoningEffort,
                        });
                        maxToolCalls = typeof args.maxToolCalls === 'number' ? args.maxToolCalls : this.defaultMaxToolCalls;
                        maxBackgroundWaitMs = this.resolveMaxBackgroundWaitMs(args.maxBackgroundWaitMs);
                        // Structured-output format derived from the caller's schema (JSON schema / Zod per class docs).
                        textFormat = this.buildTextFormat(args.schema);
                        return [4 /*yield*/, this.run({
                                model: model,
                                messages: args.messages,
                                temperature: args.temperature,
                                topP: args.topP,
                                maxTokens: args.maxTokens,
                                abortSignal: args.abortSignal,
                                onToolInvocation: args.onToolInvocation,
                                reasoningEffort: args.reasoningEffort,
                                maxToolCalls: maxToolCalls,
                                backgroundMode: backgroundMode,
                                maxBackgroundWaitMs: maxBackgroundWaitMs,
                                textFormat: textFormat,
                                serviceTier: args.serviceTier,
                            })];
                    case 2:
                        result = _a.sent();
                        object = this.parseAndValidateStructuredOutput(result.message, args.schema, {
                            model: model,
                            maxOutputTokens: args.maxTokens,
                            requestedServiceTier: args.serviceTier,
                            serviceTier: result.serviceTier,
                        });
                        outcome = {
                            object: object,
                            usageData: result.usagedata,
                        };
                        if (!args.onUsageData) return [3 /*break*/, 4];
                        return [4 /*yield*/, args.onUsageData(outcome.usageData)];
                    case 3:
                        _a.sent();
                        _a.label = 4;
                    case 4: return [2 /*return*/, outcome];
                }
            });
        });
    };
211
    // -----------------------------------------
    // Core runner (tool loop)
    // -----------------------------------------
    /**
     * Create-response loop: issues a Responses API request, executes any returned
     * function calls, and re-issues with `previous_response_id` + tool outputs until
     * the model returns plain assistant text (or limits/errors stop it).
     * Returns { message, usagedata, toolInvocations, serviceTier }.
     * Throws on: unusable response status, empty response, tool-call budget
     * exceeded, or a tool-calling response missing an id.
     * (Compiled async state machine: case 1→2 is one loop iteration; case 4 jumps back to 1.)
     */
    OpenAiResponses.prototype.run = function (args) {
        return __awaiter(this, void 0, void 0, function () {
            var usage, toolInvocations, tools, _a, instructions, input, toolCallsExecuted, previousResponseId, nextInput, response, functionCalls, message, toolOutputs;
            return __generator(this, function (_b) {
                switch (_b.label) {
                    case 0:
                        usage = new UsageData_1.UsageDataAccumulator({ model: args.model });
                        toolInvocations = [];
                        tools = this.buildResponseTools(this.functions);
                        _a = this.buildInstructionsAndInput(args.messages), instructions = _a.instructions, input = _a.input;
                        toolCallsExecuted = 0;
                        // previousResponseId stays undefined on the first iteration.
                        nextInput = input;
                        _b.label = 1;
                    case 1: return [4 /*yield*/, this.createResponseAndMaybeWait({
                            model: args.model,
                            // Always pass instructions; they are not carried over with previous_response_id.
                            instructions: instructions,
                            input: nextInput,
                            previousResponseId: previousResponseId,
                            tools: tools,
                            temperature: args.temperature,
                            topP: args.topP,
                            maxTokens: args.maxTokens,
                            reasoningEffort: args.reasoningEffort,
                            textFormat: args.textFormat,
                            backgroundMode: args.backgroundMode,
                            maxBackgroundWaitMs: args.maxBackgroundWaitMs,
                            abortSignal: args.abortSignal,
                            serviceTier: args.serviceTier,
                        })];
                    case 2:
                        response = _b.sent();
                        this.addUsageFromResponse(response, usage, { requestedServiceTier: args.serviceTier });
                        // For structured outputs we should not attempt to parse incomplete/failed/cancelled responses.
                        // For plain-text generation, we allow "incomplete" to pass through (partial output),
                        // but still fail on other non-completed statuses.
                        this.throwIfResponseUnusable(response, {
                            allowIncomplete: !args.textFormat,
                            model: args.model,
                            maxOutputTokens: args.maxTokens,
                            requestedServiceTier: args.serviceTier,
                        });
                        functionCalls = this.extractFunctionCalls(response);
                        if (functionCalls.length < 1) {
                            // Terminal iteration: no tool calls — extract the assistant text and return.
                            message = this.extractAssistantText(response);
                            if (!message) {
                                throw new Error("Response was empty");
                            }
                            return [2 /*return*/, {
                                    message: message,
                                    usagedata: usage.usageData,
                                    toolInvocations: toolInvocations,
                                    serviceTier: response.service_tier ? response.service_tier : undefined,
                                }];
                        }
                        // Budget check happens BEFORE executing this batch of calls.
                        if (toolCallsExecuted + functionCalls.length > args.maxToolCalls) {
                            throw new Error("Max tool calls (".concat(args.maxToolCalls, ") reached. Stopping execution."));
                        }
                        if (!response.id) {
                            throw new Error("Responses API did not return an id for a tool-calling response.");
                        }
                        return [4 /*yield*/, this.executeFunctionCalls({
                                calls: functionCalls,
                                functions: this.functions,
                                usage: usage,
                                toolInvocations: toolInvocations,
                                onToolInvocation: args.onToolInvocation,
                            })];
                    case 3:
                        toolOutputs = _b.sent();
                        toolCallsExecuted += functionCalls.length;
                        // Next iteration chains off this response and feeds back the tool outputs.
                        previousResponseId = response.id;
                        nextInput = toolOutputs;
                        this.logger.debug({
                            message: "Tool loop continuing",
                            obj: { toolCallsExecuted: toolCallsExecuted, lastToolCallCount: functionCalls.length, responseId: previousResponseId },
                        });
                        _b.label = 4;
                    case 4: return [3 /*break*/, 1];
                    case 5: return [2 /*return*/];
                }
            });
        });
    };
298
    /**
     * Validate a Responses API response status. Returns silently when the status
     * is missing/"completed", or "incomplete" while opts.allowIncomplete is set;
     * otherwise throws an OpenAiResponsesError (code RESPONSE_STATUS) whose message
     * and `details` carry extensive diagnostics (ids, usage tokens, text tails, etc.).
     * Uses `truncateTail` and `OpenAiResponsesError` defined later in this file.
     */
    OpenAiResponses.prototype.throwIfResponseUnusable = function (response, opts) {
        var _a;
        var statusRaw = typeof (response === null || response === void 0 ? void 0 : response.status) === 'string' ? String(response.status) : '';
        var status = statusRaw.toLowerCase();
        if (!status || status === 'completed') {
            return;
        }
        if (status === 'incomplete' && opts.allowIncomplete) {
            return;
        }
        // Past this point the response is unusable — collect diagnostics and throw.
        var id = typeof (response === null || response === void 0 ? void 0 : response.id) === 'string' ? response.id : '';
        var reason = (_a = response === null || response === void 0 ? void 0 : response.incomplete_details) === null || _a === void 0 ? void 0 : _a.reason;
        var apiErr = response === null || response === void 0 ? void 0 : response.error;
        var serviceTier = typeof (response === null || response === void 0 ? void 0 : response.service_tier) === 'string' && response.service_tier.trim() ? response.service_tier.trim() : '';
        var directOutputText = typeof (response === null || response === void 0 ? void 0 : response.output_text) === 'string' ? response.output_text : '';
        var assistantText = this.extractAssistantText(response);
        var outTextLen = directOutputText ? directOutputText.length : 0;
        var assistantLen = assistantText ? assistantText.length : 0;
        var usage = response === null || response === void 0 ? void 0 : response.usage;
        var inputTokens = typeof (usage === null || usage === void 0 ? void 0 : usage.input_tokens) === 'number' ? usage.input_tokens : undefined;
        var outputTokens = typeof (usage === null || usage === void 0 ? void 0 : usage.output_tokens) === 'number' ? usage.output_tokens : undefined;
        // Prefer the API's total; fall back to input + output when both are present.
        var totalTokens = typeof (usage === null || usage === void 0 ? void 0 : usage.total_tokens) === 'number'
            ? usage.total_tokens
            : typeof inputTokens === 'number' && typeof outputTokens === 'number'
                ? inputTokens + outputTokens
                : undefined;
        var msg = "Responses API returned status=\"".concat(status, "\"");
        if (id) {
            msg += " (id=".concat(id, ")");
        }
        msg += ".";
        var details = {
            response_id: id || undefined,
            status: status,
            model: typeof opts.model === 'string' && opts.model.trim() ? opts.model : undefined,
            max_output_tokens: typeof opts.maxOutputTokens === 'number' ? opts.maxOutputTokens : undefined,
            requested_service_tier: typeof opts.requestedServiceTier === 'string' && opts.requestedServiceTier.trim()
                ? opts.requestedServiceTier.trim()
                : undefined,
            service_tier: serviceTier || undefined,
            incomplete_reason: typeof reason === 'string' && reason.trim() ? reason : undefined,
            api_error: apiErr !== null && apiErr !== void 0 ? apiErr : undefined,
            usage_input_tokens: inputTokens,
            usage_output_tokens: outputTokens,
            usage_total_tokens: totalTokens,
            output_text_len: outTextLen || undefined,
            // Only the tail (last 400 chars) of any partial text is attached, to bound detail size.
            output_text_tail: outTextLen > 0 ? truncateTail(directOutputText, 400) : undefined,
            assistant_text_len: assistantLen || undefined,
            assistant_text_tail: assistantLen > 0 ? truncateTail(assistantText, 400) : undefined,
        };
        // Mirror the most useful details into the human-readable message.
        var extra = [];
        if (details.model) {
            extra.push("model=".concat(details.model));
        }
        if (typeof details.max_output_tokens === 'number') {
            extra.push("max_output_tokens=".concat(details.max_output_tokens));
        }
        if (typeof details.requested_service_tier === 'string') {
            extra.push("requested_service_tier=".concat(details.requested_service_tier));
        }
        if (typeof details.service_tier === 'string') {
            extra.push("service_tier=".concat(details.service_tier));
        }
        if (details.incomplete_reason) {
            extra.push("reason=".concat(details.incomplete_reason));
        }
        if (typeof details.output_text_len === 'number') {
            extra.push("output_text_len=".concat(details.output_text_len));
        }
        if (typeof details.assistant_text_len === 'number') {
            extra.push("assistant_text_len=".concat(details.assistant_text_len));
        }
        if (extra.length > 0) {
            msg += " ".concat(extra.join(' '), ".");
        }
        throw new OpenAiResponsesError({
            code: 'RESPONSE_STATUS',
            message: msg,
            details: details,
        });
    };
379
    /**
     * Wrap a raw SDK/network error into an OpenAiResponsesError (code OPENAI_API),
     * preserving the original as `cause` and flattening request context (`meta`)
     * into both the message string and a structured `details` object.
     * `extractHttpStatus` / `extractRequestId` / `isRetryableHttpStatus` /
     * `isAbortError` are helpers defined later in this file.
     */
    OpenAiResponses.prototype.toOpenAiApiError = function (error, meta) {
        var status = extractHttpStatus(error);
        var requestId = extractRequestId(error);
        var retryable = isRetryableHttpStatus(status);
        var errMsg = error instanceof Error ? error.message : String(error !== null && error !== void 0 ? error : '');
        var errName = error instanceof Error ? error.name : undefined;
        // Aborted either by the caller's explicit flag or by the error's own shape.
        var aborted = meta.aborted === true || isAbortError(error);
        var msg = "OpenAI ".concat(meta.operation, " failed.");
        var extra = [];
        if (aborted) {
            extra.push("aborted=true");
        }
        if (typeof status === 'number') {
            extra.push("status=".concat(status));
        }
        if (requestId) {
            extra.push("requestId=".concat(requestId));
        }
        if (meta.responseId) {
            extra.push("responseId=".concat(meta.responseId));
        }
        if (meta.backgroundMode) {
            extra.push("background=true");
        }
        if (typeof meta.pollAttempt === 'number') {
            extra.push("pollAttempt=".concat(meta.pollAttempt));
        }
        if (typeof meta.waitedMs === 'number') {
            extra.push("waitedMs=".concat(meta.waitedMs));
        }
        if (typeof meta.maxWaitMs === 'number') {
            extra.push("maxWaitMs=".concat(meta.maxWaitMs));
        }
        if (typeof meta.lastStatus === 'string' && meta.lastStatus.trim()) {
            extra.push("lastStatus=".concat(meta.lastStatus.trim()));
        }
        if (typeof meta.model === 'string' && meta.model.trim()) {
            extra.push("model=".concat(meta.model.trim()));
        }
        if (meta.reasoningEffort) {
            extra.push("reasoningEffort=".concat(meta.reasoningEffort));
        }
        if (typeof meta.requestedServiceTier === 'string' && meta.requestedServiceTier.trim()) {
            extra.push("requested_service_tier=".concat(meta.requestedServiceTier.trim()));
        }
        if (typeof meta.serviceTier === 'string' && meta.serviceTier.trim()) {
            extra.push("service_tier=".concat(meta.serviceTier.trim()));
        }
        if (extra.length > 0) {
            msg += " ".concat(extra.join(' '), ".");
        }
        if (errMsg) {
            // JSON.stringify quotes/escapes the raw error message so it stays one token in logs.
            msg += " error=".concat(JSON.stringify(errMsg), ".");
        }
        var details = {
            operation: meta.operation,
            status: typeof status === 'number' ? status : undefined,
            request_id: requestId,
            response_id: meta.responseId,
            previous_response_id: meta.previousResponseId,
            background: meta.backgroundMode ? true : undefined,
            poll_attempt: meta.pollAttempt,
            waited_ms: meta.waitedMs,
            max_wait_ms: meta.maxWaitMs,
            last_status: typeof meta.lastStatus === 'string' && meta.lastStatus.trim() ? meta.lastStatus.trim() : undefined,
            model: typeof meta.model === 'string' && meta.model.trim() ? meta.model.trim() : undefined,
            reasoning_effort: meta.reasoningEffort,
            requested_service_tier: typeof meta.requestedServiceTier === 'string' && meta.requestedServiceTier.trim()
                ? meta.requestedServiceTier.trim()
                : undefined,
            service_tier: typeof meta.serviceTier === 'string' && meta.serviceTier.trim() ? meta.serviceTier.trim() : undefined,
            error_name: errName,
            aborted: aborted ? true : undefined,
        };
        return new OpenAiResponsesError({
            code: 'OPENAI_API',
            message: msg,
            details: details,
            cause: error,
            retryable: retryable,
        });
    };
461
+ OpenAiResponses.prototype.resolveMaxBackgroundWaitMs = function (ms) {
462
+ var n = typeof ms === 'number' && Number.isFinite(ms) && ms > 0 ? Math.floor(ms) : this.defaultMaxBackgroundWaitMs;
463
+ // Ensure we never return a non-positive number even if misconfigured elsewhere.
464
+ return n > 0 ? n : exports.DEFAULT_MAX_BACKGROUND_WAIT_MS;
465
+ };
466
+ OpenAiResponses.prototype.cancelResponseBestEffort = function (responseId) {
467
+ return __awaiter(this, void 0, void 0, function () {
468
+ var resp, e_1;
469
+ return __generator(this, function (_a) {
470
+ switch (_a.label) {
471
+ case 0:
472
+ if (!responseId) {
473
+ return [2 /*return*/, { attempted: false }];
474
+ }
475
+ _a.label = 1;
476
+ case 1:
477
+ _a.trys.push([1, 3, , 4]);
478
+ return [4 /*yield*/, this.client.responses.cancel(responseId)];
479
+ case 2:
480
+ resp = _a.sent();
481
+ // Docs show cancelled as the post-cancel status.
482
+ if ((resp === null || resp === void 0 ? void 0 : resp.status) === 'cancelled') {
483
+ return [2 /*return*/, { attempted: true, ok: true }];
484
+ }
485
+ return [2 /*return*/, {
486
+ attempted: true,
487
+ ok: false,
488
+ error: {
489
+ message: 'Cancel did not return status=cancelled',
490
+ status: resp === null || resp === void 0 ? void 0 : resp.status,
491
+ },
492
+ }];
493
+ case 3:
494
+ e_1 = _a.sent();
495
+ return [2 /*return*/, { attempted: true, ok: false, error: safeErrorSummary(e_1) }];
496
+ case 4: return [2 /*return*/];
497
+ }
498
+ });
499
+ });
500
+ };
501
    /**
     * Build the Responses API request body from args (only setting fields that were
     * provided), call responses.create, and — when background mode is on and the
     * created response has an id — poll via waitForCompletion until terminal.
     * Create errors are re-thrown wrapped by toOpenAiApiError.
     */
    OpenAiResponses.prototype.createResponseAndMaybeWait = function (args) {
        var _a;
        return __awaiter(this, void 0, void 0, function () {
            var body, created, error_1;
            return __generator(this, function (_b) {
                switch (_b.label) {
                    case 0:
                        body = {
                            model: args.model,
                            input: args.input,
                        };
                        // Optional fields: only attach what the caller actually supplied.
                        if (args.instructions) {
                            body.instructions = args.instructions;
                        }
                        if (args.previousResponseId) {
                            body.previous_response_id = args.previousResponseId;
                        }
                        if (args.tools.length > 0) {
                            body.tools = args.tools;
                        }
                        if (typeof args.temperature === 'number') {
                            body.temperature = args.temperature;
                        }
                        if (typeof args.topP === 'number') {
                            body.top_p = args.topP;
                        }
                        if (typeof args.maxTokens === 'number') {
                            body.max_output_tokens = args.maxTokens;
                        }
                        if (args.reasoningEffort) {
                            body.reasoning = { effort: args.reasoningEffort };
                        }
                        if (args.textFormat) {
                            body.text = { format: args.textFormat };
                        }
                        if (typeof args.serviceTier === 'string' && args.serviceTier.trim()) {
                            body.service_tier = args.serviceTier.trim();
                        }
                        if (args.backgroundMode) {
                            // Background responses must be stored so they can be retrieved by id while polling.
                            body.background = true;
                            body.store = true;
                        }
                        _b.label = 1;
                    case 1:
                        _b.trys.push([1, 3, , 4]);
                        return [4 /*yield*/, this.client.responses.create(body, args.abortSignal ? { signal: args.abortSignal } : undefined)];
                    case 2:
                        created = _b.sent();
                        return [3 /*break*/, 4];
                    case 3:
                        error_1 = _b.sent();
                        throw this.toOpenAiApiError(error_1, {
                            operation: 'responses.create',
                            model: args.model,
                            reasoningEffort: args.reasoningEffort,
                            backgroundMode: args.backgroundMode,
                            previousResponseId: args.previousResponseId,
                            aborted: ((_a = args.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) ? true : undefined,
                            requestedServiceTier: args.serviceTier,
                        });
                    case 4:
                        if (!args.backgroundMode) {
                            return [2 /*return*/, created];
                        }
                        // A background response without an id cannot be polled; return it as-is.
                        if (!(created === null || created === void 0 ? void 0 : created.id)) {
                            return [2 /*return*/, created];
                        }
                        return [4 /*yield*/, this.waitForCompletion(created.id, args.abortSignal, {
                                model: args.model,
                                reasoningEffort: args.reasoningEffort,
                                maxWaitMs: this.resolveMaxBackgroundWaitMs(args.maxBackgroundWaitMs),
                                requestedServiceTier: args.serviceTier,
                            })];
                    case 5: return [2 /*return*/, _b.sent()];
                }
            });
        });
    };
579
    /**
     * Poll a background response by id until it reaches a terminal status
     * (completed / failed / incomplete / cancelled), honoring the caller's abort
     * signal and the resolved max-wait cap. On abort or timeout, attempts a single
     * best-effort cancel and throws an OpenAiResponsesError with full polling
     * context. Retrieval errors are wrapped via toOpenAiApiError. Polls at a fixed
     * 1s interval via `sleepWithAbort` (defined later in this file) and logs a
     * warning every 10 minutes of elapsed wait.
     */
    OpenAiResponses.prototype.waitForCompletion = function (responseId, abortSignal, ctx) {
        return __awaiter(this, void 0, void 0, function () {
            var maxWaitMs, startedAtMs, delayMs, pollAttempt, lastStatus, cancelAttempted, warnEveryMs, nextWarnAtMs, throwPollingStop, waitedMs, resp, error_2, status_1;
            var _this = this;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0:
                        this.logger.debug({ message: 'Waiting for completion', obj: { responseId: responseId } });
                        maxWaitMs = this.resolveMaxBackgroundWaitMs(ctx === null || ctx === void 0 ? void 0 : ctx.maxWaitMs);
                        startedAtMs = Date.now();
                        delayMs = 1000;
                        pollAttempt = 0;
                        lastStatus = '';
                        cancelAttempted = false;
                        warnEveryMs = 10 * 60 * 1000;
                        nextWarnAtMs = warnEveryMs;
                        // Shared exit path for abort/timeout: cancel once (best-effort), then throw
                        // with accumulated polling context. Declared as a closure so both the
                        // pre-poll checks and the retrieve error handler can use it.
                        throwPollingStop = function (args) { return __awaiter(_this, void 0, void 0, function () {
                            var waitedMs, cancel, baseDetails, msg;
                            return __generator(this, function (_a) {
                                switch (_a.label) {
                                    case 0:
                                        waitedMs = Date.now() - startedAtMs;
                                        cancel = undefined;
                                        // `cancelAttempted` guards against a second cancel on re-entry.
                                        if (!!cancelAttempted) return [3 /*break*/, 2];
                                        cancelAttempted = true;
                                        return [4 /*yield*/, this.cancelResponseBestEffort(responseId)];
                                    case 1:
                                        cancel = _a.sent();
                                        _a.label = 2;
                                    case 2:
                                        baseDetails = {
                                            operation: 'responses.retrieve',
                                            response_id: responseId,
                                            background: true,
                                            poll_attempt: pollAttempt,
                                            waited_ms: waitedMs,
                                            max_wait_ms: maxWaitMs,
                                            last_status: lastStatus || undefined,
                                            model: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.model) === 'string' && ctx.model.trim() ? ctx.model.trim() : undefined,
                                            reasoning_effort: ctx === null || ctx === void 0 ? void 0 : ctx.reasoningEffort,
                                            requested_service_tier: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.requestedServiceTier) === 'string' && ctx.requestedServiceTier.trim()
                                                ? ctx.requestedServiceTier.trim()
                                                : undefined,
                                            aborted: args.kind === 'aborted' ? true : undefined,
                                            timeout: args.kind === 'timeout' ? true : undefined,
                                            cancel_attempted: (cancel === null || cancel === void 0 ? void 0 : cancel.attempted) ? true : undefined,
                                            cancel_ok: cancel && cancel.attempted && 'ok' in cancel ? cancel.ok : undefined,
                                            cancel_timed_out: cancel && cancel.attempted && cancel.timedOut ? true : undefined,
                                            cancel_error: cancel && cancel.attempted && cancel.error ? cancel.error : undefined,
                                        };
                                        if (args.cause) {
                                            baseDetails.polling_cause = safeErrorSummary(args.cause);
                                        }
                                        msg = args.kind === 'timeout'
                                            ? "Background response exceeded max wait (maxWaitMs=".concat(maxWaitMs, ") while polling (id=").concat(responseId, ").")
                                            : "Background polling aborted (id=".concat(responseId, ").");
                                        throw new OpenAiResponsesError({
                                            code: 'OPENAI_API',
                                            message: msg,
                                            details: baseDetails,
                                            cause: args.cause,
                                        });
                                }
                            });
                        }); };
                        _a.label = 1;
                    case 1:
                        // Top of the poll loop: re-check abort and wait budget each iteration.
                        waitedMs = Date.now() - startedAtMs;
                        if (!(abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted)) return [3 /*break*/, 3];
                        return [4 /*yield*/, throwPollingStop({ kind: 'aborted' })];
                    case 2:
                        _a.sent();
                        _a.label = 3;
                    case 3:
                        if (!(waitedMs >= maxWaitMs)) return [3 /*break*/, 5];
                        return [4 /*yield*/, throwPollingStop({ kind: 'timeout' })];
                    case 4:
                        _a.sent();
                        _a.label = 5;
                    case 5:
                        // Warn every 10 minutes elapsed (best-effort; may log slightly after the boundary).
                        if (waitedMs >= nextWarnAtMs) {
                            nextWarnAtMs += warnEveryMs;
                            this.logger.warn({
                                message: "Background polling still in progress",
                                obj: {
                                    responseId: responseId,
                                    status: lastStatus || undefined,
                                    waitedMs: waitedMs,
                                    pollAttempt: pollAttempt,
                                    model: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.model) === 'string' && ctx.model.trim() ? ctx.model.trim() : undefined,
                                    reasoningEffort: ctx === null || ctx === void 0 ? void 0 : ctx.reasoningEffort,
                                    serviceTier: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.requestedServiceTier) === 'string' && ctx.requestedServiceTier.trim()
                                        ? ctx.requestedServiceTier.trim()
                                        : undefined,
                                },
                            });
                        }
                        pollAttempt += 1;
                        resp = void 0;
                        _a.label = 6;
                    case 6:
                        _a.trys.push([6, 8, , 11]);
                        return [4 /*yield*/, this.client.responses.retrieve(responseId, undefined, abortSignal ? { signal: abortSignal } : undefined)];
                    case 7:
                        resp = _a.sent();
                        return [3 /*break*/, 11];
                    case 8:
                        error_2 = _a.sent();
                        // An abort-shaped failure routes through throwPollingStop (which throws);
                        // any other failure falls through to the generic API-error wrapper below.
                        if (!((abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) || isAbortError(error_2))) return [3 /*break*/, 10];
                        return [4 /*yield*/, throwPollingStop({ kind: 'aborted', cause: error_2 })];
                    case 9:
                        _a.sent();
                        _a.label = 10;
                    case 10: throw this.toOpenAiApiError(error_2, {
                        operation: 'responses.retrieve',
                        model: ctx === null || ctx === void 0 ? void 0 : ctx.model,
                        reasoningEffort: ctx === null || ctx === void 0 ? void 0 : ctx.reasoningEffort,
                        backgroundMode: true,
                        responseId: responseId,
                        pollAttempt: pollAttempt,
                        waitedMs: waitedMs,
                        maxWaitMs: maxWaitMs,
                        lastStatus: lastStatus,
                        requestedServiceTier: ctx === null || ctx === void 0 ? void 0 : ctx.requestedServiceTier,
                    });
                    case 11:
                        status_1 = typeof (resp === null || resp === void 0 ? void 0 : resp.status) === 'string' ? resp.status : '';
                        lastStatus = status_1;
                        // Terminal states
                        if (status_1 === 'completed' || status_1 === 'failed' || status_1 === 'incomplete' || status_1 === 'cancelled') {
                            return [2 /*return*/, resp];
                        }
                        this.logger.debug({ message: "Polling response", obj: { responseId: responseId, status: status_1, delayMs: delayMs, pollAttempt: pollAttempt, waitedMs: waitedMs } });
                        // Sleep but wake early if aborted, so abort latency is low.
                        return [4 /*yield*/, sleepWithAbort(delayMs, abortSignal)];
                    case 12:
                        // Sleep but wake early if aborted, so abort latency is low.
                        _a.sent();
                        _a.label = 13;
                    case 13: return [3 /*break*/, 1];
                    case 14: return [2 /*return*/];
                }
            });
        });
    };
725
+ // -----------------------------------------
726
+ // Tool calls
727
+ // -----------------------------------------
728
+ OpenAiResponses.prototype.buildResponseTools = function (functions) {
729
+ var tools = [];
730
+ if (!functions || functions.length < 1) {
731
+ return tools;
732
+ }
733
+ for (var _i = 0, functions_1 = functions; _i < functions_1.length; _i++) {
734
+ var f = functions_1[_i];
735
+ var def = f.definition;
736
+ if (!(def === null || def === void 0 ? void 0 : def.name)) {
737
+ continue;
738
+ }
739
+ tools.push({
740
+ type: 'function',
741
+ name: def.name,
742
+ description: def.description,
743
+ parameters: def.parameters,
744
+ // strict: true,
745
+ });
746
+ }
747
+ return tools;
748
+ };
749
+ OpenAiResponses.prototype.extractFunctionCalls = function (response) {
750
+ var out = Array.isArray(response.output) ? response.output : [];
751
+ var calls = [];
752
+ for (var _i = 0, out_1 = out; _i < out_1.length; _i++) {
753
+ var item = out_1[_i];
754
+ if (!item || typeof item !== 'object') {
755
+ continue;
756
+ }
757
+ var rec = item;
758
+ if (rec.type !== 'function_call') {
759
+ continue;
760
+ }
761
+ var call_id = typeof rec.call_id === 'string' ? rec.call_id : '';
762
+ var name_1 = typeof rec.name === 'string' ? rec.name : '';
763
+ var args = typeof rec.arguments === 'string' ? rec.arguments : '';
764
+ if (!call_id || !name_1) {
765
+ continue;
766
+ }
767
+ calls.push({ type: 'function_call', call_id: call_id, name: name_1, arguments: args });
768
+ }
769
+ return calls;
770
+ };
771
    /**
     * Executes a batch of model-requested function calls strictly in order
     * (one at a time; the next call starts only after the previous resolves)
     * and returns their function_call_output items in the same order.
     */
    OpenAiResponses.prototype.executeFunctionCalls = function (args) {
        return __awaiter(this, void 0, void 0, function () {
            var outputs, _i, _a, call, _b, _c;
            return __generator(this, function (_d) {
                switch (_d.label) {
                    case 0:
                        outputs = [];
                        _i = 0, _a = args.calls;
                        _d.label = 1;
                    case 1:
                        // Loop head: stop once every call has been executed.
                        if (!(_i < _a.length)) return [3 /*break*/, 4];
                        call = _a[_i];
                        _c = (_b = outputs).push;
                        return [4 /*yield*/, this.executeFunctionCall({
                                call: call,
                                functions: args.functions,
                                usage: args.usage,
                                toolInvocations: args.toolInvocations,
                                onToolInvocation: args.onToolInvocation,
                            })];
                    case 2:
                        // Push the awaited output for this call.
                        _c.apply(_b, [_d.sent()]);
                        _d.label = 3;
                    case 3:
                        _i++;
                        return [3 /*break*/, 1];
                    case 4: return [2 /*return*/, outputs];
                }
            });
        });
    };
802
    /**
     * Executes one model-requested function call against the registered
     * conversation functions. Resolution tries an exact definition-name match
     * first, then falls back to matching the last dot-segment of the name.
     * Emits 'started'/'finished' tool-invocation events, records a usage
     * entry for the call, and returns a function_call_output item. When the
     * named function does not exist, an error payload is returned to the
     * model instead of throwing; when the function itself throws, the failed
     * invocation is recorded and the error is rethrown.
     */
    OpenAiResponses.prototype.executeFunctionCall = function (args) {
        var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l;
        return __awaiter(this, void 0, void 0, function () {
            var callId, rawName, shortName, functionToCall, startedAt, parsedArgs, finishedAt, rec, argsObj, returnObject, finishedAt, rec, output, error_3, finishedAt, errMessage, errStack, rec;
            return __generator(this, function (_m) {
                switch (_m.label) {
                    case 0:
                        callId = args.call.call_id;
                        rawName = args.call.name;
                        shortName = (_a = rawName.split('.').pop()) !== null && _a !== void 0 ? _a : rawName;
                        // Exact name match wins; otherwise match on the short (last-segment) name.
                        functionToCall = (_b = args.functions.find(function (fx) { return fx.definition.name === rawName; })) !== null && _b !== void 0 ? _b : args.functions.find(function (fx) { var _a; return ((_a = fx.definition.name.split('.').pop()) !== null && _a !== void 0 ? _a : fx.definition.name) === shortName; });
                        startedAt = new Date();
                        try {
                            parsedArgs = JSON.parse((_c = args.call.arguments) !== null && _c !== void 0 ? _c : '{}');
                        }
                        catch (_o) {
                            // Keep the raw argument string for event reporting when JSON parsing fails.
                            parsedArgs = args.call.arguments;
                        }
                        (_d = args.onToolInvocation) === null || _d === void 0 ? void 0 : _d.call(args, {
                            type: 'started',
                            id: callId,
                            name: (_f = (_e = functionToCall === null || functionToCall === void 0 ? void 0 : functionToCall.definition) === null || _e === void 0 ? void 0 : _e.name) !== null && _f !== void 0 ? _f : shortName,
                            startedAt: startedAt,
                            input: parsedArgs,
                        });
                        if (!functionToCall) {
                            // Unknown function: report the failure back to the model instead of throwing.
                            finishedAt = new Date();
                            rec = {
                                id: callId,
                                name: shortName,
                                startedAt: startedAt,
                                finishedAt: finishedAt,
                                input: parsedArgs,
                                ok: false,
                                error: { message: "Assistant attempted to call nonexistent function" },
                            };
                            args.toolInvocations.push(rec);
                            (_g = args.onToolInvocation) === null || _g === void 0 ? void 0 : _g.call(args, { type: 'finished', result: rec });
                            return [2 /*return*/, {
                                    type: 'function_call_output',
                                    call_id: callId,
                                    output: JSON.stringify({ error: (_h = rec.error) === null || _h === void 0 ? void 0 : _h.message, functionName: shortName }),
                                }];
                        }
                        _m.label = 1;
                    case 1:
                        _m.trys.push([1, 4, , 5]);
                        argsObj = void 0;
                        try {
                            // Re-parse for the actual invocation; unlike parsedArgs above,
                            // this falls back to {} (not the raw string) on parse failure.
                            argsObj = JSON.parse((_j = args.call.arguments) !== null && _j !== void 0 ? _j : '{}');
                        }
                        catch (_p) {
                            argsObj = {};
                        }
                        args.usage.recordToolCall(functionToCall.definition.name);
                        return [4 /*yield*/, functionToCall.call(argsObj)];
                    case 2:
                        returnObject = _m.sent();
                        finishedAt = new Date();
                        rec = {
                            id: callId,
                            name: functionToCall.definition.name,
                            startedAt: startedAt,
                            finishedAt: finishedAt,
                            input: argsObj,
                            ok: true,
                            data: returnObject,
                        };
                        args.toolInvocations.push(rec);
                        (_k = args.onToolInvocation) === null || _k === void 0 ? void 0 : _k.call(args, { type: 'finished', result: rec });
                        return [4 /*yield*/, this.formatToolReturn(returnObject)];
                    case 3:
                        output = _m.sent();
                        return [2 /*return*/, {
                                type: 'function_call_output',
                                call_id: callId,
                                output: output,
                            }];
                    case 4:
                        // The function itself threw: record the failure, notify, then rethrow.
                        error_3 = _m.sent();
                        finishedAt = new Date();
                        errMessage = error_3 instanceof Error ? error_3.message : String(error_3);
                        errStack = error_3 instanceof Error ? error_3.stack : undefined;
                        rec = {
                            id: callId,
                            name: functionToCall.definition.name,
                            startedAt: startedAt,
                            finishedAt: finishedAt,
                            input: parsedArgs,
                            ok: false,
                            error: { message: errMessage, stack: errStack },
                        };
                        args.toolInvocations.push(rec);
                        (_l = args.onToolInvocation) === null || _l === void 0 ? void 0 : _l.call(args, { type: 'finished', result: rec });
                        throw error_3;
                    case 5: return [2 /*return*/];
                }
            });
        });
    };
902
    /**
     * Serializes a tool's return value for the function_call_output payload:
     * - undefined → a JSON success sentinel
     * - ChatCompletionMessageParamFactory → awaited messages flattened to
     *   { role, content } pairs (empty/whitespace-only contents dropped)
     * - anything else → JSON.stringify of the value as-is
     */
    OpenAiResponses.prototype.formatToolReturn = function (returnObject) {
        return __awaiter(this, void 0, void 0, function () {
            var messageParams, normalized;
            var _this = this;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0:
                        if (typeof returnObject === 'undefined') {
                            return [2 /*return*/, JSON.stringify({ result: 'Function with no return value executed successfully' })];
                        }
                        if (!(returnObject instanceof ChatCompletionMessageParamFactory_1.ChatCompletionMessageParamFactory)) return [3 /*break*/, 2];
                        return [4 /*yield*/, returnObject.create()];
                    case 1:
                        messageParams = _a.sent();
                        // Flatten each message to plain text content; drop empty messages.
                        normalized = (messageParams !== null && messageParams !== void 0 ? messageParams : [])
                            .map(function (m) { return ({
                            role: m.role,
                            content: _this.extractTextContent(m.content),
                        }); })
                            .filter(function (m) { return typeof m.content === 'string' && m.content.trim().length > 0; });
                        return [2 /*return*/, JSON.stringify({ messages: normalized })];
                    case 2: return [2 /*return*/, JSON.stringify(returnObject)];
                }
            });
        });
    };
928
+ // -----------------------------------------
929
+ // Usage + text extraction
930
+ // -----------------------------------------
931
+ OpenAiResponses.prototype.addUsageFromResponse = function (response, usage, ctx) {
932
+ var _a;
933
+ if (!response.usage) {
934
+ return;
935
+ }
936
+ usage.addTokenUsage({
937
+ inputTokens: response.usage.input_tokens,
938
+ cachedInputTokens: response.usage.input_tokens_details.cached_tokens,
939
+ outputTokens: response.usage.output_tokens,
940
+ reasoningTokens: response.usage.output_tokens_details.reasoning_tokens,
941
+ totalTokens: response.usage.total_tokens,
942
+ }, { serviceTier: (_a = response.service_tier) !== null && _a !== void 0 ? _a : ctx === null || ctx === void 0 ? void 0 : ctx.requestedServiceTier });
943
+ };
944
+ OpenAiResponses.prototype.extractAssistantText = function (response) {
945
+ var out = Array.isArray(response.output) ? response.output : [];
946
+ var lastJoined = '';
947
+ for (var _i = 0, out_2 = out; _i < out_2.length; _i++) {
948
+ var item = out_2[_i];
949
+ if (!item || typeof item !== 'object') {
950
+ continue;
951
+ }
952
+ var rec = item;
953
+ if (rec.type !== 'message') {
954
+ continue;
955
+ }
956
+ if (rec.role !== 'assistant') {
957
+ continue;
958
+ }
959
+ var contentRaw = rec.content;
960
+ if (!Array.isArray(contentRaw)) {
961
+ continue;
962
+ }
963
+ var pieces = [];
964
+ for (var _a = 0, contentRaw_1 = contentRaw; _a < contentRaw_1.length; _a++) {
965
+ var c = contentRaw_1[_a];
966
+ if (!c || typeof c !== 'object') {
967
+ continue;
968
+ }
969
+ var part = c;
970
+ if (part.type !== 'output_text') {
971
+ continue;
972
+ }
973
+ var t = part.text;
974
+ if (typeof t === 'string' && t.trim()) {
975
+ pieces.push(t);
976
+ }
977
+ }
978
+ var joined = pieces.join('\n').trim();
979
+ if (joined) {
980
+ lastJoined = joined;
981
+ }
982
+ }
983
+ if (lastJoined) {
984
+ return lastJoined;
985
+ }
986
+ var direct = typeof response.output_text === 'string' ? response.output_text.trim() : '';
987
+ if (direct) {
988
+ return direct;
989
+ }
990
+ return '';
991
+ };
992
+ // -----------------------------------------
993
+ // Structured outputs (JSON schema / Zod)
994
+ // -----------------------------------------
995
+ OpenAiResponses.prototype.buildTextFormat = function (schema) {
996
+ if (this.isZodSchema(schema)) {
997
+ // Prefer the official helper when schema is Zod.
998
+ // eslint-disable-next-line @typescript-eslint/no-var-requires
999
+ var mod = require('openai/helpers/zod');
1000
+ return mod.zodTextFormat(schema, 'output');
1001
+ }
1002
+ return {
1003
+ type: 'json_schema',
1004
+ name: 'output',
1005
+ strict: true,
1006
+ schema: this.strictifyJsonSchema(schema),
1007
+ };
1008
+ };
1009
+ OpenAiResponses.prototype.parseAndValidateStructuredOutput = function (text, schema, ctx) {
1010
+ var parsed = this.parseJson(text, ctx);
1011
+ if (this.isZodSchema(schema)) {
1012
+ var res = schema.safeParse(parsed);
1013
+ if (!(res === null || res === void 0 ? void 0 : res.success)) {
1014
+ throw new Error("Structured output failed schema validation");
1015
+ }
1016
+ return res.data;
1017
+ }
1018
+ return parsed;
1019
+ };
1020
+ OpenAiResponses.prototype.isZodSchema = function (schema) {
1021
+ if (!schema || (typeof schema !== 'object' && typeof schema !== 'function')) {
1022
+ return false;
1023
+ }
1024
+ return typeof schema.safeParse === 'function';
1025
+ };
1026
    /**
     * Parses model output text as JSON with a two-pass recovery strategy:
     * 1) strip leading/trailing markdown code fences and parse the result;
     * 2) on failure, parse the widest {...} or [...] slice of the text.
     * When both passes fail, throws OpenAiResponsesError (code 'JSON_PARSE')
     * carrying rich diagnostics: request context, head/tail excerpts of the
     * cleaned text, slice bounds, both parser messages, and the failing
     * position/line/column plus a context snippet where available.
     */
    OpenAiResponses.prototype.parseJson = function (text, ctx) {
        var _a;
        var cleaned = String(text !== null && text !== void 0 ? text : '')
            .trim()
            .replace(/^```(?:json)?/i, '')
            .replace(/```$/i, '')
            .trim();
        try {
            return JSON.parse(cleaned);
        }
        catch (err1) {
            var firstErrMsg = err1 instanceof Error ? err1.message : String(err1);
            var s = cleaned;
            // Locate the widest plausible JSON slice: earliest opener to latest closer.
            var firstObj = s.indexOf('{');
            var firstArr = s.indexOf('[');
            var start = firstObj === -1 ? firstArr : firstArr === -1 ? firstObj : Math.min(firstObj, firstArr);
            var lastObj = s.lastIndexOf('}');
            var lastArr = s.lastIndexOf(']');
            var end = Math.max(lastObj, lastArr);
            if (start >= 0 && end > start) {
                var candidate = s.slice(start, end + 1);
                try {
                    return JSON.parse(candidate);
                }
                catch (err2) {
                    var secondErrMsg = err2 instanceof Error ? err2.message : String(err2);
                    // Map the second parser's position back into the full cleaned string.
                    var pos2rel = extractJsonParsePosition(secondErrMsg);
                    var pos2 = typeof pos2rel === 'number' ? start + pos2rel : undefined;
                    var pos1 = extractJsonParsePosition(firstErrMsg);
                    // Prefer the second (candidate) error position; fall back to the first.
                    var pos_1 = typeof pos2 === 'number' ? pos2 : pos1;
                    var lc_1 = (_a = extractJsonParseLineCol(secondErrMsg)) !== null && _a !== void 0 ? _a : extractJsonParseLineCol(firstErrMsg);
                    var details_1 = {
                        model: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.model) === 'string' && ctx.model.trim() ? ctx.model : undefined,
                        max_output_tokens: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.maxOutputTokens) === 'number' ? ctx.maxOutputTokens : undefined,
                        requested_service_tier: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.requestedServiceTier) === 'string' && String(ctx.requestedServiceTier).trim()
                            ? String(ctx.requestedServiceTier).trim()
                            : undefined,
                        service_tier: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.serviceTier) === 'string' && ctx.serviceTier.trim() ? ctx.serviceTier.trim() : undefined,
                        cleaned_len: s.length,
                        cleaned_head: truncateHead(s, 250),
                        cleaned_tail: truncateTail(s, 500),
                        json_start: start,
                        json_end: end,
                        json_candidate_len: candidate.length,
                        first_error: firstErrMsg,
                        second_error: secondErrMsg,
                        error_pos: typeof pos_1 === 'number' ? pos_1 : undefined,
                        error_line: lc_1 === null || lc_1 === void 0 ? void 0 : lc_1.line,
                        error_column: lc_1 === null || lc_1 === void 0 ? void 0 : lc_1.column,
                        error_context: typeof pos_1 === 'number' ? snippetAround(s, pos_1, 160) : undefined,
                    };
                    var msg_1 = "Failed to parse model output as JSON. " +
                        "cleaned_len=".concat(s.length, " json_start=").concat(start, " json_end=").concat(end, ". ") +
                        "first_error=".concat(JSON.stringify(firstErrMsg), " second_error=").concat(JSON.stringify(secondErrMsg), ".");
                    throw new OpenAiResponsesError({
                        code: 'JSON_PARSE',
                        message: msg_1,
                        details: details_1,
                        cause: err2,
                    });
                }
            }
            // No plausible JSON slice found: report the first parse failure only.
            var pos = extractJsonParsePosition(firstErrMsg);
            var lc = extractJsonParseLineCol(firstErrMsg);
            var details = {
                model: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.model) === 'string' && ctx.model.trim() ? ctx.model : undefined,
                max_output_tokens: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.maxOutputTokens) === 'number' ? ctx.maxOutputTokens : undefined,
                requested_service_tier: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.requestedServiceTier) === 'string' && String(ctx.requestedServiceTier).trim()
                    ? String(ctx.requestedServiceTier).trim()
                    : undefined,
                service_tier: typeof (ctx === null || ctx === void 0 ? void 0 : ctx.serviceTier) === 'string' && ctx.serviceTier.trim() ? ctx.serviceTier.trim() : undefined,
                cleaned_len: s.length,
                cleaned_head: truncateHead(s, 250),
                cleaned_tail: truncateTail(s, 500),
                json_start: start >= 0 ? start : undefined,
                json_end: end >= 0 ? end : undefined,
                first_error: firstErrMsg,
                error_pos: typeof pos === 'number' ? pos : undefined,
                error_line: lc === null || lc === void 0 ? void 0 : lc.line,
                error_column: lc === null || lc === void 0 ? void 0 : lc.column,
                error_context: typeof pos === 'number' ? snippetAround(s, pos, 160) : undefined,
            };
            var msg = "Failed to parse model output as JSON. " +
                "cleaned_len=".concat(s.length, ". ") +
                "error=".concat(JSON.stringify(firstErrMsg), ".");
            throw new OpenAiResponsesError({
                code: 'JSON_PARSE',
                message: msg,
                details: details,
                cause: err1,
            });
        }
    };
1119
+ /**
1120
+ * Strictifies a plain JSON Schema for OpenAI Structured Outputs (strict mode):
1121
+ * - Ensures every object has `additionalProperties: false`
1122
+ * - Ensures every object has a `required` array that includes **all** keys in `properties`
1123
+ * - Adds missing `type: "object"` / `type: "array"` where implied by keywords
1124
+ */
1125
+ OpenAiResponses.prototype.strictifyJsonSchema = function (schema) {
1126
+ var root = JSON.parse(JSON.stringify(schema !== null && schema !== void 0 ? schema : {}));
1127
+ var visit = function (node) {
1128
+ if (!node || typeof node !== 'object') {
1129
+ return;
1130
+ }
1131
+ if (!node.type) {
1132
+ if (node.properties || node.additionalProperties || node.patternProperties) {
1133
+ node.type = 'object';
1134
+ }
1135
+ else if (node.items || node.prefixItems) {
1136
+ node.type = 'array';
1137
+ }
1138
+ }
1139
+ var types = Array.isArray(node.type) ? node.type : node.type ? [node.type] : [];
1140
+ if (types.includes('object')) {
1141
+ if (node.additionalProperties !== false) {
1142
+ node.additionalProperties = false;
1143
+ }
1144
+ if (node.properties && typeof node.properties === 'object') {
1145
+ var propKeys = Object.keys(node.properties);
1146
+ var currentReq = Array.isArray(node.required) ? node.required.slice() : [];
1147
+ node.required = Array.from(new Set(__spreadArray(__spreadArray([], currentReq, true), propKeys, true)));
1148
+ for (var _i = 0, propKeys_1 = propKeys; _i < propKeys_1.length; _i++) {
1149
+ var k = propKeys_1[_i];
1150
+ visit(node.properties[k]);
1151
+ }
1152
+ }
1153
+ if (node.patternProperties && typeof node.patternProperties === 'object') {
1154
+ for (var _a = 0, _b = Object.keys(node.patternProperties); _a < _b.length; _a++) {
1155
+ var k = _b[_a];
1156
+ visit(node.patternProperties[k]);
1157
+ }
1158
+ }
1159
+ for (var _c = 0, _d = ['$defs', 'definitions']; _c < _d.length; _c++) {
1160
+ var defsKey = _d[_c];
1161
+ if (node[defsKey] && typeof node[defsKey] === 'object') {
1162
+ for (var _e = 0, _f = Object.keys(node[defsKey]); _e < _f.length; _e++) {
1163
+ var key = _f[_e];
1164
+ visit(node[defsKey][key]);
1165
+ }
1166
+ }
1167
+ }
1168
+ }
1169
+ if (types.includes('array')) {
1170
+ if (node.items) {
1171
+ if (Array.isArray(node.items)) {
1172
+ node.items.forEach(visit);
1173
+ }
1174
+ else {
1175
+ visit(node.items);
1176
+ }
1177
+ }
1178
+ if (Array.isArray(node.prefixItems)) {
1179
+ node.prefixItems.forEach(visit);
1180
+ }
1181
+ }
1182
+ for (var _g = 0, _h = ['oneOf', 'anyOf', 'allOf']; _g < _h.length; _g++) {
1183
+ var k = _h[_g];
1184
+ if (Array.isArray(node[k])) {
1185
+ node[k].forEach(visit);
1186
+ }
1187
+ }
1188
+ if (node.not) {
1189
+ visit(node.not);
1190
+ }
1191
+ };
1192
+ visit(root);
1193
+ return root;
1194
+ };
1195
+ // -----------------------------------------
1196
+ // Messages + modules
1197
+ // -----------------------------------------
1198
+ OpenAiResponses.prototype.buildInstructionsAndInput = function (messages) {
1199
+ var instructionsParts = [];
1200
+ instructionsParts.push.apply(instructionsParts, this.systemMessages);
1201
+ var input = [];
1202
+ for (var _i = 0, messages_1 = messages; _i < messages_1.length; _i++) {
1203
+ var m = messages_1[_i];
1204
+ var msg = typeof m === 'string' ? { role: 'user', content: m } : m;
1205
+ if (msg.role === 'system') {
1206
+ var c = this.extractTextContent(msg.content).trim();
1207
+ if (c) {
1208
+ instructionsParts.push(c);
1209
+ }
1210
+ continue;
1211
+ }
1212
+ if (msg.role === 'tool') {
1213
+ continue;
1214
+ }
1215
+ var role = msg.role === 'assistant' ? 'assistant' : 'user';
1216
+ var content = this.extractTextContent(msg.content).trim();
1217
+ if (!content) {
1218
+ continue;
1219
+ }
1220
+ input.push({ role: role, content: content });
1221
+ }
1222
+ var instructions = instructionsParts.map(function (s) { return String(s !== null && s !== void 0 ? s : '').trim(); }).filter(Boolean).length > 0
1223
+ ? instructionsParts
1224
+ .map(function (s) { return String(s !== null && s !== void 0 ? s : '').trim(); })
1225
+ .filter(Boolean)
1226
+ .join('\n\n')
1227
+ : undefined;
1228
+ return { instructions: instructions, input: input };
1229
+ };
1230
+ OpenAiResponses.prototype.extractTextContent = function (content) {
1231
+ if (typeof content === 'string') {
1232
+ return content;
1233
+ }
1234
+ if (!content) {
1235
+ return '';
1236
+ }
1237
+ if (Array.isArray(content)) {
1238
+ return content
1239
+ .map(function (p) {
1240
+ if (typeof p === 'string') {
1241
+ return p;
1242
+ }
1243
+ if ((p === null || p === void 0 ? void 0 : p.type) === 'text' && typeof (p === null || p === void 0 ? void 0 : p.text) === 'string') {
1244
+ return p.text;
1245
+ }
1246
+ return '';
1247
+ })
1248
+ .join('\n');
1249
+ }
1250
+ return '';
1251
+ };
1252
    /**
     * Idempotently runs module processing exactly once. Returns immediately
     * when processing already completed; concurrent callers share the same
     * in-flight promise. On failure the cached promise is cleared so a
     * subsequent call can retry.
     */
    OpenAiResponses.prototype.ensureModulesProcessed = function () {
        return __awaiter(this, void 0, void 0, function () {
            var error_4;
            return __generator(this, function (_a) {
                switch (_a.label) {
                    case 0:
                        if (this.modulesProcessed) {
                            return [2 /*return*/];
                        }
                        if (this.processingModulesPromise) {
                            // Another caller already started processing; await its promise.
                            return [2 /*return*/, this.processingModulesPromise];
                        }
                        this.processingModulesPromise = this.processModules();
                        _a.label = 1;
                    case 1:
                        _a.trys.push([1, 3, , 4]);
                        return [4 /*yield*/, this.processingModulesPromise];
                    case 2:
                        _a.sent();
                        this.modulesProcessed = true;
                        return [3 /*break*/, 4];
                    case 3:
                        error_4 = _a.sent();
                        // Reset so the next caller can retry after a failure.
                        this.processingModulesPromise = null;
                        throw error_4;
                    case 4: return [2 /*return*/];
                }
            });
        });
    };
1282
    /**
     * Gathers system messages and functions from each configured module, in
     * registration order. Module system messages are joined and pushed with
     * a module-name prefix; module functions pass through the allow-list
     * filter before being registered, and any per-function instructions are
     * appended as an additional system message.
     */
    OpenAiResponses.prototype.processModules = function () {
        return __awaiter(this, void 0, void 0, function () {
            var _i, _a, module_1, moduleName, rawSystem, sysArr, trimmed, formatted, moduleFunctions, filtered, fnInstructions;
            var _b;
            return __generator(this, function (_c) {
                switch (_c.label) {
                    case 0:
                        if (!this.modules || this.modules.length < 1) {
                            return [2 /*return*/];
                        }
                        _i = 0, _a = this.modules;
                        _c.label = 1;
                    case 1:
                        // Loop head: one iteration per module.
                        if (!(_i < _a.length)) return [3 /*break*/, 4];
                        module_1 = _a[_i];
                        moduleName = module_1.getName();
                        // Promise.resolve normalizes sync and async getSystemMessages results.
                        return [4 /*yield*/, Promise.resolve(module_1.getSystemMessages())];
                    case 2:
                        rawSystem = _c.sent();
                        sysArr = Array.isArray(rawSystem) ? rawSystem : rawSystem ? [rawSystem] : [];
                        trimmed = sysArr.map(function (s) { return String(s !== null && s !== void 0 ? s : '').trim(); }).filter(Boolean);
                        if (trimmed.length > 0) {
                            formatted = trimmed.join('. ');
                            this.systemMessages.push("The following are instructions from the ".concat(moduleName, " module:\n").concat(formatted));
                        }
                        moduleFunctions = module_1.getFunctions();
                        filtered = this.filterFunctions(moduleFunctions);
                        (_b = this.functions).push.apply(_b, filtered);
                        fnInstructions = this.buildFunctionInstructionsMessage(moduleName, filtered);
                        if (fnInstructions) {
                            this.systemMessages.push(fnInstructions);
                        }
                        _c.label = 3;
                    case 3:
                        _i++;
                        return [3 /*break*/, 1];
                    case 4: return [2 /*return*/];
                }
            });
        });
    };
1323
+ OpenAiResponses.prototype.filterFunctions = function (functions) {
1324
+ if (!this.allowedFunctionNames || this.allowedFunctionNames.length < 1) {
1325
+ return functions;
1326
+ }
1327
+ var allow = new Set(this.allowedFunctionNames.map(function (n) { return String(n).trim(); }).filter(Boolean));
1328
+ return functions.filter(function (f) {
1329
+ var _a, _b, _c;
1330
+ var name = String((_b = (_a = f.definition) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : '').trim();
1331
+ if (!name) {
1332
+ return false;
1333
+ }
1334
+ var short = (_c = name.split('.').pop()) !== null && _c !== void 0 ? _c : name;
1335
+ return allow.has(name) || allow.has(short);
1336
+ });
1337
+ };
1338
+ OpenAiResponses.prototype.buildFunctionInstructionsMessage = function (moduleName, functions) {
1339
+ var _a, _b;
1340
+ var msg = "The following are instructions from functions in the ".concat(moduleName, " module:");
1341
+ var added = false;
1342
+ for (var _i = 0, functions_2 = functions; _i < functions_2.length; _i++) {
1343
+ var f = functions_2[_i];
1344
+ var name_2 = String((_b = (_a = f.definition) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : '').trim();
1345
+ var instructions = f.instructions;
1346
+ if (!name_2 || !instructions || instructions.length < 1) {
1347
+ continue;
1348
+ }
1349
+ var paragraph = instructions
1350
+ .map(function (s) { return String(s !== null && s !== void 0 ? s : '').trim(); })
1351
+ .filter(Boolean)
1352
+ .join('. ');
1353
+ if (!paragraph) {
1354
+ continue;
1355
+ }
1356
+ added = true;
1357
+ msg += " ".concat(name_2, ": ").concat(paragraph, ".");
1358
+ }
1359
+ return added ? msg : null;
1360
+ };
1361
+ // -----------------------------------------
1362
+ // Model/background defaults
1363
+ // -----------------------------------------
1364
+ OpenAiResponses.prototype.resolveModel = function (model) {
1365
+ return model !== null && model !== void 0 ? model : this.defaultModel;
1366
+ };
1367
+ OpenAiResponses.prototype.resolveBackgroundMode = function (args) {
1368
+ if (typeof args.requested === 'boolean') {
1369
+ return args.requested;
1370
+ }
1371
+ if (this.isProModel(args.model)) {
1372
+ return true;
1373
+ }
1374
+ if (this.isHighReasoningEffort(args.reasoningEffort)) {
1375
+ return true;
1376
+ }
1377
+ return false;
1378
+ };
1379
+ OpenAiResponses.prototype.isProModel = function (model) {
1380
+ var m = String(model !== null && model !== void 0 ? model : '').toLowerCase();
1381
+ return /(^|[-_.])pro($|[-_.])/.test(m);
1382
+ };
1383
+ OpenAiResponses.prototype.isHighReasoningEffort = function (effort) {
1384
+ var v = String(effort !== null && effort !== void 0 ? effort : '').toLowerCase();
1385
+ return v === 'high' || v === 'xhigh';
1386
+ };
1387
+ return OpenAiResponses;
1388
+ }());
1389
+ exports.OpenAiResponses = OpenAiResponses;
1390
/**
 * Structured error for OpenAI Responses operations. Carries a stable
 * `code`, a diagnostic `details` object (defaults to {}), the underlying
 * `cause`, and a `retryable` flag that defaults to true when not given
 * as a boolean.
 */
var OpenAiResponsesError = /** @class */ (function (_super) {
    __extends(OpenAiResponsesError, _super);
    function OpenAiResponsesError(args) {
        var _newTarget = this.constructor;
        var _a;
        var _this = _super.call(this, args.message) || this;
        _this.name = 'OpenAiResponsesError';
        _this.code = args.code;
        _this.details = (_a = args.details) !== null && _a !== void 0 ? _a : {};
        _this.cause = args.cause;
        _this.retryable = typeof args.retryable === 'boolean' ? args.retryable : true;
        // Restore the prototype chain so `instanceof` works in down-leveled builds.
        Object.setPrototypeOf(_this, _newTarget.prototype);
        return _this;
    }
    return OpenAiResponsesError;
}(Error));
exports.OpenAiResponsesError = OpenAiResponsesError;
1407
/**
 * Returns at most `max` leading characters of `text`, appending '...'
 * when truncation occurs. Nullish text yields ''; non-positive max
 * yields ''.
 */
function truncateHead(text, max) {
    var s = String(text !== null && text !== void 0 ? text : '');
    if (max <= 0) {
        return '';
    }
    return s.length <= max ? s : s.slice(0, max) + '...';
}
1417
/**
 * Returns at most `max` trailing characters of `text`, prepending '...'
 * when truncation occurs. Nullish text yields ''; non-positive max
 * yields ''.
 */
function truncateTail(text, max) {
    var s = String(text !== null && text !== void 0 ? text : '');
    if (max <= 0) {
        return '';
    }
    return s.length <= max ? s : '...' + s.slice(s.length - max);
}
1427
/** Extracts the "at position N" offset from a JSON.parse error message, if present. */
function extractJsonParsePosition(errMsg) {
    var match = /at position\s+(\d+)/i.exec(String(errMsg !== null && errMsg !== void 0 ? errMsg : ''));
    if (!match) {
        return undefined;
    }
    var pos = Number(match[1]);
    return Number.isFinite(pos) ? pos : undefined;
}
1435
/**
 * Extracts "line N column M" coordinates from a JSON.parse error message.
 * Returns undefined when the pattern is absent; matched but non-finite
 * numbers come back as undefined fields.
 */
function extractJsonParseLineCol(errMsg) {
    var match = /line\s+(\d+)\s+column\s+(\d+)/i.exec(String(errMsg !== null && errMsg !== void 0 ? errMsg : ''));
    if (!match) {
        return undefined;
    }
    var line = Number(match[1]);
    var column = Number(match[2]);
    return {
        line: Number.isFinite(line) ? line : undefined,
        column: Number.isFinite(column) ? column : undefined,
    };
}
1447
/**
 * Returns up to `radius` characters on each side of `pos` in `text`,
 * inserting a '<<HERE>>' marker at the position and '...' on whichever
 * sides were cut off. `pos` is clamped into the string; non-finite
 * positions act as 0.
 */
function snippetAround(text, pos, radius) {
    var s = String(text !== null && text !== void 0 ? text : '');
    var center = Math.max(0, Math.min(s.length, Number.isFinite(pos) ? pos : 0));
    var span = Math.max(0, radius);
    var from = Math.max(0, center - span);
    var to = Math.min(s.length, center + span);
    var prefix = from > 0 ? '...' : '';
    var suffix = to < s.length ? '...' : '';
    return prefix + s.slice(from, center) + '<<HERE>>' + s.slice(center, to) + suffix;
}
1459
/** Resolves after `ms` milliseconds; never rejects. */
function sleep(ms) {
    return new Promise(function (resolve) {
        setTimeout(resolve, ms);
    });
}
1462
/**
 * Sleep, but wake early if the signal is aborted.
 * (We do not throw here; the caller should check `signal.aborted` and act.)
 */
function sleepWithAbort(ms, signal) {
    if (!signal) {
        return sleep(ms);
    }
    if (signal.aborted) {
        return Promise.resolve();
    }
    return new Promise(function (resolve) {
        var timer = setTimeout(function () { finish(); }, ms);
        function onAbort() {
            finish();
        }
        function finish() {
            // Best-effort cleanup of both the timer and the abort listener.
            try {
                clearTimeout(timer);
            }
            catch (_a) {
                // ignore
            }
            try {
                if (typeof signal.removeEventListener === 'function') {
                    signal.removeEventListener('abort', onAbort);
                }
            }
            catch (_b) {
                // ignore
            }
            resolve();
        }
        try {
            if (typeof signal.addEventListener === 'function') {
                signal.addEventListener('abort', onAbort, { once: true });
            }
        }
        catch (_c) {
            // If addEventListener isn't available, fall back to plain sleep.
        }
    });
}
1506
/**
 * Pull an HTTP status code off an error-like object.
 * Checks `status` first, then `statusCode`; only finite numbers qualify.
 *
 * @param {unknown} error - Anything thrown/rejected by an HTTP client.
 * @returns {number|undefined} The status code, or undefined if none is present.
 */
function extractHttpStatus(error) {
  if (!error || typeof error !== 'object') {
    return undefined;
  }
  // Precedence matters: `status` wins over `statusCode` when both exist.
  for (const candidate of [error.status, error.statusCode]) {
    if (typeof candidate === 'number' && Number.isFinite(candidate)) {
      return candidate;
    }
  }
  return undefined;
}
1521
/**
 * Extract a provider request id from an error-like object.
 * Looks at `request_id`/`requestId` first, then falls back to an
 * `x-request-id` header on `error.headers` (supports both Headers-style
 * objects with `.get()` and plain key/value records, case-insensitively).
 *
 * @param {unknown} error - Anything thrown/rejected by an HTTP client.
 * @returns {string|undefined} The trimmed request id, or undefined.
 */
function extractRequestId(error) {
  if (!error || typeof error !== 'object') {
    return undefined;
  }
  // Normalize a candidate: only non-blank strings count, and they come back trimmed.
  const asTrimmedId = (value) => (typeof value === 'string' && value.trim() ? value.trim() : undefined);
  const directId = asTrimmedId(error.request_id ?? error.requestId);
  if (directId !== undefined) {
    return directId;
  }
  const headers = error.headers;
  if (!headers) {
    return undefined;
  }
  if (typeof headers.get === 'function') {
    // Headers-like (fetch Headers, Map, ...): case handling is the container's job.
    return asTrimmedId(headers.get('x-request-id'));
  }
  if (typeof headers === 'object' && !Array.isArray(headers)) {
    // Plain record: first key matching case-insensitively wins, even if its value is unusable.
    const matchKey = Object.keys(headers).find((key) => String(key).toLowerCase() === 'x-request-id');
    if (matchKey !== undefined) {
      return asTrimmedId(headers[matchKey]);
    }
  }
  return undefined;
}
1551
/**
 * Decide whether an HTTP status is worth retrying.
 * 408 (timeout), 409 (conflict), 429 (rate limit), and all 5xx are retryable.
 * A missing/non-numeric status (e.g. a network-level failure) is treated as retryable.
 *
 * @param {number|undefined} status - HTTP status code, if one was extracted.
 * @returns {boolean} True when a retry is appropriate.
 */
function isRetryableHttpStatus(status) {
  if (typeof status !== 'number') {
    return true;
  }
  return status === 408 || status === 409 || status === 429 || status >= 500;
}
1563
/**
 * Detect whether an error represents a user/system-initiated abort
 * rather than a genuine failure.
 *
 * @param {unknown} error - The thrown/rejected value.
 * @returns {boolean} True when the error looks like an abort.
 */
function isAbortError(error) {
  if (!error) {
    return false;
  }
  // Most fetch implementations:
  // - error.name === 'AbortError'
  // - or error.code === 'ABORT_ERR'
  if (error instanceof Error) {
    const normalizedName = String(error.name ?? '').toLowerCase();
    if (normalizedName === 'aborterror') {
      return true;
    }
    const normalizedMessage = String(error.message ?? '').toLowerCase();
    // Keep this conservative; don't treat every "abort" substring as abort.
    if (normalizedMessage === 'aborted' || normalizedMessage === 'request aborted') {
      return true;
    }
  }
  if (typeof error === 'object') {
    const code = error.code;
    if (typeof code === 'string' && code.toUpperCase() === 'ABORT_ERR') {
      return true;
    }
  }
  return false;
}
1591
/**
 * Produce a plain, log-safe summary object for an arbitrary error value.
 * Enriches the summary with an HTTP status and request id when available,
 * and includes `code`/`details` for OpenAiResponsesError instances.
 *
 * @param {unknown} error - The thrown/rejected value (may be anything).
 * @returns {{ name?: string, message: string, code?: unknown, details?: unknown,
 *             status?: number, request_id?: string }} Serializable summary.
 */
function safeErrorSummary(error) {
  if (!error) {
    return { message: 'Unknown error' };
  }
  const rawStatus = extractHttpStatus(error);
  // Only forward a real numeric status; anything else becomes undefined.
  const status = typeof rawStatus === 'number' ? rawStatus : undefined;
  const requestId = extractRequestId(error);
  if (error instanceof OpenAiResponsesError) {
    // Domain error: keep its structured code/details alongside the basics.
    return {
      name: error.name,
      message: error.message,
      code: error.code,
      details: error.details,
      status,
      request_id: requestId,
    };
  }
  if (error instanceof Error) {
    return {
      name: error.name,
      message: error.message,
      status,
      request_id: requestId,
    };
  }
  // Non-Error throwables (strings, objects, ...) are stringified.
  return {
    message: String(error),
    status,
    request_id: requestId,
  };
}
1621
+ //# sourceMappingURL=OpenAiResponses.js.map