@just-every/ensemble 0.2.87 → 0.2.88
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cjs/config/tool_execution.cjs +47 -0
- package/dist/cjs/config/tool_execution.d.ts +12 -0
- package/dist/cjs/config/tool_execution.d.ts.map +1 -0
- package/dist/cjs/config/tool_execution.js.map +1 -0
- package/dist/cjs/core/ensemble_embed.cjs +35 -0
- package/dist/cjs/core/ensemble_embed.d.ts +3 -0
- package/dist/cjs/core/ensemble_embed.d.ts.map +1 -0
- package/dist/cjs/core/ensemble_embed.js.map +1 -0
- package/dist/cjs/core/ensemble_image.cjs +13 -0
- package/dist/cjs/core/ensemble_image.d.ts +3 -0
- package/dist/cjs/core/ensemble_image.d.ts.map +1 -0
- package/dist/cjs/core/ensemble_image.js.map +1 -0
- package/dist/cjs/core/ensemble_listen.cjs +162 -0
- package/dist/cjs/core/ensemble_listen.d.ts +5 -0
- package/dist/cjs/core/ensemble_listen.d.ts.map +1 -0
- package/dist/cjs/core/ensemble_listen.js.map +1 -0
- package/dist/cjs/core/ensemble_live.cjs +387 -0
- package/dist/cjs/core/ensemble_live.d.ts +14 -0
- package/dist/cjs/core/ensemble_live.d.ts.map +1 -0
- package/dist/cjs/core/ensemble_live.js.map +1 -0
- package/dist/cjs/core/ensemble_request.cjs +409 -0
- package/dist/cjs/core/ensemble_request.d.ts +4 -0
- package/dist/cjs/core/ensemble_request.d.ts.map +1 -0
- package/dist/cjs/core/ensemble_request.js.map +1 -0
- package/dist/cjs/core/ensemble_voice.cjs +284 -0
- package/dist/cjs/core/ensemble_voice.d.ts +4 -0
- package/dist/cjs/core/ensemble_voice.d.ts.map +1 -0
- package/dist/cjs/core/ensemble_voice.js.map +1 -0
- package/dist/cjs/data/model_data.cjs +1460 -0
- package/dist/cjs/data/model_data.d.ts +71 -0
- package/dist/cjs/data/model_data.d.ts.map +1 -0
- package/dist/cjs/data/model_data.js.map +1 -0
- package/dist/cjs/index.cjs +139 -0
- package/dist/cjs/index.d.ts +37 -0
- package/dist/cjs/index.d.ts.map +1 -0
- package/dist/cjs/index.js.map +1 -0
- package/dist/cjs/model_providers/base_provider.cjs +46 -0
- package/dist/cjs/model_providers/base_provider.d.ts +13 -0
- package/dist/cjs/model_providers/base_provider.d.ts.map +1 -0
- package/dist/cjs/model_providers/base_provider.js.map +1 -0
- package/dist/cjs/model_providers/claude.cjs +782 -0
- package/dist/cjs/model_providers/claude.d.ts +12 -0
- package/dist/cjs/model_providers/claude.d.ts.map +1 -0
- package/dist/cjs/model_providers/claude.js.map +1 -0
- package/dist/cjs/model_providers/deepseek.cjs +129 -0
- package/dist/cjs/model_providers/deepseek.d.ts +8 -0
- package/dist/cjs/model_providers/deepseek.d.ts.map +1 -0
- package/dist/cjs/model_providers/deepseek.js.map +1 -0
- package/dist/cjs/model_providers/elevenlabs.cjs +140 -0
- package/dist/cjs/model_providers/elevenlabs.d.ts +37 -0
- package/dist/cjs/model_providers/elevenlabs.d.ts.map +1 -0
- package/dist/cjs/model_providers/elevenlabs.js.map +1 -0
- package/dist/cjs/model_providers/gemini.cjs +1654 -0
- package/dist/cjs/model_providers/gemini.d.ts +22 -0
- package/dist/cjs/model_providers/gemini.d.ts.map +1 -0
- package/dist/cjs/model_providers/gemini.js.map +1 -0
- package/dist/cjs/model_providers/grok.cjs +25 -0
- package/dist/cjs/model_providers/grok.d.ts +8 -0
- package/dist/cjs/model_providers/grok.d.ts.map +1 -0
- package/dist/cjs/model_providers/grok.js.map +1 -0
- package/dist/cjs/model_providers/model_provider.cjs +296 -0
- package/dist/cjs/model_providers/model_provider.d.ts +10 -0
- package/dist/cjs/model_providers/model_provider.d.ts.map +1 -0
- package/dist/cjs/model_providers/model_provider.js.map +1 -0
- package/dist/cjs/model_providers/openai.cjs +1117 -0
- package/dist/cjs/model_providers/openai.d.ts +19 -0
- package/dist/cjs/model_providers/openai.d.ts.map +1 -0
- package/dist/cjs/model_providers/openai.js.map +1 -0
- package/dist/cjs/model_providers/openai_chat.cjs +787 -0
- package/dist/cjs/model_providers/openai_chat.d.ts +20 -0
- package/dist/cjs/model_providers/openai_chat.d.ts.map +1 -0
- package/dist/cjs/model_providers/openai_chat.js.map +1 -0
- package/dist/cjs/model_providers/openrouter.cjs +22 -0
- package/dist/cjs/model_providers/openrouter.d.ts +6 -0
- package/dist/cjs/model_providers/openrouter.d.ts.map +1 -0
- package/dist/cjs/model_providers/openrouter.js.map +1 -0
- package/dist/cjs/model_providers/test_provider.cjs +236 -0
- package/dist/cjs/model_providers/test_provider.d.ts +29 -0
- package/dist/cjs/model_providers/test_provider.d.ts.map +1 -0
- package/dist/cjs/model_providers/test_provider.js.map +1 -0
- package/dist/cjs/tsconfig.cjs.tsbuildinfo +1 -0
- package/dist/cjs/types/api_types.cjs +3 -0
- package/dist/cjs/types/api_types.d.ts +249 -0
- package/dist/cjs/types/api_types.d.ts.map +1 -0
- package/dist/cjs/types/api_types.js.map +1 -0
- package/dist/cjs/types/errors.cjs +76 -0
- package/dist/cjs/types/errors.d.ts +34 -0
- package/dist/cjs/types/errors.d.ts.map +1 -0
- package/dist/cjs/types/errors.js.map +1 -0
- package/dist/cjs/types/types.cjs +3 -0
- package/dist/cjs/types/types.d.ts +638 -0
- package/dist/cjs/types/types.d.ts.map +1 -0
- package/dist/cjs/types/types.js.map +1 -0
- package/dist/cjs/utils/agent.cjs +384 -0
- package/dist/cjs/utils/agent.d.ts +48 -0
- package/dist/cjs/utils/agent.d.ts.map +1 -0
- package/dist/cjs/utils/agent.js.map +1 -0
- package/dist/cjs/utils/audio_stream_player.cjs +342 -0
- package/dist/cjs/utils/audio_stream_player.d.ts +37 -0
- package/dist/cjs/utils/audio_stream_player.d.ts.map +1 -0
- package/dist/cjs/utils/audio_stream_player.js.map +1 -0
- package/dist/cjs/utils/citation_tracker.cjs +25 -0
- package/dist/cjs/utils/citation_tracker.d.ts +12 -0
- package/dist/cjs/utils/citation_tracker.d.ts.map +1 -0
- package/dist/cjs/utils/citation_tracker.js.map +1 -0
- package/dist/cjs/utils/config_manager.cjs +105 -0
- package/dist/cjs/utils/config_manager.d.ts +31 -0
- package/dist/cjs/utils/config_manager.d.ts.map +1 -0
- package/dist/cjs/utils/config_manager.js.map +1 -0
- package/dist/cjs/utils/cost_tracker.cjs +226 -0
- package/dist/cjs/utils/cost_tracker.d.ts +33 -0
- package/dist/cjs/utils/cost_tracker.d.ts.map +1 -0
- package/dist/cjs/utils/cost_tracker.js.map +1 -0
- package/dist/cjs/utils/create_tool_function.cjs +182 -0
- package/dist/cjs/utils/create_tool_function.d.ts +3 -0
- package/dist/cjs/utils/create_tool_function.d.ts.map +1 -0
- package/dist/cjs/utils/create_tool_function.js.map +1 -0
- package/dist/cjs/utils/delta_buffer.cjs +65 -0
- package/dist/cjs/utils/delta_buffer.d.ts +14 -0
- package/dist/cjs/utils/delta_buffer.d.ts.map +1 -0
- package/dist/cjs/utils/delta_buffer.js.map +1 -0
- package/dist/cjs/utils/ensemble_result.cjs +167 -0
- package/dist/cjs/utils/ensemble_result.d.ts +33 -0
- package/dist/cjs/utils/ensemble_result.d.ts.map +1 -0
- package/dist/cjs/utils/ensemble_result.js.map +1 -0
- package/dist/cjs/utils/event_controller.cjs +59 -0
- package/dist/cjs/utils/event_controller.d.ts +13 -0
- package/dist/cjs/utils/event_controller.d.ts.map +1 -0
- package/dist/cjs/utils/event_controller.js.map +1 -0
- package/dist/cjs/utils/external_models.cjs +42 -0
- package/dist/cjs/utils/external_models.d.ts +9 -0
- package/dist/cjs/utils/external_models.d.ts.map +1 -0
- package/dist/cjs/utils/external_models.js.map +1 -0
- package/dist/cjs/utils/image_to_text.cjs +58 -0
- package/dist/cjs/utils/image_to_text.d.ts +3 -0
- package/dist/cjs/utils/image_to_text.d.ts.map +1 -0
- package/dist/cjs/utils/image_to_text.js.map +1 -0
- package/dist/cjs/utils/image_utils.cjs +168 -0
- package/dist/cjs/utils/image_utils.d.ts +18 -0
- package/dist/cjs/utils/image_utils.d.ts.map +1 -0
- package/dist/cjs/utils/image_utils.js.map +1 -0
- package/dist/cjs/utils/image_validation.cjs +31 -0
- package/dist/cjs/utils/image_validation.d.ts +3 -0
- package/dist/cjs/utils/image_validation.d.ts.map +1 -0
- package/dist/cjs/utils/image_validation.js.map +1 -0
- package/dist/cjs/utils/llm_logger.cjs +31 -0
- package/dist/cjs/utils/llm_logger.d.ts +8 -0
- package/dist/cjs/utils/llm_logger.d.ts.map +1 -0
- package/dist/cjs/utils/llm_logger.js.map +1 -0
- package/dist/cjs/utils/message_history.cjs +560 -0
- package/dist/cjs/utils/message_history.d.ts +65 -0
- package/dist/cjs/utils/message_history.d.ts.map +1 -0
- package/dist/cjs/utils/message_history.js.map +1 -0
- package/dist/cjs/utils/model_class_config.cjs +105 -0
- package/dist/cjs/utils/model_class_config.d.ts +12 -0
- package/dist/cjs/utils/model_class_config.d.ts.map +1 -0
- package/dist/cjs/utils/model_class_config.js.map +1 -0
- package/dist/cjs/utils/pause_controller.cjs +90 -0
- package/dist/cjs/utils/pause_controller.d.ts +14 -0
- package/dist/cjs/utils/pause_controller.d.ts.map +1 -0
- package/dist/cjs/utils/pause_controller.js.map +1 -0
- package/dist/cjs/utils/quota_tracker.cjs +311 -0
- package/dist/cjs/utils/quota_tracker.d.ts +22 -0
- package/dist/cjs/utils/quota_tracker.d.ts.map +1 -0
- package/dist/cjs/utils/quota_tracker.js.map +1 -0
- package/dist/cjs/utils/retry_handler.cjs +131 -0
- package/dist/cjs/utils/retry_handler.d.ts +15 -0
- package/dist/cjs/utils/retry_handler.d.ts.map +1 -0
- package/dist/cjs/utils/retry_handler.js.map +1 -0
- package/dist/cjs/utils/running_tool_tracker.cjs +133 -0
- package/dist/cjs/utils/running_tool_tracker.d.ts +42 -0
- package/dist/cjs/utils/running_tool_tracker.d.ts.map +1 -0
- package/dist/cjs/utils/running_tool_tracker.js.map +1 -0
- package/dist/cjs/utils/sequential_queue.cjs +73 -0
- package/dist/cjs/utils/sequential_queue.d.ts +13 -0
- package/dist/cjs/utils/sequential_queue.d.ts.map +1 -0
- package/dist/cjs/utils/sequential_queue.js.map +1 -0
- package/dist/cjs/utils/stream_handler.cjs +73 -0
- package/dist/cjs/utils/stream_handler.d.ts +16 -0
- package/dist/cjs/utils/stream_handler.d.ts.map +1 -0
- package/dist/cjs/utils/stream_handler.js.map +1 -0
- package/dist/cjs/utils/summary_utils.cjs +211 -0
- package/dist/cjs/utils/summary_utils.d.ts +7 -0
- package/dist/cjs/utils/summary_utils.d.ts.map +1 -0
- package/dist/cjs/utils/summary_utils.js.map +1 -0
- package/dist/cjs/utils/test_utils.cjs +212 -0
- package/dist/cjs/utils/test_utils.d.ts +58 -0
- package/dist/cjs/utils/test_utils.d.ts.map +1 -0
- package/dist/cjs/utils/test_utils.js.map +1 -0
- package/dist/cjs/utils/tool_execution_manager.cjs +139 -0
- package/dist/cjs/utils/tool_execution_manager.d.ts +7 -0
- package/dist/cjs/utils/tool_execution_manager.d.ts.map +1 -0
- package/dist/cjs/utils/tool_execution_manager.js.map +1 -0
- package/dist/cjs/utils/tool_parameter_utils.cjs +168 -0
- package/dist/cjs/utils/tool_parameter_utils.d.ts +5 -0
- package/dist/cjs/utils/tool_parameter_utils.d.ts.map +1 -0
- package/dist/cjs/utils/tool_parameter_utils.js.map +1 -0
- package/dist/cjs/utils/tool_result_processor.cjs +363 -0
- package/dist/cjs/utils/tool_result_processor.d.ts +11 -0
- package/dist/cjs/utils/tool_result_processor.d.ts.map +1 -0
- package/dist/cjs/utils/tool_result_processor.js.map +1 -0
- package/dist/cjs/utils/verification.cjs +59 -0
- package/dist/cjs/utils/verification.d.ts +7 -0
- package/dist/cjs/utils/verification.d.ts.map +1 -0
- package/dist/cjs/utils/verification.js.map +1 -0
- package/dist/tsconfig.tsbuildinfo +1 -1
- package/dist/utils/audio_stream_player.d.ts +2 -0
- package/dist/utils/audio_stream_player.d.ts.map +1 -1
- package/dist/utils/audio_stream_player.js +83 -1
- package/dist/utils/audio_stream_player.js.map +1 -1
- package/dist/utils/cost_tracker.d.ts +7 -0
- package/dist/utils/cost_tracker.d.ts.map +1 -1
- package/dist/utils/cost_tracker.js +29 -0
- package/dist/utils/cost_tracker.js.map +1 -1
- package/package.json +18 -3
|
@@ -0,0 +1,1117 @@
|
|
|
1
|
+
"use strict";
// --- TypeScript-emitted CommonJS interop helpers ---
// Re-export binding `k` of module `m` onto `o` (as `k2`), using a live
// getter where property descriptors are supported.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attach a CommonJS module object as the `default` export of a namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Emulate `import * as ns from "mod"` for CommonJS: copy every own key
// except "default", then set the module itself as `default`.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.openaiProvider = exports.OpenAIProvider = void 0;
// Project-internal dependencies (compiled .cjs build of the ESM sources).
const base_provider_js_1 = require("./base_provider.cjs");
const openai_1 = __importStar(require("openai"));
const index_js_1 = require("../index.cjs");
const llm_logger_js_1 = require("../utils/llm_logger.cjs");
const pause_controller_js_1 = require("../utils/pause_controller.cjs");
const image_utils_js_1 = require("../utils/image_utils.cjs");
const delta_buffer_js_1 = require("../utils/delta_buffer.cjs");
const citation_tracker_js_1 = require("../utils/citation_tracker.cjs");
// Fixed viewport dimensions reported to the computer-use tool
// (see convertToOpenAITools below).
const BROWSER_WIDTH = 1024;
const BROWSER_HEIGHT = 1536;
|
|
47
|
+
/**
 * Deep-copy `schema` and rewrite it to satisfy OpenAI's strict
 * function-calling / structured-output subset of JSON Schema:
 *  - drops the non-standard `optional` flag and unsupported validation
 *    keywords (minimum, pattern, format, default, ...),
 *  - rewrites `oneOf` into `anyOf` (the only union form OpenAI accepts),
 *  - forces `additionalProperties: false` on object schemas and lists every
 *    present property as `required` (strict mode demands both),
 *  - when `originalProperties` is supplied, the TOP-LEVEL `required` list is
 *    instead derived from which original properties were not marked `optional`.
 * The input object is never mutated (a JSON round-trip copy is edited).
 *
 * @param {object} schema - JSON-Schema-like tool parameter schema.
 * @param {object} [originalProperties] - the pre-copy property map, used only
 *   to recover per-property `optional` flags for the top-level `required`.
 * @returns {object} a new, OpenAI-compatible schema.
 */
function processSchemaForOpenAI(schema, originalProperties) {
    const processedSchema = JSON.parse(JSON.stringify(schema));
    // Keywords OpenAI rejects in strict schemas. Built once per call (the
    // original rebuilt this literal on every level of the recursive walk).
    const unsupportedKeywords = [
        'minimum',
        'maximum',
        'minItems',
        'maxItems',
        'minLength',
        'maxLength',
        'pattern',
        'format',
        'multipleOf',
        'patternProperties',
        'unevaluatedProperties',
        'propertyNames',
        'minProperties',
        'maxProperties',
        'unevaluatedItems',
        'contains',
        'minContains',
        'maxContains',
        'uniqueItems',
        'default',
    ];
    const processSchemaRecursively = (node) => {
        if (!node || typeof node !== 'object')
            return;
        // `optional` is an ensemble extension, not JSON Schema — strip it.
        if (node.optional === true) {
            delete node.optional;
        }
        if (Array.isArray(node.oneOf)) {
            // anyOf is close enough in practice and is what OpenAI supports.
            node.anyOf = node.oneOf;
            delete node.oneOf;
        }
        unsupportedKeywords.forEach(keyword => {
            if (node[keyword] !== undefined) {
                delete node[keyword];
            }
        });
        // Treat a typeless schema with `properties` as an object schema.
        const isObject = node.type === 'object' || (node.type === undefined && node.properties !== undefined);
        for (const key of ['anyOf', 'allOf']) {
            if (Array.isArray(node[key])) {
                node[key].forEach((variantSchema) => processSchemaRecursively(variantSchema));
            }
        }
        if (isObject && node.properties) {
            for (const propName in node.properties) {
                processSchemaRecursively(node.properties[propName]);
            }
        }
        if (node.type === 'array' && node.items !== undefined) {
            if (Array.isArray(node.items)) {
                node.items.forEach((itemSchema) => processSchemaRecursively(itemSchema));
            }
            else if (typeof node.items === 'object') {
                processSchemaRecursively(node.items);
            }
        }
        if (isObject) {
            // Strict mode: closed objects, every present property required.
            node.additionalProperties = false;
            if (node.properties) {
                const currentRequired = Object.keys(node.properties);
                if (currentRequired.length > 0) {
                    node.required = currentRequired;
                }
                else {
                    delete node.required;
                }
            }
            else {
                delete node.required;
            }
        }
    };
    processSchemaRecursively(processedSchema);
    if (originalProperties) {
        // Recover a meaningful top-level `required`: only the properties the
        // caller did NOT flag as optional in the original definition.
        const topLevelRequired = [];
        for (const propName in originalProperties) {
            if (!originalProperties[propName].optional) {
                topLevelRequired.push(propName);
            }
        }
        if (topLevelRequired.length > 0) {
            processedSchema.required = topLevelRequired;
        }
        else {
            delete processedSchema.required;
        }
    }
    if (processedSchema.properties && processedSchema.additionalProperties === undefined) {
        processedSchema.additionalProperties = false;
    }
    return processedSchema;
}
|
|
141
|
+
/**
 * Walk a tool-parameter schema and replace any `enum` supplied as an async
 * function with its resolved array of values. A resolver that throws, or
 * that returns an empty or non-array result, has its `enum` dropped
 * entirely. Non-object inputs are returned untouched; the input is never
 * mutated (shallow copies are taken level by level).
 *
 * @param {object} params - schema node (typically a tool's `parameters`).
 * @returns {Promise<object>} a copy with every function-valued enum resolved.
 */
async function resolveAsyncEnums(params) {
    if (!params || typeof params !== 'object') {
        return params;
    }
    const out = { ...params };
    if (!out.properties) {
        return out;
    }
    const nextProps = {};
    for (const [name, spec] of Object.entries(out.properties)) {
        if (!spec || typeof spec !== 'object') {
            // Primitive property definitions pass through unchanged.
            nextProps[name] = spec;
            continue;
        }
        const specCopy = { ...spec };
        if (typeof specCopy.enum === 'function') {
            try {
                const values = await specCopy.enum();
                if (Array.isArray(values) && values.length > 0) {
                    specCopy.enum = values;
                }
                else {
                    delete specCopy.enum;
                }
            }
            catch {
                // Best effort: a failing resolver simply loses its enum.
                delete specCopy.enum;
            }
        }
        // Recurse so nested object schemas receive the same treatment.
        nextProps[name] = await resolveAsyncEnums(specCopy);
    }
    out.properties = nextProps;
    return out;
}
|
|
175
|
+
/**
 * Convert ensemble tool definitions into the OpenAI Responses API `tools`
 * array, mutating and returning `requestParams`.
 *  - `openai_web_search` becomes the built-in `web_search_preview` tool and
 *    `reasoning` is removed from the request (not supported alongside it).
 *  - Every other tool becomes a strict function tool whose parameter schema
 *    is run through resolveAsyncEnums + processSchemaForOpenAI.
 *  - The `computer-use-preview` model additionally receives the computer-use
 *    tool with the module's fixed browser viewport.
 * Always sets `truncation: 'auto'`.
 *
 * @param {object} requestParams - request payload being built (mutated).
 * @param {Array} tools - ensemble tool wrappers with `.definition.function`.
 * @returns {Promise<object>} the same `requestParams` object.
 */
async function convertToOpenAITools(requestParams, tools) {
    const toOpenAITool = async (tool) => {
        const fn = tool.definition.function;
        if (fn.name === 'openai_web_search') {
            // Built-in search tool; reasoning must be dropped for it.
            delete requestParams.reasoning;
            return {
                type: 'web_search_preview',
                search_context_size: 'high',
            };
        }
        const resolvedParams = await resolveAsyncEnums(fn.parameters);
        const paramSchema = processSchemaForOpenAI(resolvedParams, resolvedParams.properties);
        return {
            type: 'function',
            name: fn.name,
            description: fn.description,
            parameters: paramSchema,
            strict: true,
        };
    };
    requestParams.tools = await Promise.all(tools.map(toOpenAITool));
    if (requestParams.model === 'computer-use-preview') {
        requestParams.tools.push({
            type: 'computer_use_preview',
            display_width: BROWSER_WIDTH,
            display_height: BROWSER_HEIGHT,
            environment: 'browser',
        });
    }
    requestParams.truncation = 'auto';
    return requestParams;
}
|
|
206
|
+
/**
 * Append each image in `images` (id -> image data) to the Responses API
 * `input` array as a user message. Images are resized and split into
 * segments via resizeAndSplitForOpenAI; when that fails, the raw image is
 * attached as-is (best effort — the error is logged, not rethrown).
 *
 * @param {Array} input - Responses API input list (mutated in place).
 * @param {Object<string,string>} images - map of image id to image data.
 * @param {string} source - human-readable origin used in the label text.
 * @returns {Promise<Array>} the same `input` array.
 */
async function addImagesToInput(input, images, source) {
    for (const [image_id, imageData] of Object.entries(images)) {
        try {
            const segments = await (0, image_utils_js_1.resizeAndSplitForOpenAI)(imageData);
            const label = segments.length === 1
                ? `This is [image #${image_id}] from the ${source}`
                : `This is [image #${image_id}] from the ${source} (split into ${segments.length} parts, each up to 768px high)`;
            const content = [{ type: 'input_text', text: label }];
            for (const segment of segments) {
                content.push({
                    type: 'input_image',
                    image_url: segment,
                    detail: 'high',
                });
            }
            input.push({ type: 'message', role: 'user', content });
        }
        catch (error) {
            // Fall back to attaching the unprocessed image.
            console.error(`Error processing image ${image_id}:`, error);
            input.push({
                type: 'message',
                role: 'user',
                content: [
                    {
                        type: 'input_text',
                        text: `This is [image #${image_id}] from the ${source} (raw image)`,
                    },
                    {
                        type: 'input_image',
                        image_url: imageData,
                        detail: 'high',
                    },
                ],
            });
        }
    }
    return input;
}
|
|
257
|
+
class OpenAIProvider extends base_provider_js_1.BaseModelProvider {
|
|
258
|
+
// Lazily-created OpenAI SDK client instance (see the `client` getter).
_client;
// Explicit API key; when absent the getter falls back to OPENAI_API_KEY.
apiKey;
/**
 * @param {string} [apiKey] - OpenAI API key. Optional: when omitted, the
 *   `client` getter reads process.env.OPENAI_API_KEY at first use.
 */
constructor(apiKey) {
    super('openai');
    this.apiKey = apiKey;
}
|
|
264
|
+
get client() {
|
|
265
|
+
if (!this._client) {
|
|
266
|
+
const apiKey = this.apiKey || process.env.OPENAI_API_KEY;
|
|
267
|
+
if (!apiKey) {
|
|
268
|
+
throw new Error('Failed to initialize OpenAI client. Make sure OPENAI_API_KEY is set.');
|
|
269
|
+
}
|
|
270
|
+
this._client = new openai_1.default({
|
|
271
|
+
apiKey: apiKey,
|
|
272
|
+
});
|
|
273
|
+
}
|
|
274
|
+
return this._client;
|
|
275
|
+
}
|
|
276
|
+
/**
 * Create embedding(s) for `input` via the OpenAI embeddings API.
 * Returns a single embedding vector for a string (or one-element array)
 * input, or an array of vectors when `input` is an array of 2+ strings.
 * Records usage in the shared cost tracker; errors are logged and rethrown.
 */
async createEmbedding(input, model, opts) {
    try {
        const options = {
            model,
            input: input,
            encoding_format: 'float',
        };
        // NOTE(review): dimensions is always sent, defaulting to 3072, yet
        // the metadata fallback below assumes 1536 — confirm which default
        // is intended and that the target model accepts `dimensions`.
        options.dimensions = opts?.dimensions || 3072;
        console.log(`[OpenAI] Generating embedding with model ${model}`);
        const response = await this.client.embeddings.create(options);
        // Prefer the API-reported token count; otherwise estimate at
        // roughly 4 characters per token.
        const inputTokens = response.usage?.prompt_tokens ||
            (typeof input === 'string'
                ? Math.ceil(input.length / 4)
                : input.reduce((sum, text) => sum + Math.ceil(text.length / 4), 0));
        index_js_1.costTracker.addUsage({
            model,
            input_tokens: inputTokens,
            output_tokens: 0,
            metadata: {
                dimensions: response.data[0]?.embedding.length || opts?.dimensions || 1536,
            },
        });
        // Multi-string inputs return an array of vectors; everything else
        // returns just the first vector.
        if (Array.isArray(input) && input.length > 1) {
            return response.data.map(item => item.embedding);
        }
        else {
            return response.data[0].embedding;
        }
    }
    catch (error) {
        console.error('[OpenAI] Error generating embedding:', error);
        throw error;
    }
}
|
|
310
|
+
/**
 * Generate (or edit, when `opts.source_images` is given) one or more images.
 * Maps legacy quality names ('standard'/'hd') onto gpt-image-1 tiers,
 * normalizes size aliases ('square'/'landscape'/'portrait'), records cost,
 * and returns the results as data: URLs. Errors are logged and rethrown.
 */
async createImage(prompt, model, opts) {
    try {
        model = model || 'gpt-image-1';
        const number_of_images = opts?.n || 1;
        // Map DALL-E style quality names to gpt-image-1 tiers.
        let quality = 'auto';
        if (opts?.quality === 'standard')
            quality = 'medium';
        else if (opts?.quality === 'hd')
            quality = 'high';
        else if (opts?.quality === 'low' || opts?.quality === 'medium' || opts?.quality === 'high') {
            quality = opts.quality;
        }
        // Normalize size aliases to explicit pixel dimensions.
        let size = 'auto';
        if (opts?.size === 'square' || opts?.size === '1024x1024') {
            size = '1024x1024';
        }
        else if (opts?.size === 'landscape' || opts?.size === '1536x1024') {
            size = '1536x1024';
        }
        else if (opts?.size === 'portrait' || opts?.size === '1024x1536') {
            size = '1024x1536';
        }
        const background = 'auto';
        const source_images = opts?.source_images;
        console.log(`[OpenAI] Generating ${number_of_images} image(s) with model ${model}, prompt: "${prompt.substring(0, 100)}${prompt.length > 100 ? '...' : ''}"`);
        let response;
        if (source_images) {
            // Edit path: convert each source image (URL, data: URL, or bare
            // base64) into an uploadable file for images.edit.
            console.log('[OpenAI] Using images.edit with source_images');
            const imageArray = Array.isArray(source_images) ? source_images : [source_images];
            const imageFiles = [];
            for (const sourceImg of imageArray) {
                let imageFile;
                if (sourceImg.startsWith('http://') || sourceImg.startsWith('https://')) {
                    const imageResponse = await fetch(sourceImg);
                    const imageBuffer = await imageResponse.arrayBuffer();
                    imageFile = await (0, openai_1.toFile)(new Uint8Array(imageBuffer), `image_${imageFiles.length}.png`, {
                        type: 'image/png',
                    });
                }
                else {
                    // data: URLs carry base64 after the first comma.
                    let base64Data = sourceImg;
                    if (sourceImg.startsWith('data:')) {
                        base64Data = sourceImg.split(',')[1];
                    }
                    const binaryData = Buffer.from(base64Data, 'base64');
                    imageFile = await (0, openai_1.toFile)(new Uint8Array(binaryData), `image_${imageFiles.length}.png`, {
                        type: 'image/png',
                    });
                }
                imageFiles.push(imageFile);
            }
            // Optional inpainting mask, same base64 handling as above.
            let maskFile;
            if (opts?.mask) {
                let maskBase64 = opts.mask;
                if (opts.mask.startsWith('data:')) {
                    maskBase64 = opts.mask.split(',')[1];
                }
                const maskBinary = Buffer.from(maskBase64, 'base64');
                maskFile = await (0, openai_1.toFile)(new Uint8Array(maskBinary), 'mask.png', {
                    type: 'image/png',
                });
            }
            const editParams = {
                model,
                prompt,
                image: imageFiles,
                n: number_of_images,
                quality,
                size,
            };
            if (maskFile) {
                editParams.mask = maskFile;
            }
            response = await this.client.images.edit(editParams);
        }
        else {
            response = await this.client.images.generate({
                model,
                prompt,
                n: number_of_images,
                background,
                quality,
                size,
                moderation: 'low',
                output_format: 'png',
            });
        }
        if (response.data && response.data.length > 0) {
            const perImageCost = this.getImageCost(model, quality);
            index_js_1.costTracker.addUsage({
                model,
                image_count: response.data.length,
                metadata: {
                    quality,
                    size,
                    cost_per_image: perImageCost,
                    is_edit: !!source_images,
                },
            });
        }
        // NOTE(review): this map assumes response.data is set and each item
        // has b64_json — a response with only URLs would throw here; confirm
        // that is the intended contract.
        const imageDataUrls = response.data.map(item => {
            const imageData = item?.b64_json;
            if (!imageData) {
                throw new Error('No image data returned from OpenAI');
            }
            return `data:image/png;base64,${imageData}`;
        });
        if (imageDataUrls.length === 0) {
            throw new Error('No images returned from OpenAI');
        }
        return imageDataUrls;
    }
    catch (error) {
        console.error('[OpenAI] Error generating image:', error);
        throw error;
    }
}
|
|
427
|
+
getImageCost(model, quality) {
|
|
428
|
+
if (model === 'gpt-image-1') {
|
|
429
|
+
if (quality === 'high') {
|
|
430
|
+
return 0.08;
|
|
431
|
+
}
|
|
432
|
+
else if (quality === 'medium' || quality === 'auto') {
|
|
433
|
+
return 0.04;
|
|
434
|
+
}
|
|
435
|
+
else if (quality === 'low') {
|
|
436
|
+
return 0.02;
|
|
437
|
+
}
|
|
438
|
+
}
|
|
439
|
+
return 0.04;
|
|
440
|
+
}
|
|
441
|
+
/**
 * Synthesize speech for `text` via the OpenAI audio API. Returns an
 * ArrayBuffer of the whole audio, or (when opts.stream is set) a web
 * ReadableStream of Uint8Array chunks. Records character-based cost in
 * the shared tracker; errors are logged and rethrown.
 */
async createVoice(text, model, opts) {
    try {
        const voice = opts?.voice || 'alloy';
        const speed = opts?.speed || 1.0;
        // Collapse variant format names (e.g. 'pcm_16000', 'mp3_hq') onto
        // the two formats the API accepts here. mp3 wins if both match.
        let response_format = opts?.response_format || 'mp3';
        if (response_format.includes('pcm')) {
            response_format = 'pcm';
        }
        if (response_format.includes('mp3')) {
            response_format = 'mp3';
        }
        console.log(`[OpenAI] Generating speech with model ${model}, voice: ${voice}, format: ${response_format}`);
        // Fold the optional `affect` hint into the instructions string.
        let instructions = opts?.instructions || undefined;
        if (opts?.affect) {
            instructions = `Sound ${opts.affect}${instructions ? ' and ' + instructions : ''}`;
        }
        const response = await this.client.audio.speech.create({
            model,
            input: text,
            instructions,
            voice,
            speed,
            response_format: response_format,
        });
        // TTS is billed per character. NOTE(review): only tts-1-hd gets the
        // higher rate; confirm pricing for other speech models.
        const characterCount = text.length;
        const costPerThousandChars = model === 'tts-1-hd' ? 0.03 : 0.015;
        const cost = (characterCount / 1000) * costPerThousandChars;
        index_js_1.costTracker.addUsage({
            model,
            cost,
            metadata: {
                character_count: characterCount,
                voice,
                format: response_format,
            },
        });
        if (opts?.stream) {
            // Wrap the SDK's async-iterable body in a web ReadableStream of
            // Uint8Array chunks.
            const nodeStream = response.body;
            return new ReadableStream({
                async start(controller) {
                    for await (const chunk of nodeStream) {
                        controller.enqueue(new Uint8Array(chunk));
                    }
                    controller.close();
                },
            });
        }
        else {
            const buffer = await response.arrayBuffer();
            return buffer;
        }
    }
    catch (error) {
        console.error('[OpenAI] Error generating speech:', error);
        throw error;
    }
}
|
|
498
|
+
async *createResponseStream(messages, model, agent) {
|
|
499
|
+
const { getToolsFromAgent } = await Promise.resolve().then(() => __importStar(require("../utils/agent.cjs")));
|
|
500
|
+
const tools = agent ? await getToolsFromAgent(agent) : [];
|
|
501
|
+
const settings = agent?.modelSettings;
|
|
502
|
+
let requestId;
|
|
503
|
+
try {
|
|
504
|
+
let input = [];
|
|
505
|
+
for (const messageFull of messages) {
|
|
506
|
+
let message = { ...messageFull };
|
|
507
|
+
const originalModel = message.model;
|
|
508
|
+
delete message.timestamp;
|
|
509
|
+
delete message.model;
|
|
510
|
+
delete message.pinned;
|
|
511
|
+
if (message.type === 'thinking') {
|
|
512
|
+
if (model.startsWith('o') && message.thinking_id && model === originalModel) {
|
|
513
|
+
console.log(`[OpenAI] Processing thinking message with ID: ${message.thinking_id}`, message);
|
|
514
|
+
const match = message.thinking_id.match(/^(rs_[A-Za-z0-9]+)-(\d)$/);
|
|
515
|
+
if (match) {
|
|
516
|
+
const reasoningId = match[1];
|
|
517
|
+
const summaryIndex = parseInt(match[2], 10);
|
|
518
|
+
const summaryText = typeof message.content === 'string' ? message.content : JSON.stringify(message.content);
|
|
519
|
+
const summaryEntry = {
|
|
520
|
+
type: 'summary_text',
|
|
521
|
+
text: summaryText,
|
|
522
|
+
};
|
|
523
|
+
const existingIndex = input.findIndex((item) => item.type === 'reasoning' && item.id === reasoningId);
|
|
524
|
+
if (existingIndex !== -1) {
|
|
525
|
+
const existingItem = input[existingIndex];
|
|
526
|
+
if (!existingItem.summary) {
|
|
527
|
+
existingItem.summary = [];
|
|
528
|
+
}
|
|
529
|
+
existingItem.summary[summaryIndex] = summaryEntry;
|
|
530
|
+
input[existingIndex] = existingItem;
|
|
531
|
+
}
|
|
532
|
+
else {
|
|
533
|
+
const newItem = {
|
|
534
|
+
type: 'reasoning',
|
|
535
|
+
id: reasoningId,
|
|
536
|
+
summary: [],
|
|
537
|
+
};
|
|
538
|
+
newItem.summary[summaryIndex] = summaryEntry;
|
|
539
|
+
input.push(newItem);
|
|
540
|
+
}
|
|
541
|
+
continue;
|
|
542
|
+
}
|
|
543
|
+
}
|
|
544
|
+
input.push({
|
|
545
|
+
type: 'message',
|
|
546
|
+
role: 'user',
|
|
547
|
+
content: 'Thinking: ' + message.content,
|
|
548
|
+
status: message.status || 'completed',
|
|
549
|
+
});
|
|
550
|
+
continue;
|
|
551
|
+
}
|
|
552
|
+
if (message.type === 'function_call') {
|
|
553
|
+
if (message.id && (!message.id.startsWith('fc_') || model !== originalModel)) {
|
|
554
|
+
const { id, ...rest } = message;
|
|
555
|
+
message = rest;
|
|
556
|
+
}
|
|
557
|
+
message.status = message.status || 'completed';
|
|
558
|
+
input.push(message);
|
|
559
|
+
continue;
|
|
560
|
+
}
|
|
561
|
+
if (message.type === 'function_call_output') {
|
|
562
|
+
const { name, id, ...messageToAdd } = message;
|
|
563
|
+
input = await (0, image_utils_js_1.appendMessageWithImage)(model, input, messageToAdd, 'output', addImagesToInput, `function call output of ${message.name}`);
|
|
564
|
+
continue;
|
|
565
|
+
}
|
|
566
|
+
if ((message.type ?? 'message') === 'message' && 'content' in message) {
|
|
567
|
+
if ('id' in message && message.id && (!message.id.startsWith('msg_') || model !== originalModel)) {
|
|
568
|
+
const { id, ...rest } = message;
|
|
569
|
+
message = rest;
|
|
570
|
+
console.log(`[OpenAI] Removed message ID: ${id} model: ${model} originalModel: ${originalModel}`);
|
|
571
|
+
}
|
|
572
|
+
input = await (0, image_utils_js_1.appendMessageWithImage)(model, input, { ...message, type: 'message' }, 'content', addImagesToInput);
|
|
573
|
+
continue;
|
|
574
|
+
}
|
|
575
|
+
}
|
|
576
|
+
if (input.length === 0) {
|
|
577
|
+
input.push({
|
|
578
|
+
type: 'message',
|
|
579
|
+
role: 'user',
|
|
580
|
+
content: 'Please proceed.',
|
|
581
|
+
});
|
|
582
|
+
}
|
|
583
|
+
let requestParams = {
|
|
584
|
+
model,
|
|
585
|
+
stream: true,
|
|
586
|
+
user: 'magi',
|
|
587
|
+
input,
|
|
588
|
+
};
|
|
589
|
+
if (!model.startsWith('o3-')) {
|
|
590
|
+
if (settings?.temperature !== undefined) {
|
|
591
|
+
requestParams.temperature = settings.temperature;
|
|
592
|
+
}
|
|
593
|
+
if (settings?.top_p !== undefined) {
|
|
594
|
+
requestParams.top_p = settings.top_p;
|
|
595
|
+
}
|
|
596
|
+
}
|
|
597
|
+
const REASONING_EFFORT_CONFIGS = ['low', 'medium', 'high'];
|
|
598
|
+
let hasEffortSuffix = false;
|
|
599
|
+
for (const effort of REASONING_EFFORT_CONFIGS) {
|
|
600
|
+
const suffix = `-${effort}`;
|
|
601
|
+
if (model.endsWith(suffix)) {
|
|
602
|
+
hasEffortSuffix = true;
|
|
603
|
+
requestParams.reasoning = {
|
|
604
|
+
effort: effort,
|
|
605
|
+
summary: 'auto',
|
|
606
|
+
};
|
|
607
|
+
model = model.slice(0, -suffix.length);
|
|
608
|
+
requestParams.model = model;
|
|
609
|
+
break;
|
|
610
|
+
}
|
|
611
|
+
}
|
|
612
|
+
if (model.startsWith('o') && !hasEffortSuffix) {
|
|
613
|
+
requestParams.reasoning = {
|
|
614
|
+
effort: 'high',
|
|
615
|
+
summary: 'auto',
|
|
616
|
+
};
|
|
617
|
+
}
|
|
618
|
+
if (settings?.tool_choice) {
|
|
619
|
+
if (typeof settings.tool_choice === 'object' &&
|
|
620
|
+
settings.tool_choice?.type === 'function' &&
|
|
621
|
+
settings.tool_choice?.function?.name) {
|
|
622
|
+
requestParams.tool_choice = {
|
|
623
|
+
type: settings.tool_choice.type,
|
|
624
|
+
name: settings.tool_choice.function.name,
|
|
625
|
+
};
|
|
626
|
+
}
|
|
627
|
+
else if (typeof settings.tool_choice === 'string') {
|
|
628
|
+
requestParams.tool_choice = settings.tool_choice;
|
|
629
|
+
}
|
|
630
|
+
}
|
|
631
|
+
if (settings?.json_schema?.schema) {
|
|
632
|
+
const { schema, ...wrapperWithoutSchema } = settings.json_schema;
|
|
633
|
+
requestParams.text = {
|
|
634
|
+
format: {
|
|
635
|
+
...wrapperWithoutSchema,
|
|
636
|
+
schema: processSchemaForOpenAI(schema),
|
|
637
|
+
},
|
|
638
|
+
};
|
|
639
|
+
}
|
|
640
|
+
if (tools && tools.length > 0) {
|
|
641
|
+
requestParams = await convertToOpenAITools(requestParams, tools);
|
|
642
|
+
}
|
|
643
|
+
requestId = (0, llm_logger_js_1.log_llm_request)(agent.agent_id, 'openai', model, requestParams);
|
|
644
|
+
const { waitWhilePaused } = await Promise.resolve().then(() => __importStar(require("../utils/pause_controller.cjs")));
|
|
645
|
+
await waitWhilePaused(100, agent.abortSignal);
|
|
646
|
+
const stream = await this.client.responses.create(requestParams);
|
|
647
|
+
const messagePositions = new Map();
|
|
648
|
+
const reasoningPositions = new Map();
|
|
649
|
+
const reasoningAggregates = new Map();
|
|
650
|
+
const deltaBuffers = new Map();
|
|
651
|
+
const citationTracker = (0, citation_tracker_js_1.createCitationTracker)();
|
|
652
|
+
const toolCallStates = new Map();
|
|
653
|
+
const events = [];
|
|
654
|
+
try {
|
|
655
|
+
for await (const event of stream) {
|
|
656
|
+
events.push(event);
|
|
657
|
+
if ((0, pause_controller_js_1.isPaused)()) {
|
|
658
|
+
console.log(`[OpenAI] System paused during stream for model ${model}. Waiting...`);
|
|
659
|
+
await waitWhilePaused(100, agent.abortSignal);
|
|
660
|
+
console.log(`[OpenAI] System resumed, continuing stream for model ${model}`);
|
|
661
|
+
}
|
|
662
|
+
if (event.type === 'response.in_progress') {
|
|
663
|
+
}
|
|
664
|
+
else if (event.type === 'response.completed' && event.response?.usage) {
|
|
665
|
+
index_js_1.costTracker.addUsage({
|
|
666
|
+
model,
|
|
667
|
+
input_tokens: event.response.usage.input_tokens || 0,
|
|
668
|
+
output_tokens: event.response.usage.output_tokens || 0,
|
|
669
|
+
cached_tokens: event.response.usage.input_tokens_details?.cached_tokens || 0,
|
|
670
|
+
metadata: {
|
|
671
|
+
reasoning_tokens: event.response.usage.output_tokens_details?.reasoning_tokens || 0,
|
|
672
|
+
},
|
|
673
|
+
});
|
|
674
|
+
}
|
|
675
|
+
else if (event.type === 'response.failed' && event.response?.error) {
|
|
676
|
+
const errorInfo = event.response.error;
|
|
677
|
+
(0, llm_logger_js_1.log_llm_error)(requestId, errorInfo);
|
|
678
|
+
console.error(`Response ${event.response.id} failed: [${errorInfo.code}] ${errorInfo.message}`);
|
|
679
|
+
yield {
|
|
680
|
+
type: 'error',
|
|
681
|
+
error: `OpenAI response failed: [${errorInfo.code}] ${errorInfo.message}`,
|
|
682
|
+
};
|
|
683
|
+
}
|
|
684
|
+
else if (event.type === 'response.incomplete' && event.response?.incomplete_details) {
|
|
685
|
+
const reason = event.response.incomplete_details.reason;
|
|
686
|
+
(0, llm_logger_js_1.log_llm_error)(requestId, 'OpenAI response incomplete: ' + reason);
|
|
687
|
+
console.warn(`Response ${event.response.id} incomplete: ${reason}`);
|
|
688
|
+
yield {
|
|
689
|
+
type: 'error',
|
|
690
|
+
error: 'OpenAI response incomplete: ' + reason,
|
|
691
|
+
};
|
|
692
|
+
}
|
|
693
|
+
else if (event.type === 'response.output_item.added' && event.item) {
|
|
694
|
+
if (event.item.type === 'function_call') {
|
|
695
|
+
if (!toolCallStates.has(event.item.id)) {
|
|
696
|
+
toolCallStates.set(event.item.id, {
|
|
697
|
+
id: event.item.id,
|
|
698
|
+
call_id: event.item.call_id,
|
|
699
|
+
type: 'function',
|
|
700
|
+
function: {
|
|
701
|
+
name: event.item.name || '',
|
|
702
|
+
arguments: '',
|
|
703
|
+
},
|
|
704
|
+
});
|
|
705
|
+
}
|
|
706
|
+
else {
|
|
707
|
+
console.warn(`Received output_item.added for already tracked function call ID: ${event.item.id}`);
|
|
708
|
+
}
|
|
709
|
+
}
|
|
710
|
+
}
|
|
711
|
+
else if (event.type === 'response.output_item.done' && event.item) {
|
|
712
|
+
if (event.item.type === 'reasoning' && !event.item.summary.length) {
|
|
713
|
+
yield {
|
|
714
|
+
type: 'message_complete',
|
|
715
|
+
content: '',
|
|
716
|
+
message_id: event.item.id + '-0',
|
|
717
|
+
thinking_content: '',
|
|
718
|
+
};
|
|
719
|
+
}
|
|
720
|
+
}
|
|
721
|
+
else if (event.type === 'response.content_part.added' && event.part) {
|
|
722
|
+
}
|
|
723
|
+
else if (event.type === 'response.content_part.done' && event.part) {
|
|
724
|
+
}
|
|
725
|
+
else if (event.type === 'response.output_text.delta' && event.delta) {
|
|
726
|
+
const itemId = event.item_id;
|
|
727
|
+
let position = messagePositions.get(itemId) ?? 0;
|
|
728
|
+
for (const ev of (0, delta_buffer_js_1.bufferDelta)(deltaBuffers, itemId, event.delta, content => ({
|
|
729
|
+
type: 'message_delta',
|
|
730
|
+
content,
|
|
731
|
+
message_id: itemId,
|
|
732
|
+
order: position++,
|
|
733
|
+
}))) {
|
|
734
|
+
yield ev;
|
|
735
|
+
}
|
|
736
|
+
messagePositions.set(itemId, position);
|
|
737
|
+
}
|
|
738
|
+
else if (event.type === 'response.output_text.annotation.added' &&
|
|
739
|
+
event.annotation) {
|
|
740
|
+
const eventData = event;
|
|
741
|
+
if (eventData.annotation?.type === 'url_citation' && eventData.annotation.url) {
|
|
742
|
+
const marker = (0, citation_tracker_js_1.formatCitation)(citationTracker, {
|
|
743
|
+
title: eventData.annotation.title || eventData.annotation.url,
|
|
744
|
+
url: eventData.annotation.url,
|
|
745
|
+
});
|
|
746
|
+
let position = messagePositions.get(eventData.item_id) ?? 0;
|
|
747
|
+
yield {
|
|
748
|
+
type: 'message_delta',
|
|
749
|
+
content: marker,
|
|
750
|
+
message_id: eventData.item_id,
|
|
751
|
+
order: position++,
|
|
752
|
+
};
|
|
753
|
+
messagePositions.set(eventData.item_id, position);
|
|
754
|
+
}
|
|
755
|
+
else {
|
|
756
|
+
console.log('Annotation added:', eventData.annotation);
|
|
757
|
+
}
|
|
758
|
+
}
|
|
759
|
+
else if (event.type === 'response.output_text.done' && event.text !== undefined) {
|
|
760
|
+
const itemId = event.item_id;
|
|
761
|
+
let finalText = event.text;
|
|
762
|
+
if (citationTracker.citations.size > 0) {
|
|
763
|
+
const footnotes = (0, citation_tracker_js_1.generateFootnotes)(citationTracker);
|
|
764
|
+
finalText += footnotes;
|
|
765
|
+
}
|
|
766
|
+
yield {
|
|
767
|
+
type: 'message_complete',
|
|
768
|
+
content: finalText,
|
|
769
|
+
message_id: itemId,
|
|
770
|
+
};
|
|
771
|
+
messagePositions.delete(itemId);
|
|
772
|
+
}
|
|
773
|
+
else if (event.type === 'response.refusal.delta' && event.delta) {
|
|
774
|
+
console.log(`Refusal delta for item ${event.item_id}: ${event.delta}`);
|
|
775
|
+
}
|
|
776
|
+
else if (event.type === 'response.refusal.done' && event.refusal) {
|
|
777
|
+
(0, llm_logger_js_1.log_llm_error)(requestId, 'OpenAI refusal error: ' + event.refusal);
|
|
778
|
+
console.log(`Refusal done for item ${event.item_id}: ${event.refusal}`);
|
|
779
|
+
yield {
|
|
780
|
+
type: 'error',
|
|
781
|
+
error: 'OpenAI refusal error: ' + event.refusal,
|
|
782
|
+
};
|
|
783
|
+
}
|
|
784
|
+
else if (event.type === 'response.function_call_arguments.delta' && event.delta) {
|
|
785
|
+
const currentCall = toolCallStates.get(event.item_id);
|
|
786
|
+
if (currentCall) {
|
|
787
|
+
currentCall.function.arguments += event.delta;
|
|
788
|
+
}
|
|
789
|
+
else {
|
|
790
|
+
console.warn(`Received function_call_arguments.delta for unknown item_id: ${event.item_id}`);
|
|
791
|
+
}
|
|
792
|
+
}
|
|
793
|
+
else if (event.type === 'response.function_call_arguments.done' &&
|
|
794
|
+
event.arguments !== undefined) {
|
|
795
|
+
const currentCall = toolCallStates.get(event.item_id);
|
|
796
|
+
if (currentCall) {
|
|
797
|
+
currentCall.function.arguments = event.arguments;
|
|
798
|
+
yield {
|
|
799
|
+
type: 'tool_start',
|
|
800
|
+
tool_call: currentCall,
|
|
801
|
+
};
|
|
802
|
+
toolCallStates.delete(event.item_id);
|
|
803
|
+
}
|
|
804
|
+
else {
|
|
805
|
+
console.warn(`Received function_call_arguments.done for unknown or already yielded item_id: ${event.item_id}`);
|
|
806
|
+
}
|
|
807
|
+
}
|
|
808
|
+
else if (event.type === 'response.file_search_call.in_progress') {
|
|
809
|
+
console.log(`File search in progress for item ${event.item_id}...`);
|
|
810
|
+
}
|
|
811
|
+
else if (event.type === 'response.file_search_call.searching') {
|
|
812
|
+
console.log(`File search searching for item ${event.item_id}...`);
|
|
813
|
+
}
|
|
814
|
+
else if (event.type === 'response.file_search_call.completed') {
|
|
815
|
+
console.log(`File search completed for item ${event.item_id}.`);
|
|
816
|
+
}
|
|
817
|
+
else if (event.type === 'response.web_search_call.in_progress') {
|
|
818
|
+
console.log(`Web search in progress for item ${event.item_id}...`);
|
|
819
|
+
}
|
|
820
|
+
else if (event.type === 'response.web_search_call.searching') {
|
|
821
|
+
console.log(`Web search searching for item ${event.item_id}...`);
|
|
822
|
+
}
|
|
823
|
+
else if (event.type === 'response.web_search_call.completed') {
|
|
824
|
+
console.log(`Web search completed for item ${event.item_id}.`);
|
|
825
|
+
}
|
|
826
|
+
else if (event.type === 'response.reasoning_summary_part.added') {
|
|
827
|
+
console.log(`Reasoning summary part added for item ${event.item_id}, index ${event.summary_index}`);
|
|
828
|
+
}
|
|
829
|
+
else if (event.type === 'response.reasoning_summary_part.done') {
|
|
830
|
+
console.log(`Reasoning summary part done for item ${event.item_id}, index ${event.summary_index}`);
|
|
831
|
+
}
|
|
832
|
+
else if (event.type === 'response.reasoning_summary_text.delta' && event.delta) {
|
|
833
|
+
const itemId = event.item_id + '-' + event.summary_index;
|
|
834
|
+
let position = reasoningPositions.get(itemId) ?? 0;
|
|
835
|
+
reasoningAggregates.set(itemId, reasoningAggregates.get(itemId) + event.delta);
|
|
836
|
+
yield {
|
|
837
|
+
type: 'message_delta',
|
|
838
|
+
content: '',
|
|
839
|
+
message_id: itemId,
|
|
840
|
+
thinking_content: event.delta,
|
|
841
|
+
order: position++,
|
|
842
|
+
};
|
|
843
|
+
reasoningPositions.set(itemId, position);
|
|
844
|
+
}
|
|
845
|
+
else if (event.type === 'response.reasoning_summary_text.done' && event.text !== undefined) {
|
|
846
|
+
const itemId = event.item_id + '-' + event.summary_index;
|
|
847
|
+
const aggregatedThinking = event.text;
|
|
848
|
+
yield {
|
|
849
|
+
type: 'message_complete',
|
|
850
|
+
content: '',
|
|
851
|
+
message_id: itemId,
|
|
852
|
+
thinking_content: aggregatedThinking,
|
|
853
|
+
};
|
|
854
|
+
reasoningPositions.delete(itemId);
|
|
855
|
+
reasoningAggregates.delete(itemId);
|
|
856
|
+
}
|
|
857
|
+
else if (event.type === 'error' && event.message) {
|
|
858
|
+
(0, llm_logger_js_1.log_llm_error)(requestId, event);
|
|
859
|
+
console.error(`API Stream Error (${model}): [${event.code || 'N/A'}] ${event.message}`);
|
|
860
|
+
yield {
|
|
861
|
+
type: 'error',
|
|
862
|
+
error: `OpenAI API error (${model}): [${event.code || 'N/A'}] ${event.message}`,
|
|
863
|
+
};
|
|
864
|
+
}
|
|
865
|
+
}
|
|
866
|
+
}
|
|
867
|
+
catch (streamError) {
|
|
868
|
+
(0, llm_logger_js_1.log_llm_error)(requestId, streamError);
|
|
869
|
+
console.error('Error processing response stream:', streamError);
|
|
870
|
+
yield {
|
|
871
|
+
type: 'error',
|
|
872
|
+
error: `OpenAI stream request error (${model}): ${streamError}`,
|
|
873
|
+
};
|
|
874
|
+
}
|
|
875
|
+
finally {
|
|
876
|
+
if (toolCallStates.size > 0) {
|
|
877
|
+
console.warn(`Stream ended with ${toolCallStates.size} incomplete tool call(s).`);
|
|
878
|
+
for (const [, toolCall] of toolCallStates.entries()) {
|
|
879
|
+
if (toolCall.function.name) {
|
|
880
|
+
yield {
|
|
881
|
+
type: 'tool_start',
|
|
882
|
+
tool_call: toolCall,
|
|
883
|
+
};
|
|
884
|
+
}
|
|
885
|
+
}
|
|
886
|
+
toolCallStates.clear();
|
|
887
|
+
}
|
|
888
|
+
for (const ev of (0, delta_buffer_js_1.flushBufferedDeltas)(deltaBuffers, (id, content) => {
|
|
889
|
+
let position = messagePositions.get(id) ?? 0;
|
|
890
|
+
position++;
|
|
891
|
+
messagePositions.set(id, position);
|
|
892
|
+
return {
|
|
893
|
+
type: 'message_delta',
|
|
894
|
+
content,
|
|
895
|
+
message_id: id,
|
|
896
|
+
order: position,
|
|
897
|
+
};
|
|
898
|
+
})) {
|
|
899
|
+
yield ev;
|
|
900
|
+
}
|
|
901
|
+
messagePositions.clear();
|
|
902
|
+
(0, llm_logger_js_1.log_llm_response)(requestId, events);
|
|
903
|
+
}
|
|
904
|
+
}
|
|
905
|
+
catch (error) {
|
|
906
|
+
(0, llm_logger_js_1.log_llm_error)(requestId, error);
|
|
907
|
+
console.error('Error in OpenAI streaming response:', error);
|
|
908
|
+
yield {
|
|
909
|
+
type: 'error',
|
|
910
|
+
error: 'OpenAI streaming error: ' + (error instanceof Error ? error.stack : String(error)),
|
|
911
|
+
};
|
|
912
|
+
}
|
|
913
|
+
}
|
|
914
|
+
    /**
     * Stream speech-to-text transcription over the OpenAI Realtime WebSocket
     * (`intent=transcription`). Audio chunks are base64-encoded and appended
     * to the input buffer; transcript events received on the socket are
     * queued by the message callback and drained/yielded from the read loop.
     *
     * @param {ReadableStream|AsyncIterable} audio - Raw audio chunk source.
     * @param {object} agent - Unused here; kept for provider interface parity.
     * @param {string} model - One of the supported transcription models.
     * @param {object} [opts] - prompt/language/vad/noiseReduction/audioFormat.
     * @yields Transcription events (turn start/delta/complete, complete, error).
     */
    async *createTranscription(audio, agent, model, opts) {
        const transcriptionModels = ['gpt-4o-transcribe', 'gpt-4o-mini-transcribe', 'whisper-1'];
        if (!transcriptionModels.includes(model)) {
            throw new Error(`Model ${model} does not support transcription. Supported models: ${transcriptionModels.join(', ')}`);
        }
        // Shared mutable state between the WebSocket callbacks and the
        // async read loop below.
        let ws = null;
        let isConnected = false;
        let connectionError = null;
        try {
            const { WebSocket } = await Promise.resolve().then(() => __importStar(require('ws')));
            const apiKey = this.apiKey || process.env.OPENAI_API_KEY;
            if (!apiKey) {
                throw new Error('Failed to initialize OpenAI transcription. Make sure OPENAI_API_KEY is set.');
            }
            const wsUrl = 'wss://api.openai.com/v1/realtime?intent=transcription';
            ws = new WebSocket(wsUrl, {
                headers: {
                    Authorization: 'Bearer ' + apiKey,
                    'OpenAI-Beta': 'realtime=v1',
                },
            });
            // Events queued by the 'message' callback, drained by the loop.
            const transcriptEvents = [];
            const connectionPromise = new Promise((resolve, reject) => {
                // Fail fast if the socket never opens.
                const timeout = setTimeout(() => {
                    reject(new Error('Connection timeout'));
                }, 10000);
                ws.on('open', () => {
                    clearTimeout(timeout);
                    console.log('[OpenAI] WebSocket connected for transcription');
                    isConnected = true;
                    resolve();
                });
                ws.on('error', error => {
                    clearTimeout(timeout);
                    // Recorded so the read loop can surface errors that occur
                    // after the connection was established.
                    connectionError = error;
                    reject(error);
                });
            });
            ws.on('message', (data) => {
                try {
                    const event = JSON.parse(data.toString());
                    console.dir(event, { depth: null });
                    switch (event.type) {
                        case 'transcription_session.created':
                        case 'session.created': {
                            // Configure the session as soon as it exists.
                            const sessionUpdate = {
                                type: 'transcription_session.update',
                                session: {
                                    // NOTE(review): both branches yield 'pcm16' — the
                                    // ternary is dead; presumably other encodings were
                                    // intended to map differently. Confirm before changing.
                                    input_audio_format: opts?.audioFormat?.encoding === 'pcm' ? 'pcm16' : 'pcm16',
                                    input_audio_transcription: {
                                        model: model,
                                        prompt: opts?.prompt || 'You are a helpful assistant.',
                                        language: opts?.language || 'en',
                                    },
                                    // vad === false disables server-side turn detection;
                                    // the caller must commit the buffer explicitly.
                                    turn_detection: opts?.vad === false
                                        ? null
                                        : {
                                            type: 'semantic_vad',
                                        },
                                    input_audio_noise_reduction: opts?.noiseReduction === null
                                        ? null
                                        : {
                                            type: opts?.noiseReduction || 'far_field',
                                        },
                                },
                            };
                            ws.send(JSON.stringify(sessionUpdate));
                            break;
                        }
                        case 'conversation.item.input_audio_transcription.delta': {
                            // whisper-1 deltas are suppressed; only turn-complete
                            // events are emitted for that model.
                            if (model !== 'whisper-1') {
                                const deltaEvent = {
                                    type: 'transcription_turn_delta',
                                    timestamp: new Date().toISOString(),
                                    delta: event.delta,
                                    partial: true,
                                };
                                transcriptEvents.push(deltaEvent);
                            }
                            break;
                        }
                        case 'conversation.item.input_audio_transcription.completed': {
                            const completeText = event.transcript;
                            const turnEvent = {
                                type: 'transcription_turn_complete',
                                timestamp: new Date().toISOString(),
                                text: completeText,
                            };
                            transcriptEvents.push(turnEvent);
                            break;
                        }
                        case 'input_audio_buffer.speech_started': {
                            const previewEvent = {
                                type: 'transcription_turn_start',
                                timestamp: new Date().toISOString(),
                            };
                            transcriptEvents.push(previewEvent);
                            break;
                        }
                        case 'input_audio_buffer.speech_stopped': {
                            // Intentionally ignored.
                            break;
                        }
                        case 'error': {
                            const errorEvent = {
                                type: 'error',
                                timestamp: new Date().toISOString(),
                                error: event.error?.message || 'Unknown error',
                            };
                            transcriptEvents.push(errorEvent);
                            break;
                        }
                    }
                }
                catch (error) {
                    // Malformed frame: log and keep the session alive.
                    console.error('[OpenAI] Error processing message:', error);
                }
            });
            ws.on('close', () => {
                console.log('[OpenAI] WebSocket closed');
                isConnected = false;
            });
            await connectionPromise;
            const audioStream = normalizeAudioSource(audio);
            const reader = audioStream.getReader();
            try {
                // Pump audio upstream while draining queued transcript events.
                while (true) {
                    const { done, value } = await reader.read();
                    if (done)
                        break;
                    if (value && ws && isConnected) {
                        const audioEvent = {
                            type: 'input_audio_buffer.append',
                            audio: Buffer.from(value).toString('base64'),
                        };
                        ws.send(JSON.stringify(audioEvent));
                    }
                    if (transcriptEvents.length > 0) {
                        // splice() empties the queue atomically before yielding.
                        const events = transcriptEvents.splice(0, transcriptEvents.length);
                        for (const event of events) {
                            yield event;
                        }
                    }
                    if (connectionError) {
                        throw connectionError;
                    }
                }
                // Without VAD the server never auto-commits; do it manually.
                if (opts?.vad === false && ws && isConnected) {
                    ws.send(JSON.stringify({ type: 'input_audio_buffer.commit' }));
                }
                // Grace period for trailing transcript events to arrive.
                await new Promise(resolve => setTimeout(resolve, 1000));
                if (transcriptEvents.length > 0) {
                    const events = transcriptEvents.splice(0, transcriptEvents.length);
                    for (const event of events) {
                        yield event;
                    }
                }
                const completeEvent = {
                    type: 'transcription_complete',
                    timestamp: new Date().toISOString(),
                };
                yield completeEvent;
            }
            finally {
                reader.releaseLock();
                if (ws && ws.readyState === ws.OPEN) {
                    ws.close();
                }
            }
        }
        catch (error) {
            // Errors are surfaced as an event rather than thrown so the
            // consumer's for-await loop terminates cleanly.
            console.error('[OpenAI] Transcription error:', error);
            const errorEvent = {
                type: 'error',
                timestamp: new Date().toISOString(),
                error: error instanceof Error ? error.message : 'Transcription failed',
            };
            yield errorEvent;
        }
    }
|
|
1093
|
+
}
|
|
1094
|
+
// CommonJS export of the provider class.
exports.OpenAIProvider = OpenAIProvider;
|
|
1095
|
+
/**
 * Coerce an audio input into a WHATWG ReadableStream of raw chunks.
 *
 * A ReadableStream is returned unchanged; any async-iterable (e.g. an async
 * generator or a Node Readable) is pumped into a new ReadableStream whose
 * chunks are enqueued in iteration order.
 *
 * @param {ReadableStream|AsyncIterable} source - Audio chunk source.
 * @returns {ReadableStream} Stream yielding the source's chunks.
 * @throws {Error} If `source` is neither a ReadableStream nor async-iterable.
 */
function normalizeAudioSource(source) {
    // Already a web stream — nothing to adapt.
    if (source instanceof ReadableStream) {
        return source;
    }
    // Guard clause: reject anything that is not async-iterable.
    const iterable = source !== null &&
        typeof source === 'object' &&
        Symbol.asyncIterator in source;
    if (!iterable) {
        throw new Error('Invalid audio source type');
    }
    // Wrap the async iterator; errors propagate via controller.error.
    return new ReadableStream({
        async start(controller) {
            try {
                for await (const chunk of source) {
                    controller.enqueue(chunk);
                }
                controller.close();
            }
            catch (error) {
                controller.error(error);
            }
        },
    });
}
|
|
1116
|
+
// Module-level shared instance of the provider.
exports.openaiProvider = new OpenAIProvider();
|
|
1117
|
+
//# sourceMappingURL=openai.js.map
|