@rcrsr/rill-ext-openai 0.8.3 → 0.8.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +81 -4
- package/dist/index.js +940 -14
- package/package.json +10 -6
- package/dist/factory.d.ts +0 -27
- package/dist/factory.d.ts.map +0 -1
- package/dist/factory.js +0 -768
- package/dist/factory.js.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/index.js.map +0 -1
- package/dist/types.d.ts +0 -11
- package/dist/types.d.ts.map +0 -1
- package/dist/types.js +0 -6
- package/dist/types.js.map +0 -1
package/dist/index.js
CHANGED
|
@@ -1,14 +1,940 @@
|
|
|
1
|
-
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
//
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
1
|
+
// src/factory.ts
|
|
2
|
+
import OpenAI from "openai";
|
|
3
|
+
import {
|
|
4
|
+
RuntimeError as RuntimeError4,
|
|
5
|
+
emitExtensionEvent,
|
|
6
|
+
createVector,
|
|
7
|
+
isCallable as isCallable2,
|
|
8
|
+
isVector
|
|
9
|
+
} from "@rcrsr/rill";
|
|
10
|
+
|
|
11
|
+
// ../../shared/ext-llm/dist/validation.js
|
|
12
|
+
import { RuntimeError } from "@rcrsr/rill";
|
|
13
|
+
var MIN_TEMPERATURE = 0;
|
|
14
|
+
var MAX_TEMPERATURE = 2;
|
|
15
|
+
// Ensure an API key was supplied and is non-empty. Uses plain Error (not
// RuntimeError) because this runs at factory-construction time, before any
// rill runtime context exists.
function validateApiKey(key) {
  if (key === undefined) throw new Error("api_key is required");
  if (key === "") throw new Error("api_key cannot be empty");
}
|
|
23
|
+
// Reject a missing or empty model identifier (any falsy value fails).
function validateModel(model) {
  const hasModel = Boolean(model);
  if (!hasModel) throw new Error("model is required");
}
|
|
28
|
+
// Validate the optional sampling temperature. `undefined` is allowed (the
// provider default applies); otherwise the value must lie in the inclusive
// range [0, 2]. The comparison shape deliberately lets NaN pass, matching
// the original behavior.
function validateTemperature(temperature) {
  if (temperature === undefined) return;
  if (temperature < 0 || temperature > 2) {
    throw new Error("temperature must be between 0 and 2");
  }
}
|
|
36
|
+
// Reject an empty string before issuing an embeddings request; non-empty
// input passes through silently.
function validateEmbedText(text) {
  if (text.length === 0) {
    throw new RuntimeError("RILL-R001", "embed text cannot be empty");
  }
}
|
|
41
|
+
// Verify that every batch entry is a non-empty string. Returns a fresh
// array holding the same entries (a validated copy), preserving order.
function validateEmbedBatch(texts) {
  const validated = [];
  for (const [index, entry] of texts.entries()) {
    if (typeof entry !== "string") {
      throw new RuntimeError("RILL-R001", "embed_batch requires list of strings");
    }
    if (entry === "") {
      throw new RuntimeError("RILL-R001", `embed text cannot be empty at index ${index}`);
    }
    validated.push(entry);
  }
  return validated;
}
|
|
55
|
+
// Embedding operations require an embed_model in the factory config; any
// falsy value (undefined, empty string) is treated as "not configured".
function validateEmbedModel(model) {
  if (!model) throw new RuntimeError("RILL-R001", "embed_model not configured");
}
|
|
60
|
+
|
|
61
|
+
// ../../shared/ext-llm/dist/errors.js
|
|
62
|
+
import { RuntimeError as RuntimeError2 } from "@rcrsr/rill";
|
|
63
|
+
// Translate an arbitrary provider failure into a RILL-R004 RuntimeError,
// preserving the original error via `cause`. The `detect` callback lets
// each provider extract { status?, message } from its SDK-specific error
// type; when it returns null we fall back to generic Error handling.
function mapProviderError(providerName, error, detect) {
  const wrap = (message) =>
    new RuntimeError2("RILL-R004", message, void 0, { cause: error });
  const detected = detect(error);
  if (detected !== null) {
    const { status, message } = detected;
    return status === void 0
      ? wrap(`${providerName} API error: ${message}`)
      : wrap(`${providerName} API error (HTTP ${status}): ${message}`);
  }
  const fallback = error instanceof Error ? error.message : "Unknown error";
  return wrap(`${providerName} error: ${fallback}`);
}
|
|
77
|
+
|
|
78
|
+
// ../../shared/ext-llm/dist/tool-loop.js
|
|
79
|
+
import { isCallable, isDict, RuntimeError as RuntimeError3 } from "@rcrsr/rill";
|
|
80
|
+
// Resolve and invoke a single tool by name.
//
// Validates that `tools` is a dict of callables, the named tool exists and
// is an application/runtime callable, and `toolInput` is an object. The
// LLM-provided input object is mapped onto positional args: by declared
// param name for application callables with params, otherwise passed as a
// single object argument. RuntimeErrors propagate unchanged; anything else
// is wrapped in a RILL-R004 "Invalid tool input" error.
async function executeToolCall(toolName, toolInput, tools, context) {
  if (!isDict(tools)) {
    throw new RuntimeError3("RILL-R004", "tools must be a dict mapping tool names to functions");
  }
  const registry = tools;
  const candidate = registry[toolName];
  if (candidate === void 0 || candidate === null) {
    throw new RuntimeError3("RILL-R004", `Unknown tool: ${toolName}`);
  }
  if (!isCallable(candidate)) {
    throw new RuntimeError3("RILL-R004", `Invalid tool input for ${toolName}: tool must be callable`);
  }
  if (typeof toolInput !== "object" || toolInput === null) {
    throw new RuntimeError3("RILL-R004", `Invalid tool input for ${toolName}: input must be an object`);
  }
  const callable = candidate;
  if (callable.kind !== "runtime" && callable.kind !== "application") {
    throw new RuntimeError3("RILL-R004", `Invalid tool input for ${toolName}: tool must be application or runtime callable`);
  }
  try {
    // Application callables declare named params; map the input dict onto
    // them positionally (absent keys become undefined). Everything else
    // receives the raw input object as its only argument.
    const hasDeclaredParams = callable.kind === "application" && callable.params;
    const args = hasDeclaredParams
      ? callable.params.map((param) => toolInput[param.name])
      : [toolInput];
    // Fall back to a minimal root context when the caller supplied none.
    const ctx = context ?? {
      parent: void 0,
      variables: /* @__PURE__ */ new Map(),
      pipeValue: null
    };
    const outcome = callable.fn(args, ctx);
    return outcome instanceof Promise ? await outcome : outcome;
  } catch (error) {
    if (error instanceof RuntimeError3) {
      throw error;
    }
    const message = error instanceof Error ? error.message : "Unknown error";
    throw new RuntimeError3("RILL-R004", `Invalid tool input for ${toolName}: ${message}`);
  }
}
|
|
126
|
+
/**
 * Run a provider-agnostic tool-use loop: describe the tools to the model,
 * call the provider, execute any tool calls it requests, feed the results
 * back, and repeat until the model answers without tools or `maxTurns`
 * round-trips have elapsed.
 *
 * @param messages   Seed conversation (not mutated; an internal copy grows).
 * @param tools      Dict mapping tool name -> rill callable.
 * @param maxErrors  Abort after this many CONSECUTIVE tool failures.
 * @param callbacks  Provider adapter: { buildTools, callAPI,
 *                   extractToolCalls, formatToolResult }.
 * @param emitEvent  Telemetry sink for "tool_call" / "tool_result" events.
 * @param maxTurns   Upper bound on provider round-trips (default 10).
 * @param context    Optional execution context forwarded to tools.
 * @returns {{ response, toolCalls, totalTokens, turns }} — `response` is
 *          null when maxTurns elapsed while the model was still using tools.
 * @throws RuntimeError3 (RILL-R004) for invalid tools, provider failures,
 *         or `maxErrors` consecutive tool failures.
 */
async function executeToolLoop(messages, tools, maxErrors, callbacks, emitEvent, maxTurns = 10, context) {
  if (tools === void 0) {
    throw new RuntimeError3("RILL-R004", "tools parameter is required");
  }
  if (!isDict(tools)) {
    throw new RuntimeError3("RILL-R004", "tools must be a dict mapping tool names to functions");
  }
  const toolsDict = tools;
  // Build provider-neutral descriptors (name / description / JSON schema)
  // from each callable's declared params.
  const toolDescriptors = Object.entries(toolsDict).map(([name, fn]) => {
    const fnValue = fn;
    if (!isCallable(fnValue)) {
      throw new RuntimeError3("RILL-R004", `tool '${name}' must be callable function`);
    }
    const callable = fnValue;
    const description = callable.kind === "application" && callable.description ? callable.description : "";
    const properties = {};
    const required = [];
    if (callable.kind === "application" && callable.params) {
      for (const param of callable.params) {
        let jsonSchemaType;
        switch (param.typeName) {
          case "string":
            jsonSchemaType = "string";
            break;
          case "number":
            jsonSchemaType = "number";
            break;
          case "bool":
            jsonSchemaType = "boolean";
            break;
          case "list":
            jsonSchemaType = "array";
            break;
          case "dict":
          case "vector":
            jsonSchemaType = "object";
            break;
          default:
            // FIX: the original switch handled only `case null`, so any
            // other unrecognized typeName left jsonSchemaType undefined and
            // emitted `{ type: undefined }` in the schema. Treat every
            // unknown (including null) as "string", matching the old null
            // fallback.
            jsonSchemaType = "string";
            break;
        }
        const property = {
          type: jsonSchemaType
        };
        if (param.description) {
          property["description"] = param.description;
        }
        properties[param.name] = property;
        // Params without a default value are required by the schema.
        if (param.defaultValue === null) {
          required.push(param.name);
        }
      }
    }
    return {
      name,
      description,
      input_schema: {
        type: "object",
        properties,
        required
      }
    };
  });
  const providerTools = callbacks.buildTools(toolDescriptors);
  let consecutiveErrors = 0;
  let totalInputTokens = 0;
  let totalOutputTokens = 0;
  const executedToolCalls = [];
  const currentMessages = [...messages];
  let turnCount = 0;
  while (turnCount < maxTurns) {
    turnCount++;
    let response;
    try {
      response = await callbacks.callAPI(currentMessages, providerTools);
    } catch (error) {
      const message = error instanceof Error ? error.message : "Unknown error";
      throw new RuntimeError3("RILL-R004", `Provider API error: ${message}`, void 0, { cause: error });
    }
    // Accumulate token usage when the adapter surfaces it in
    // { usage: { input_tokens, output_tokens } } shape; anything else is
    // silently skipped.
    if (typeof response === "object" && response !== null && "usage" in response) {
      const usage = response["usage"];
      if (typeof usage === "object" && usage !== null) {
        const usageRecord = usage;
        const inputTokens = typeof usageRecord["input_tokens"] === "number" ? usageRecord["input_tokens"] : 0;
        const outputTokens = typeof usageRecord["output_tokens"] === "number" ? usageRecord["output_tokens"] : 0;
        totalInputTokens += inputTokens;
        totalOutputTokens += outputTokens;
      }
    }
    const toolCalls = callbacks.extractToolCalls(response);
    if (toolCalls === null || toolCalls.length === 0) {
      // Model answered without requesting tools: the loop is complete.
      return {
        response,
        toolCalls: executedToolCalls,
        totalTokens: { input: totalInputTokens, output: totalOutputTokens },
        turns: turnCount
      };
    }
    const toolResults = [];
    for (const toolCall of toolCalls) {
      const { id, name, input } = toolCall;
      emitEvent("tool_call", { tool_name: name, args: input });
      const toolStartTime = Date.now();
      try {
        const result = await executeToolCall(name, input, tools, context);
        const duration = Date.now() - toolStartTime;
        toolResults.push({ id, name, result });
        executedToolCalls.push({ name, result });
        consecutiveErrors = 0;
        emitEvent("tool_result", { tool_name: name, duration });
      } catch (error) {
        const duration = Date.now() - toolStartTime;
        consecutiveErrors++;
        // Strip the "Invalid tool input for <name>: " wrapper added by
        // executeToolCall so the model sees the underlying message.
        let originalError;
        if (error instanceof RuntimeError3) {
          const prefix = `Invalid tool input for ${name}: `;
          if (error.message.startsWith(prefix)) {
            originalError = error.message.slice(prefix.length);
          } else {
            originalError = error.message;
          }
        } else if (error instanceof Error) {
          originalError = error.message;
        } else {
          originalError = "Unknown error";
        }
        const errorResult = originalError;
        toolResults.push({
          id,
          name,
          result: errorResult,
          error: originalError
        });
        emitEvent("tool_result", {
          tool_name: name,
          error: originalError,
          duration
        });
        if (consecutiveErrors >= maxErrors) {
          throw new RuntimeError3("RILL-R004", `Tool execution failed: ${maxErrors} consecutive errors`);
        }
      }
    }
    const toolResultMessage = callbacks.formatToolResult(toolResults);
    // FIX: adapters may return either one message or an ARRAY of messages
    // (the OpenAI adapter in this file returns one "tool" message per
    // result). The original pushed the array itself, nesting it inside the
    // message list; flatten arrays instead. Single-message adapters are
    // unaffected. NOTE(review): the assistant message containing the
    // tool_calls is still not re-appended here — confirm each adapter's
    // callAPI tolerates that.
    if (Array.isArray(toolResultMessage)) {
      currentMessages.push(...toolResultMessage);
    } else {
      currentMessages.push(toolResultMessage);
    }
  }
  // maxTurns exhausted while the model was still requesting tools.
  return {
    response: null,
    toolCalls: executedToolCalls,
    totalTokens: { input: totalInputTokens, output: totalOutputTokens },
    turns: turnCount
  };
}
|
|
279
|
+
|
|
280
|
+
// src/factory.ts
|
|
281
|
+
var DEFAULT_MAX_TOKENS = 4096;
|
|
282
|
+
// Extract { status?, message } from OpenAI SDK errors. Returns null for
// anything that is not an OpenAI.APIError so mapProviderError falls back
// to its generic Error handling.
var detectOpenAIError = (error) => {
  if (!(error instanceof OpenAI.APIError)) {
    return null;
  }
  return {
    status: error.status ?? void 0,
    message: error.message
  };
};
|
|
291
|
+
function createOpenAIExtension(config) {
|
|
292
|
+
validateApiKey(config.api_key);
|
|
293
|
+
validateModel(config.model);
|
|
294
|
+
validateTemperature(config.temperature);
|
|
295
|
+
const client = new OpenAI({
|
|
296
|
+
apiKey: config.api_key,
|
|
297
|
+
baseURL: config.base_url,
|
|
298
|
+
maxRetries: config.max_retries,
|
|
299
|
+
timeout: config.timeout
|
|
300
|
+
});
|
|
301
|
+
const factoryModel = config.model;
|
|
302
|
+
const factoryTemperature = config.temperature;
|
|
303
|
+
const factoryMaxTokens = config.max_tokens ?? DEFAULT_MAX_TOKENS;
|
|
304
|
+
const factorySystem = config.system;
|
|
305
|
+
const factoryEmbedModel = config.embed_model;
|
|
306
|
+
void factoryEmbedModel;
|
|
307
|
+
let abortController = new AbortController();
|
|
308
|
+
const dispose = async () => {
|
|
309
|
+
try {
|
|
310
|
+
if (abortController) {
|
|
311
|
+
abortController.abort();
|
|
312
|
+
abortController = void 0;
|
|
313
|
+
}
|
|
314
|
+
} catch (error) {
|
|
315
|
+
const message = error instanceof Error ? error.message : "Unknown error";
|
|
316
|
+
console.warn(`Failed to abort OpenAI requests: ${message}`);
|
|
317
|
+
}
|
|
318
|
+
try {
|
|
319
|
+
} catch (error) {
|
|
320
|
+
const message = error instanceof Error ? error.message : "Unknown error";
|
|
321
|
+
console.warn(`Failed to cleanup OpenAI SDK: ${message}`);
|
|
322
|
+
}
|
|
323
|
+
};
|
|
324
|
+
const result = {
|
|
325
|
+
// IR-4: openai::message
|
|
326
|
+
message: {
|
|
327
|
+
params: [
|
|
328
|
+
{ name: "text", type: "string" },
|
|
329
|
+
{ name: "options", type: "dict", defaultValue: {} }
|
|
330
|
+
],
|
|
331
|
+
fn: async (args, ctx) => {
|
|
332
|
+
const startTime = Date.now();
|
|
333
|
+
try {
|
|
334
|
+
const text = args[0];
|
|
335
|
+
const options = args[1] ?? {};
|
|
336
|
+
if (text.trim().length === 0) {
|
|
337
|
+
throw new RuntimeError4("RILL-R004", "prompt text cannot be empty");
|
|
338
|
+
}
|
|
339
|
+
const system = typeof options["system"] === "string" ? options["system"] : factorySystem;
|
|
340
|
+
const maxTokens = typeof options["max_tokens"] === "number" ? options["max_tokens"] : factoryMaxTokens;
|
|
341
|
+
const apiMessages = [];
|
|
342
|
+
if (system !== void 0) {
|
|
343
|
+
apiMessages.push({
|
|
344
|
+
role: "system",
|
|
345
|
+
content: system
|
|
346
|
+
});
|
|
347
|
+
}
|
|
348
|
+
apiMessages.push({
|
|
349
|
+
role: "user",
|
|
350
|
+
content: text
|
|
351
|
+
});
|
|
352
|
+
const apiParams = {
|
|
353
|
+
model: factoryModel,
|
|
354
|
+
max_tokens: maxTokens,
|
|
355
|
+
messages: apiMessages
|
|
356
|
+
};
|
|
357
|
+
if (factoryTemperature !== void 0) {
|
|
358
|
+
apiParams.temperature = factoryTemperature;
|
|
359
|
+
}
|
|
360
|
+
const response = await client.chat.completions.create(apiParams);
|
|
361
|
+
const content = response.choices[0]?.message?.content ?? "";
|
|
362
|
+
const result2 = {
|
|
363
|
+
content,
|
|
364
|
+
model: response.model,
|
|
365
|
+
usage: {
|
|
366
|
+
input: response.usage?.prompt_tokens ?? 0,
|
|
367
|
+
output: response.usage?.completion_tokens ?? 0
|
|
368
|
+
},
|
|
369
|
+
stop_reason: response.choices[0]?.finish_reason ?? "unknown",
|
|
370
|
+
id: response.id,
|
|
371
|
+
messages: [
|
|
372
|
+
...system ? [{ role: "system", content: system }] : [],
|
|
373
|
+
{ role: "user", content: text },
|
|
374
|
+
{ role: "assistant", content }
|
|
375
|
+
]
|
|
376
|
+
};
|
|
377
|
+
const duration = Date.now() - startTime;
|
|
378
|
+
emitExtensionEvent(ctx, {
|
|
379
|
+
event: "openai:message",
|
|
380
|
+
subsystem: "extension:openai",
|
|
381
|
+
duration,
|
|
382
|
+
model: response.model,
|
|
383
|
+
usage: result2.usage
|
|
384
|
+
});
|
|
385
|
+
return result2;
|
|
386
|
+
} catch (error) {
|
|
387
|
+
const duration = Date.now() - startTime;
|
|
388
|
+
const rillError = mapProviderError(
|
|
389
|
+
"OpenAI",
|
|
390
|
+
error,
|
|
391
|
+
detectOpenAIError
|
|
392
|
+
);
|
|
393
|
+
emitExtensionEvent(ctx, {
|
|
394
|
+
event: "openai:error",
|
|
395
|
+
subsystem: "extension:openai",
|
|
396
|
+
error: rillError.message,
|
|
397
|
+
duration
|
|
398
|
+
});
|
|
399
|
+
throw rillError;
|
|
400
|
+
}
|
|
401
|
+
},
|
|
402
|
+
description: "Send single message to OpenAI API",
|
|
403
|
+
returnType: "dict"
|
|
404
|
+
},
|
|
405
|
+
// IR-5: openai::messages
|
|
406
|
+
messages: {
|
|
407
|
+
params: [
|
|
408
|
+
{ name: "messages", type: "list" },
|
|
409
|
+
{ name: "options", type: "dict", defaultValue: {} }
|
|
410
|
+
],
|
|
411
|
+
fn: async (args, ctx) => {
|
|
412
|
+
const startTime = Date.now();
|
|
413
|
+
try {
|
|
414
|
+
const messages = args[0];
|
|
415
|
+
const options = args[1] ?? {};
|
|
416
|
+
if (messages.length === 0) {
|
|
417
|
+
throw new RuntimeError4(
|
|
418
|
+
"RILL-R004",
|
|
419
|
+
"messages list cannot be empty"
|
|
420
|
+
);
|
|
421
|
+
}
|
|
422
|
+
const system = typeof options["system"] === "string" ? options["system"] : factorySystem;
|
|
423
|
+
const maxTokens = typeof options["max_tokens"] === "number" ? options["max_tokens"] : factoryMaxTokens;
|
|
424
|
+
const apiMessages = [];
|
|
425
|
+
if (system !== void 0) {
|
|
426
|
+
apiMessages.push({
|
|
427
|
+
role: "system",
|
|
428
|
+
content: system
|
|
429
|
+
});
|
|
430
|
+
}
|
|
431
|
+
for (let i = 0; i < messages.length; i++) {
|
|
432
|
+
const msg = messages[i];
|
|
433
|
+
if (!msg || typeof msg !== "object" || !("role" in msg)) {
|
|
434
|
+
throw new RuntimeError4(
|
|
435
|
+
"RILL-R004",
|
|
436
|
+
"message missing required 'role' field"
|
|
437
|
+
);
|
|
438
|
+
}
|
|
439
|
+
const role = msg["role"];
|
|
440
|
+
if (role !== "user" && role !== "assistant" && role !== "tool") {
|
|
441
|
+
throw new RuntimeError4("RILL-R004", `invalid role '${role}'`);
|
|
442
|
+
}
|
|
443
|
+
if (role === "user" || role === "tool") {
|
|
444
|
+
if (!("content" in msg) || typeof msg["content"] !== "string") {
|
|
445
|
+
throw new RuntimeError4(
|
|
446
|
+
"RILL-R004",
|
|
447
|
+
`${role} message requires 'content'`
|
|
448
|
+
);
|
|
449
|
+
}
|
|
450
|
+
apiMessages.push({
|
|
451
|
+
role,
|
|
452
|
+
content: msg["content"]
|
|
453
|
+
});
|
|
454
|
+
} else if (role === "assistant") {
|
|
455
|
+
const hasContent = "content" in msg && msg["content"];
|
|
456
|
+
const hasToolCalls = "tool_calls" in msg && msg["tool_calls"];
|
|
457
|
+
if (!hasContent && !hasToolCalls) {
|
|
458
|
+
throw new RuntimeError4(
|
|
459
|
+
"RILL-R004",
|
|
460
|
+
"assistant message requires 'content' or 'tool_calls'"
|
|
461
|
+
);
|
|
462
|
+
}
|
|
463
|
+
if (hasContent) {
|
|
464
|
+
apiMessages.push({
|
|
465
|
+
role: "assistant",
|
|
466
|
+
content: msg["content"]
|
|
467
|
+
});
|
|
468
|
+
}
|
|
469
|
+
}
|
|
470
|
+
}
|
|
471
|
+
const apiParams = {
|
|
472
|
+
model: factoryModel,
|
|
473
|
+
max_tokens: maxTokens,
|
|
474
|
+
messages: apiMessages
|
|
475
|
+
};
|
|
476
|
+
if (factoryTemperature !== void 0) {
|
|
477
|
+
apiParams.temperature = factoryTemperature;
|
|
478
|
+
}
|
|
479
|
+
const response = await client.chat.completions.create(apiParams);
|
|
480
|
+
const content = response.choices[0]?.message?.content ?? "";
|
|
481
|
+
const fullMessages = [
|
|
482
|
+
...messages.map((m) => {
|
|
483
|
+
const normalized = { role: m["role"] };
|
|
484
|
+
if ("content" in m) normalized["content"] = m["content"];
|
|
485
|
+
if ("tool_calls" in m) normalized["tool_calls"] = m["tool_calls"];
|
|
486
|
+
return normalized;
|
|
487
|
+
}),
|
|
488
|
+
{ role: "assistant", content }
|
|
489
|
+
];
|
|
490
|
+
const result2 = {
|
|
491
|
+
content,
|
|
492
|
+
model: response.model,
|
|
493
|
+
usage: {
|
|
494
|
+
input: response.usage?.prompt_tokens ?? 0,
|
|
495
|
+
output: response.usage?.completion_tokens ?? 0
|
|
496
|
+
},
|
|
497
|
+
stop_reason: response.choices[0]?.finish_reason ?? "unknown",
|
|
498
|
+
id: response.id,
|
|
499
|
+
messages: fullMessages
|
|
500
|
+
};
|
|
501
|
+
const duration = Date.now() - startTime;
|
|
502
|
+
emitExtensionEvent(ctx, {
|
|
503
|
+
event: "openai:messages",
|
|
504
|
+
subsystem: "extension:openai",
|
|
505
|
+
duration,
|
|
506
|
+
model: response.model,
|
|
507
|
+
usage: result2.usage
|
|
508
|
+
});
|
|
509
|
+
return result2;
|
|
510
|
+
} catch (error) {
|
|
511
|
+
const duration = Date.now() - startTime;
|
|
512
|
+
const rillError = mapProviderError(
|
|
513
|
+
"OpenAI",
|
|
514
|
+
error,
|
|
515
|
+
detectOpenAIError
|
|
516
|
+
);
|
|
517
|
+
emitExtensionEvent(ctx, {
|
|
518
|
+
event: "openai:error",
|
|
519
|
+
subsystem: "extension:openai",
|
|
520
|
+
error: rillError.message,
|
|
521
|
+
duration
|
|
522
|
+
});
|
|
523
|
+
throw rillError;
|
|
524
|
+
}
|
|
525
|
+
},
|
|
526
|
+
description: "Send multi-turn conversation to OpenAI API",
|
|
527
|
+
returnType: "dict"
|
|
528
|
+
},
|
|
529
|
+
// IR-6: openai::embed
|
|
530
|
+
embed: {
|
|
531
|
+
params: [{ name: "text", type: "string" }],
|
|
532
|
+
fn: async (args, ctx) => {
|
|
533
|
+
const startTime = Date.now();
|
|
534
|
+
try {
|
|
535
|
+
const text = args[0];
|
|
536
|
+
validateEmbedText(text.trim());
|
|
537
|
+
validateEmbedModel(factoryEmbedModel);
|
|
538
|
+
const response = await client.embeddings.create({
|
|
539
|
+
model: factoryEmbedModel,
|
|
540
|
+
input: text,
|
|
541
|
+
encoding_format: "float"
|
|
542
|
+
});
|
|
543
|
+
const embeddingData = response.data[0]?.embedding;
|
|
544
|
+
if (!embeddingData || embeddingData.length === 0) {
|
|
545
|
+
throw new RuntimeError4(
|
|
546
|
+
"RILL-R004",
|
|
547
|
+
"OpenAI: empty embedding returned"
|
|
548
|
+
);
|
|
549
|
+
}
|
|
550
|
+
const float32Data = new Float32Array(embeddingData);
|
|
551
|
+
const vector = createVector(float32Data, factoryEmbedModel);
|
|
552
|
+
const duration = Date.now() - startTime;
|
|
553
|
+
emitExtensionEvent(ctx, {
|
|
554
|
+
event: "openai:embed",
|
|
555
|
+
subsystem: "extension:openai",
|
|
556
|
+
duration,
|
|
557
|
+
model: factoryEmbedModel,
|
|
558
|
+
dimensions: float32Data.length
|
|
559
|
+
});
|
|
560
|
+
return vector;
|
|
561
|
+
} catch (error) {
|
|
562
|
+
const duration = Date.now() - startTime;
|
|
563
|
+
const rillError = mapProviderError(
|
|
564
|
+
"OpenAI",
|
|
565
|
+
error,
|
|
566
|
+
detectOpenAIError
|
|
567
|
+
);
|
|
568
|
+
emitExtensionEvent(ctx, {
|
|
569
|
+
event: "openai:error",
|
|
570
|
+
subsystem: "extension:openai",
|
|
571
|
+
error: rillError.message,
|
|
572
|
+
duration
|
|
573
|
+
});
|
|
574
|
+
throw rillError;
|
|
575
|
+
}
|
|
576
|
+
},
|
|
577
|
+
description: "Generate embedding vector for text",
|
|
578
|
+
returnType: "vector"
|
|
579
|
+
},
|
|
580
|
+
// IR-7: openai::embed_batch
|
|
581
|
+
embed_batch: {
|
|
582
|
+
params: [{ name: "texts", type: "list" }],
|
|
583
|
+
fn: async (args, ctx) => {
|
|
584
|
+
const startTime = Date.now();
|
|
585
|
+
try {
|
|
586
|
+
const texts = args[0];
|
|
587
|
+
if (texts.length === 0) {
|
|
588
|
+
return [];
|
|
589
|
+
}
|
|
590
|
+
validateEmbedModel(factoryEmbedModel);
|
|
591
|
+
const stringTexts = validateEmbedBatch(texts);
|
|
592
|
+
const response = await client.embeddings.create({
|
|
593
|
+
model: factoryEmbedModel,
|
|
594
|
+
input: stringTexts,
|
|
595
|
+
encoding_format: "float"
|
|
596
|
+
});
|
|
597
|
+
const vectors = [];
|
|
598
|
+
for (const embeddingItem of response.data) {
|
|
599
|
+
const embeddingData = embeddingItem.embedding;
|
|
600
|
+
if (!embeddingData || embeddingData.length === 0) {
|
|
601
|
+
throw new RuntimeError4(
|
|
602
|
+
"RILL-R004",
|
|
603
|
+
"OpenAI: empty embedding returned"
|
|
604
|
+
);
|
|
605
|
+
}
|
|
606
|
+
const float32Data = new Float32Array(embeddingData);
|
|
607
|
+
const vector = createVector(float32Data, factoryEmbedModel);
|
|
608
|
+
vectors.push(vector);
|
|
609
|
+
}
|
|
610
|
+
const duration = Date.now() - startTime;
|
|
611
|
+
const firstVector = vectors[0];
|
|
612
|
+
const dimensions = firstVector && isVector(firstVector) ? firstVector.data.length : 0;
|
|
613
|
+
emitExtensionEvent(ctx, {
|
|
614
|
+
event: "openai:embed_batch",
|
|
615
|
+
subsystem: "extension:openai",
|
|
616
|
+
duration,
|
|
617
|
+
model: factoryEmbedModel,
|
|
618
|
+
dimensions,
|
|
619
|
+
count: vectors.length
|
|
620
|
+
});
|
|
621
|
+
return vectors;
|
|
622
|
+
} catch (error) {
|
|
623
|
+
const duration = Date.now() - startTime;
|
|
624
|
+
const rillError = mapProviderError(
|
|
625
|
+
"OpenAI",
|
|
626
|
+
error,
|
|
627
|
+
detectOpenAIError
|
|
628
|
+
);
|
|
629
|
+
emitExtensionEvent(ctx, {
|
|
630
|
+
event: "openai:error",
|
|
631
|
+
subsystem: "extension:openai",
|
|
632
|
+
error: rillError.message,
|
|
633
|
+
duration
|
|
634
|
+
});
|
|
635
|
+
throw rillError;
|
|
636
|
+
}
|
|
637
|
+
},
|
|
638
|
+
description: "Generate embedding vectors for multiple texts",
|
|
639
|
+
returnType: "list"
|
|
640
|
+
},
|
|
641
|
+
// IR-8: openai::tool_loop
|
|
642
|
+
tool_loop: {
|
|
643
|
+
params: [
|
|
644
|
+
{ name: "prompt", type: "string" },
|
|
645
|
+
{ name: "options", type: "dict", defaultValue: {} }
|
|
646
|
+
],
|
|
647
|
+
fn: async (args, ctx) => {
|
|
648
|
+
const startTime = Date.now();
|
|
649
|
+
try {
|
|
650
|
+
const prompt = args[0];
|
|
651
|
+
const options = args[1] ?? {};
|
|
652
|
+
if (prompt.trim().length === 0) {
|
|
653
|
+
throw new RuntimeError4("RILL-R004", "prompt text cannot be empty");
|
|
654
|
+
}
|
|
655
|
+
if (!("tools" in options) || !Array.isArray(options["tools"])) {
|
|
656
|
+
throw new RuntimeError4(
|
|
657
|
+
"RILL-R004",
|
|
658
|
+
"tool_loop requires 'tools' option"
|
|
659
|
+
);
|
|
660
|
+
}
|
|
661
|
+
const toolDescriptors = options["tools"];
|
|
662
|
+
const toolsDict = {};
|
|
663
|
+
for (const descriptor of toolDescriptors) {
|
|
664
|
+
const name = typeof descriptor["name"] === "string" ? descriptor["name"] : null;
|
|
665
|
+
if (!name) {
|
|
666
|
+
throw new RuntimeError4(
|
|
667
|
+
"RILL-R004",
|
|
668
|
+
"tool descriptor missing name"
|
|
669
|
+
);
|
|
670
|
+
}
|
|
671
|
+
const toolFnValue = descriptor["fn"];
|
|
672
|
+
if (!toolFnValue) {
|
|
673
|
+
throw new RuntimeError4(
|
|
674
|
+
"RILL-R004",
|
|
675
|
+
`tool '${name}' missing fn property`
|
|
676
|
+
);
|
|
677
|
+
}
|
|
678
|
+
if (!isCallable2(toolFnValue)) {
|
|
679
|
+
throw new RuntimeError4(
|
|
680
|
+
"RILL-R004",
|
|
681
|
+
`tool '${name}' fn must be callable`
|
|
682
|
+
);
|
|
683
|
+
}
|
|
684
|
+
const paramsObj = descriptor["params"];
|
|
685
|
+
const description = typeof descriptor["description"] === "string" ? descriptor["description"] : "";
|
|
686
|
+
let enhancedCallable = toolFnValue;
|
|
687
|
+
if (paramsObj && typeof paramsObj === "object" && !Array.isArray(paramsObj)) {
|
|
688
|
+
const params = Object.entries(
|
|
689
|
+
paramsObj
|
|
690
|
+
).map(([paramName, paramMeta]) => {
|
|
691
|
+
const meta = paramMeta;
|
|
692
|
+
const typeStr = typeof meta["type"] === "string" ? meta["type"] : null;
|
|
693
|
+
let typeName = null;
|
|
694
|
+
if (typeStr === "string") typeName = "string";
|
|
695
|
+
else if (typeStr === "number") typeName = "number";
|
|
696
|
+
else if (typeStr === "bool" || typeStr === "boolean")
|
|
697
|
+
typeName = "bool";
|
|
698
|
+
else if (typeStr === "list" || typeStr === "array")
|
|
699
|
+
typeName = "list";
|
|
700
|
+
else if (typeStr === "dict" || typeStr === "object")
|
|
701
|
+
typeName = "dict";
|
|
702
|
+
else if (typeStr === "vector") typeName = "vector";
|
|
703
|
+
const param = {
|
|
704
|
+
name: paramName,
|
|
705
|
+
typeName,
|
|
706
|
+
defaultValue: null,
|
|
707
|
+
annotations: {}
|
|
708
|
+
};
|
|
709
|
+
if (typeof meta["description"] === "string") {
|
|
710
|
+
param.description = meta["description"];
|
|
711
|
+
}
|
|
712
|
+
return param;
|
|
713
|
+
});
|
|
714
|
+
const baseCallable = toolFnValue;
|
|
715
|
+
enhancedCallable = {
|
|
716
|
+
__type: "callable",
|
|
717
|
+
kind: "application",
|
|
718
|
+
params,
|
|
719
|
+
fn: baseCallable.fn,
|
|
720
|
+
description,
|
|
721
|
+
isProperty: baseCallable.isProperty ?? false
|
|
722
|
+
};
|
|
723
|
+
}
|
|
724
|
+
toolsDict[name] = enhancedCallable;
|
|
725
|
+
}
|
|
726
|
+
const system = typeof options["system"] === "string" ? options["system"] : factorySystem;
|
|
727
|
+
const maxTokens = typeof options["max_tokens"] === "number" ? options["max_tokens"] : factoryMaxTokens;
|
|
728
|
+
const maxErrors = typeof options["max_errors"] === "number" ? options["max_errors"] : 3;
|
|
729
|
+
const maxTurns = typeof options["max_turns"] === "number" ? options["max_turns"] : 10;
|
|
730
|
+
const messages = [];
|
|
731
|
+
if (system !== void 0) {
|
|
732
|
+
messages.push({
|
|
733
|
+
role: "system",
|
|
734
|
+
content: system
|
|
735
|
+
});
|
|
736
|
+
}
|
|
737
|
+
if ("messages" in options && Array.isArray(options["messages"])) {
|
|
738
|
+
const prependedMessages = options["messages"];
|
|
739
|
+
for (const msg of prependedMessages) {
|
|
740
|
+
if (!msg || typeof msg !== "object" || !("role" in msg)) {
|
|
741
|
+
throw new RuntimeError4(
|
|
742
|
+
"RILL-R004",
|
|
743
|
+
"message missing required 'role' field"
|
|
744
|
+
);
|
|
745
|
+
}
|
|
746
|
+
const role = msg["role"];
|
|
747
|
+
if (role !== "user" && role !== "assistant") {
|
|
748
|
+
throw new RuntimeError4("RILL-R004", `invalid role '${role}'`);
|
|
749
|
+
}
|
|
750
|
+
if (!("content" in msg) || typeof msg["content"] !== "string") {
|
|
751
|
+
throw new RuntimeError4(
|
|
752
|
+
"RILL-R004",
|
|
753
|
+
`${role} message requires 'content'`
|
|
754
|
+
);
|
|
755
|
+
}
|
|
756
|
+
messages.push({
|
|
757
|
+
role,
|
|
758
|
+
content: msg["content"]
|
|
759
|
+
});
|
|
760
|
+
}
|
|
761
|
+
}
|
|
762
|
+
messages.push({
|
|
763
|
+
role: "user",
|
|
764
|
+
content: prompt
|
|
765
|
+
});
|
|
766
|
+
// Provider-specific callbacks handed to the shared tool-use loop.
// Each one adapts between the loop's neutral tool/result shapes and the
// OpenAI Chat Completions wire format.
const callbacks = {
  // Translate neutral tool definitions into OpenAI "function" tool specs.
  buildTools: (definitions) =>
    definitions.map((definition) => ({
      type: "function",
      function: {
        name: definition.name,
        description: definition.description,
        parameters: definition.input_schema
      }
    })),
  // Issue one chat-completions request; normalize usage counters to the
  // provider-neutral { input_tokens, output_tokens } shape the loop expects.
  callAPI: async (conversation, toolSpecs) => {
    const request = {
      model: factoryModel,
      max_tokens: maxTokens,
      messages: conversation,
      tools: toolSpecs,
      tool_choice: "auto"
    };
    // Temperature is optional at the factory level; omit rather than send undefined.
    if (factoryTemperature !== undefined) {
      request.temperature = factoryTemperature;
    }
    const raw = await client.chat.completions.create(request);
    const rawUsage = raw.usage;
    return {
      ...raw,
      usage: {
        input_tokens: rawUsage?.prompt_tokens ?? 0,
        output_tokens: rawUsage?.completion_tokens ?? 0
      }
    };
  },
  // Pull function tool calls out of a completion response.
  // Returns null when the response carries no usable tool_calls array;
  // malformed JSON arguments degrade to an empty input object (best effort).
  extractToolCalls: (apiResponse) => {
    const choiceList = apiResponse?.choices;
    if (!Array.isArray(choiceList) || choiceList.length === 0) {
      return null;
    }
    const rawCalls = choiceList[0]?.message?.tool_calls;
    if (!Array.isArray(rawCalls)) {
      return null;
    }
    const functionCalls = rawCalls.filter(
      (entry) => entry !== null && typeof entry === "object" && entry.type === "function"
    );
    return functionCalls.map((entry) => {
      let parsedInput;
      try {
        parsedInput = JSON.parse(entry.function.arguments);
      } catch {
        parsedInput = {};
      }
      return {
        id: entry.id,
        name: entry.function.name,
        input: parsedInput
      };
    });
  },
  // Convert executed tool results into role:"tool" messages for the next turn.
  // Errors are serialized with the RILL error code; non-string results are
  // JSON-encoded, string results pass through untouched.
  formatToolResult: (toolResults) =>
    toolResults.map((entry) => {
      let payload;
      if (entry.error) {
        payload = JSON.stringify({ error: entry.error, code: "RILL-R001" });
      } else if (typeof entry.result === "string") {
        payload = entry.result;
      } else {
        payload = JSON.stringify(entry.result);
      }
      return {
        role: "tool",
        tool_call_id: entry.id,
        content: payload
      };
    })
};
|
|
848
|
+
const loopResult = await executeToolLoop(
|
|
849
|
+
messages,
|
|
850
|
+
toolsDict,
|
|
851
|
+
maxErrors,
|
|
852
|
+
callbacks,
|
|
853
|
+
(event, data) => {
|
|
854
|
+
const eventMap = {
|
|
855
|
+
tool_call: "openai:tool_call",
|
|
856
|
+
tool_result: "openai:tool_result"
|
|
857
|
+
};
|
|
858
|
+
emitExtensionEvent(ctx, {
|
|
859
|
+
event: eventMap[event] || event,
|
|
860
|
+
subsystem: "extension:openai",
|
|
861
|
+
...data
|
|
862
|
+
});
|
|
863
|
+
},
|
|
864
|
+
maxTurns,
|
|
865
|
+
ctx
|
|
866
|
+
);
|
|
867
|
+
const response = loopResult.response;
|
|
868
|
+
const content = response?.choices[0]?.message?.content ?? "";
|
|
869
|
+
const stopReason = loopResult.turns >= maxTurns ? "max_turns" : response?.choices[0]?.finish_reason ?? "stop";
|
|
870
|
+
const fullMessages = [];
|
|
871
|
+
for (const msg of messages) {
|
|
872
|
+
if ("role" in msg && msg.role !== "system") {
|
|
873
|
+
const historyMsg = {
|
|
874
|
+
role: msg.role
|
|
875
|
+
};
|
|
876
|
+
if ("content" in msg && msg.content) {
|
|
877
|
+
historyMsg["content"] = msg.content;
|
|
878
|
+
}
|
|
879
|
+
if ("tool_calls" in msg && msg.tool_calls) {
|
|
880
|
+
historyMsg["tool_calls"] = msg.tool_calls;
|
|
881
|
+
}
|
|
882
|
+
fullMessages.push(historyMsg);
|
|
883
|
+
}
|
|
884
|
+
}
|
|
885
|
+
if (response) {
|
|
886
|
+
fullMessages.push({
|
|
887
|
+
role: "assistant",
|
|
888
|
+
content
|
|
889
|
+
});
|
|
890
|
+
}
|
|
891
|
+
const result2 = {
|
|
892
|
+
content,
|
|
893
|
+
model: factoryModel,
|
|
894
|
+
usage: {
|
|
895
|
+
input: loopResult.totalTokens.input,
|
|
896
|
+
output: loopResult.totalTokens.output
|
|
897
|
+
},
|
|
898
|
+
stop_reason: stopReason,
|
|
899
|
+
turns: loopResult.turns,
|
|
900
|
+
messages: fullMessages
|
|
901
|
+
};
|
|
902
|
+
const duration = Date.now() - startTime;
|
|
903
|
+
emitExtensionEvent(ctx, {
|
|
904
|
+
event: "openai:tool_loop",
|
|
905
|
+
subsystem: "extension:openai",
|
|
906
|
+
turns: loopResult.turns,
|
|
907
|
+
total_duration: duration,
|
|
908
|
+
usage: result2.usage
|
|
909
|
+
});
|
|
910
|
+
return result2;
|
|
911
|
+
} catch (error) {
|
|
912
|
+
const duration = Date.now() - startTime;
|
|
913
|
+
const rillError = mapProviderError(
|
|
914
|
+
"OpenAI",
|
|
915
|
+
error,
|
|
916
|
+
detectOpenAIError
|
|
917
|
+
);
|
|
918
|
+
emitExtensionEvent(ctx, {
|
|
919
|
+
event: "openai:error",
|
|
920
|
+
subsystem: "extension:openai",
|
|
921
|
+
error: rillError.message,
|
|
922
|
+
duration
|
|
923
|
+
});
|
|
924
|
+
throw rillError;
|
|
925
|
+
}
|
|
926
|
+
},
|
|
927
|
+
description: "Execute tool-use loop with OpenAI API",
|
|
928
|
+
returnType: "dict"
|
|
929
|
+
}
|
|
930
|
+
};
|
|
931
|
+
result.dispose = dispose;
|
|
932
|
+
return result;
|
|
933
|
+
}
|
|
934
|
+
|
|
935
|
+
// src/index.ts
|
|
936
|
+
var VERSION = "0.0.1";
|
|
937
|
+
export {
|
|
938
|
+
VERSION,
|
|
939
|
+
createOpenAIExtension
|
|
940
|
+
};
|