@prompty/openai 2.0.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +87 -0
- package/dist/index.cjs +671 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +72 -0
- package/dist/index.d.ts +72 -0
- package/dist/index.js +627 -0
- package/dist/index.js.map +1 -0
- package/package.json +59 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,671 @@
|
|
|
1
|
+
"use strict";
// esbuild-generated CommonJS interop helpers (machine-written; do not edit by hand).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Define a lazy, enumerable getter on `target` for every export in `all`
// (getters let circular requires resolve to live bindings).
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copy own properties of `from` onto `to` as getters, skipping `except` and
// keys already present; preserves the source property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wrap a CommonJS module so it can be consumed with ESM `import` semantics.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Mark an exports object as an ES module and expose it to CommonJS consumers.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
29
|
+
|
|
30
|
+
// src/index.ts
// Public export surface of @prompty/openai. Exports are wired as lazy getters
// via __export so the definitions below can be hoisted by the bundler.
var index_exports = {};
__export(index_exports, {
  OpenAIExecutor: () => OpenAIExecutor,
  OpenAIProcessor: () => OpenAIProcessor,
  buildChatArgs: () => buildChatArgs,
  buildEmbeddingArgs: () => buildEmbeddingArgs,
  buildImageArgs: () => buildImageArgs,
  buildResponsesArgs: () => buildResponsesArgs,
  messageToWire: () => messageToWire,
  processResponse: () => processResponse
});
module.exports = __toCommonJS(index_exports);
|
|
43
|
+
|
|
44
|
+
// src/executor.ts
|
|
45
|
+
var import_openai = __toESM(require("openai"), 1);
|
|
46
|
+
var import_core = require("@prompty/core");
|
|
47
|
+
var import_core2 = require("@prompty/core");
|
|
48
|
+
var import_core3 = require("@prompty/core");
|
|
49
|
+
|
|
50
|
+
// src/wire.ts
|
|
51
|
+
/**
 * Convert a prompty message into an OpenAI chat wire message.
 * Extra metadata keys are passed through, but never the role/content slots
 * this function owns. String content is sent as-is; otherwise each part is
 * converted individually via partToWire.
 */
function messageToWire(msg) {
  const wire = { role: msg.role };
  Object.entries(msg.metadata).forEach(([key, value]) => {
    if (key === "role" || key === "content") return;
    wire[key] = value;
  });
  const text = msg.toTextContent();
  wire.content = typeof text === "string" ? text : msg.parts.map(partToWire);
  return wire;
}
/**
 * Convert a single message part into its OpenAI content-part shape.
 * Unknown kinds yield undefined (same as the original switch with no default).
 */
function partToWire(part) {
  if (part.kind === "text") {
    return { type: "text", text: part.value };
  }
  if (part.kind === "image") {
    const image_url = { url: part.source };
    if (part.detail) image_url.detail = part.detail;
    return { type: "image_url", image_url };
  }
  if (part.kind === "audio") {
    const input_audio = { data: part.source };
    // `format` is only attached when a media type is present.
    if (part.mediaType) input_audio.format = part.mediaType;
    return { type: "input_audio", input_audio };
  }
  if (part.kind === "file") {
    return { type: "file", file: { url: part.source } };
  }
}
|
|
87
|
+
/**
 * Build the argument object for chat.completions.create from an agent and
 * its message history: model id (default "gpt-4"), wire-format messages,
 * sampling options, and — when present — tools and a response_format schema.
 */
function buildChatArgs(agent, messages) {
  const args = {
    model: agent.model?.id || "gpt-4",
    messages: messages.map(messageToWire),
    ...buildOptions(agent)
  };
  const tools = toolsToWire(agent);
  if (tools.length > 0) {
    args.tools = tools;
  }
  const responseFormat = outputSchemaToWire(agent);
  if (responseFormat) {
    args.response_format = responseFormat;
  }
  return args;
}
|
|
105
|
+
/**
 * Build the argument object for embeddings.create. `data` may be a single
 * string or an array; it is always normalized to an array. Any
 * additionalProperties from the model options are copied through verbatim.
 */
function buildEmbeddingArgs(agent, data) {
  const args = {
    input: Array.isArray(data) ? data : [data],
    model: agent.model?.id || "text-embedding-ada-002"
  };
  const extra = agent.model?.options?.additionalProperties;
  if (extra) {
    Object.entries(extra).forEach(([key, value]) => {
      args[key] = value;
    });
  }
  return args;
}
|
|
119
|
+
/**
 * Build the argument object for images.generate. `data` may be a raw prompt
 * string, an array of message-like objects (their text parts are joined with
 * newlines), or anything else (stringified). additionalProperties from the
 * model options are copied through verbatim.
 */
function buildImageArgs(agent, data) {
  let prompt;
  if (typeof data === "string") {
    prompt = data;
  } else if (Array.isArray(data)) {
    const pieces = data.map((m) => {
      // Message-like objects: prefer a plain .text, then concatenated text parts.
      if (typeof m.text === "string") return m.text;
      if (Array.isArray(m.parts)) {
        const textParts = m.parts.filter((p) => p.kind === "text");
        return textParts.map((p) => p.value).join("");
      }
      return String(m);
    });
    prompt = pieces.join("\n").trim();
  } else {
    prompt = String(data);
  }
  const args = { prompt, model: agent.model?.id || "dall-e-3" };
  const extra = agent.model?.options?.additionalProperties;
  if (extra) {
    Object.assign(args, extra);
  }
  return args;
}
|
|
144
|
+
// Mapping from prompty property kinds to JSON Schema type names.
var KIND_TO_JSON_TYPE = {
  string: "string",
  integer: "integer",
  float: "number",
  number: "number",
  boolean: "boolean",
  array: "array",
  object: "object"
};
/**
 * Translate model options into chat-completions request fields.
 * Known options are renamed to their wire names; additionalProperties are
 * copied afterwards but never overwrite a key that was set explicitly.
 */
function buildOptions(agent) {
  const opts = agent.model?.options;
  if (!opts) return {};
  const result = {};
  const renames = [
    ["temperature", "temperature"],
    ["maxOutputTokens", "max_completion_tokens"],
    ["topP", "top_p"],
    ["frequencyPenalty", "frequency_penalty"],
    ["presencePenalty", "presence_penalty"],
    ["stopSequences", "stop"],
    ["seed", "seed"]
  ];
  for (const [from, to] of renames) {
    if (opts[from] !== void 0) result[to] = opts[from];
  }
  if (opts.additionalProperties) {
    for (const [key, value] of Object.entries(opts.additionalProperties)) {
      if (!(key in result)) {
        result[key] = value;
      }
    }
  }
  return result;
}
/**
 * Build a flat JSON Schema object from a list of named properties (used for
 * tool parameter schemas). Unnamed entries are skipped. NOTE(review): unlike
 * propertyToJsonSchema this does not recurse into array/object kinds — nested
 * structure in tool parameters is dropped; confirm whether that is intended.
 */
function schemaToWire(properties) {
  const props = {};
  const required = [];
  properties.forEach((p) => {
    if (!p.name) return;
    const entry = { type: KIND_TO_JSON_TYPE[p.kind ?? "string"] ?? "string" };
    if (p.description) entry.description = p.description;
    if (p.enumValues?.length) entry.enum = p.enumValues;
    props[p.name] = entry;
    if (p.required) required.push(p.name);
  });
  const result = { type: "object", properties: props };
  if (required.length > 0) result.required = required;
  return result;
}
/**
 * Recursively convert one prompty property into a JSON Schema node.
 * Arrays default their item type to string; objects mark every named child
 * as required and set additionalProperties: false (strict-mode shape).
 */
function propertyToJsonSchema(prop) {
  const schema = { type: KIND_TO_JSON_TYPE[prop.kind ?? "string"] ?? "string" };
  if (prop.description) schema.description = prop.description;
  if (prop.enumValues?.length) schema.enum = prop.enumValues;
  if (prop.kind === "array") {
    schema.items = prop.items ? propertyToJsonSchema(prop.items) : { type: "string" };
  }
  if (prop.kind === "object") {
    const nested = {};
    const req = [];
    for (const child of prop.properties ?? []) {
      if (!child.name) continue;
      nested[child.name] = propertyToJsonSchema(child);
      req.push(child.name);
    }
    schema.properties = nested;
    schema.required = req;
    schema.additionalProperties = false;
  }
  return schema;
}
|
|
218
|
+
/**
 * Convert the agent's function tools into the chat-completions tool format
 * ({ type: "function", function: {...} }). Non-function tools are skipped.
 * When a tool is strict, strict mode is flagged and its parameter schema
 * gets additionalProperties: false, as OpenAI requires.
 */
function toolsToWire(agent) {
  const wireTools = [];
  for (const tool of agent.tools ?? []) {
    if (tool.kind !== "function") continue;
    const funcDef = { name: tool.name };
    if (tool.description) {
      funcDef.description = tool.description;
    }
    if (Array.isArray(tool.parameters)) {
      funcDef.parameters = schemaToWire(tool.parameters);
    }
    if (tool.strict) {
      funcDef.strict = true;
      if (funcDef.parameters) {
        funcDef.parameters.additionalProperties = false;
      }
    }
    wireTools.push({ type: "function", function: funcDef });
  }
  return wireTools;
}
|
|
241
|
+
/**
 * Build a chat-completions response_format (strict json_schema) from the
 * agent's declared outputs, or null when the agent declares none.
 * Every named output becomes a required top-level property.
 */
function outputSchemaToWire(agent) {
  const outputs = agent.outputs;
  if (!outputs || outputs.length === 0) return null;
  const properties = {};
  const required = [];
  for (const prop of outputs) {
    if (!prop.name) continue;
    properties[prop.name] = propertyToJsonSchema(prop);
    required.push(prop.name);
  }
  // Fix: OpenAI requires json_schema names to match ^[a-zA-Z0-9_-]+$.
  // Previously only whitespace and "-" were replaced, so agent names with
  // other punctuation (e.g. "agent.v1") were sent verbatim and rejected by
  // the API. Collapse every disallowed run to "_" and fall back to
  // "response" if nothing survives.
  const name = (agent.name || "response").toLowerCase().replace(/[^a-z0-9_]+/g, "_") || "response";
  return {
    type: "json_schema",
    json_schema: {
      name,
      strict: true,
      schema: {
        type: "object",
        properties,
        required,
        additionalProperties: false
      }
    }
  };
}
|
|
266
|
+
/**
 * Build the argument object for responses.create. System/developer messages
 * are lifted out of the input list and joined into `instructions`; remaining
 * messages are converted to Responses-API input items. Options, tools and an
 * optional structured-output `text` config are attached when present.
 */
function buildResponsesArgs(agent, messages) {
  const systemParts = [];
  const inputMessages = [];
  messages.forEach((msg) => {
    if (msg.role === "system" || msg.role === "developer") {
      systemParts.push(msg.text);
    } else {
      inputMessages.push(messageToResponsesInput(msg));
    }
  });
  const args = {
    model: agent.model?.id || "gpt-4o",
    input: inputMessages
  };
  if (systemParts.length > 0) {
    args.instructions = systemParts.join("\n\n");
  }
  Object.assign(args, buildResponsesOptions(agent));
  const tools = responsesToolsToWire(agent);
  if (tools.length > 0) {
    args.tools = tools;
  }
  const textConfig = outputSchemaToResponsesWire(agent);
  if (textConfig) {
    args.text = textConfig;
  }
  return args;
}
|
|
296
|
+
/**
 * Convert one non-system message into a Responses-API input item.
 * Messages carrying a tool_call_id become function_call_output items
 * (non-string content is JSON-stringified); everything else becomes a plain
 * role/content item, with "tool" downgraded to "user".
 */
function messageToResponsesInput(msg) {
  const content = msg.toTextContent();
  const callId = msg.metadata.tool_call_id;
  if (callId) {
    return {
      type: "function_call_output",
      call_id: callId,
      output: typeof content === "string" ? content : JSON.stringify(content)
    };
  }
  const role = msg.role === "tool" ? "user" : msg.role;
  return { role, content };
}
|
|
308
|
+
/**
 * Translate model options into Responses-API request fields (note the wire
 * name max_output_tokens, not max_completion_tokens). additionalProperties
 * are copied afterwards but never overwrite an explicitly-set key.
 */
function buildResponsesOptions(agent) {
  const opts = agent.model?.options;
  if (!opts) return {};
  const result = {};
  if (opts.temperature !== void 0) {
    result.temperature = opts.temperature;
  }
  if (opts.maxOutputTokens !== void 0) {
    result.max_output_tokens = opts.maxOutputTokens;
  }
  if (opts.topP !== void 0) {
    result.top_p = opts.topP;
  }
  for (const [key, value] of Object.entries(opts.additionalProperties ?? {})) {
    if (!(key in result)) {
      result[key] = value;
    }
  }
  return result;
}
|
|
324
|
+
/**
 * Convert the agent's function tools into the Responses-API tool format.
 * Unlike chat completions, the function fields live at the top level of the
 * tool object rather than under a nested `function` key.
 */
function responsesToolsToWire(agent) {
  const wireTools = [];
  for (const t of agent.tools ?? []) {
    if (t.kind !== "function") continue;
    const tool = { type: "function", name: t.name };
    if (t.description) {
      tool.description = t.description;
    }
    if (Array.isArray(t.parameters)) {
      tool.parameters = schemaToWire(t.parameters);
    }
    if (t.strict) {
      tool.strict = true;
      if (tool.parameters) {
        tool.parameters.additionalProperties = false;
      }
    }
    wireTools.push(tool);
  }
  return wireTools;
}
|
|
350
|
+
/**
 * Build a Responses-API `text` config (strict json_schema format) from the
 * agent's declared outputs, or null when the agent declares none.
 * Every named output becomes a required top-level property.
 */
function outputSchemaToResponsesWire(agent) {
  const outputs = agent.outputs;
  if (!outputs || outputs.length === 0) return null;
  const properties = {};
  const required = [];
  for (const prop of outputs) {
    if (!prop.name) continue;
    properties[prop.name] = propertyToJsonSchema(prop);
    required.push(prop.name);
  }
  // Fix: OpenAI requires json_schema names to match ^[a-zA-Z0-9_-]+$.
  // Previously only whitespace and "-" were replaced, so agent names with
  // other punctuation (e.g. "agent.v1") were sent verbatim and rejected by
  // the API. Collapse every disallowed run to "_" and fall back to
  // "response" if nothing survives.
  const name = (agent.name || "response").toLowerCase().replace(/[^a-z0-9_]+/g, "_") || "response";
  return {
    format: {
      type: "json_schema",
      name,
      strict: true,
      schema: {
        type: "object",
        properties,
        required,
        additionalProperties: false
      }
    }
  };
}
|
|
375
|
+
|
|
376
|
+
// src/executor.ts
|
|
377
|
+
var OpenAIExecutor = class {
  /**
   * Execute the agent against the OpenAI API, wrapped in a trace span.
   * Resolves a client, emits a child span describing client construction,
   * then dispatches on the agent's apiType (default "chat").
   */
  async execute(agent, messages) {
    return (0, import_core3.traceSpan)("OpenAIExecutor", async (emit) => {
      emit("signature", "prompty.openai.executor.OpenAIExecutor.invoke");
      emit("inputs", { data: messages });
      const client = this.resolveClient(agent);
      const clientName = client.constructor?.name ?? "OpenAI";
      // Trace client construction separately; reference connections only log
      // the reference name, direct connections log sanitized ctor kwargs.
      await (0, import_core3.traceSpan)(clientName, async (ctorEmit) => {
        ctorEmit("signature", `${clientName}.ctor`);
        const conn = agent.model?.connection;
        if (conn instanceof import_core.ReferenceConnection) {
          ctorEmit("inputs", { source: "reference", name: conn.name });
        } else {
          ctorEmit("inputs", (0, import_core3.sanitizeValue)("ctor", this.clientKwargs(agent)));
        }
        ctorEmit("result", clientName);
      });
      const apiType = agent.model?.apiType ?? "chat";
      const result = await this.executeApiCall(client, clientName, agent, messages, apiType);
      emit("result", result);
      return result;
    });
  }
  /** Dispatch to the appropriate API and trace the call. */
  async executeApiCall(client, clientName, agent, messages, apiType) {
    switch (apiType) {
      case "chat":
      case "agent": {
        const args = buildChatArgs(agent, messages);
        const isStreaming = !!args.stream;
        return (0, import_core3.traceSpan)("create", async (callEmit) => {
          callEmit("signature", `${clientName}.chat.completions.create`);
          callEmit("inputs", (0, import_core3.sanitizeValue)("create", args));
          const result = await client.chat.completions.create(
            args
          );
          // Streaming responses are wrapped; no "result" is emitted for them
          // since the stream has not been consumed yet.
          if (isStreaming) {
            return new import_core.PromptyStream(`${clientName}Executor`, result);
          }
          callEmit("result", result);
          return result;
        });
      }
      case "embedding": {
        const args = buildEmbeddingArgs(agent, messages);
        return (0, import_core3.traceSpan)("create", async (callEmit) => {
          callEmit("signature", `${clientName}.embeddings.create`);
          callEmit("inputs", (0, import_core3.sanitizeValue)("create", args));
          const result = await client.embeddings.create(
            args
          );
          callEmit("result", result);
          return result;
        });
      }
      case "image": {
        const args = buildImageArgs(agent, messages);
        return (0, import_core3.traceSpan)("generate", async (callEmit) => {
          callEmit("signature", `${clientName}.images.generate`);
          callEmit("inputs", (0, import_core3.sanitizeValue)("generate", args));
          const result = await client.images.generate(
            args
          );
          callEmit("result", result);
          return result;
        });
      }
      case "responses": {
        const args = buildResponsesArgs(agent, messages);
        const isStreaming = !!args.stream;
        return (0, import_core3.traceSpan)("create", async (callEmit) => {
          callEmit("signature", `${clientName}.responses.create`);
          callEmit("inputs", (0, import_core3.sanitizeValue)("create", args));
          const result = await client.responses.create(
            args
          );
          if (isStreaming) {
            return new import_core.PromptyStream(`${clientName}Executor`, result);
          }
          callEmit("result", result);
          return result;
        });
      }
      default:
        throw new Error(`Unsupported apiType: ${apiType}`);
    }
  }
  /**
   * Resolve the OpenAI client: a named, registered connection for
   * ReferenceConnection, otherwise a fresh client from clientKwargs.
   */
  resolveClient(agent) {
    const conn = agent.model?.connection;
    if (conn instanceof import_core.ReferenceConnection) {
      return (0, import_core2.getConnection)(conn.name);
    }
    const kwargs = this.clientKwargs(agent);
    return new import_openai.default(kwargs);
  }
  /**
   * Constructor kwargs for a direct client. Only ApiKeyConnection is mapped
   * (apiKey/baseURL); other connection types yield {} and rely on the SDK's
   * environment-variable defaults.
   */
  clientKwargs(agent) {
    const kwargs = {};
    const conn = agent.model?.connection;
    if (conn instanceof import_core.ApiKeyConnection) {
      if (conn.apiKey) kwargs.apiKey = conn.apiKey;
      if (conn.endpoint) kwargs.baseURL = conn.endpoint;
    }
    return kwargs;
  }
};
|
|
482
|
+
|
|
483
|
+
// src/processor.ts
|
|
484
|
+
var import_core4 = require("@prompty/core");
|
|
485
|
+
var OpenAIProcessor = class {
  /**
   * Normalize a raw OpenAI response inside a trace span.
   * For async-iterable (streaming) responses no "result" event is emitted,
   * since the returned generator has not been consumed yet.
   */
  async process(agent, response) {
    return (0, import_core4.traceSpan)("OpenAIProcessor", async (emit) => {
      emit("signature", "prompty.openai.processor.OpenAIProcessor.invoke");
      emit("inputs", { data: response });
      const result = processResponse(agent, response);
      if (!isAsyncIterable(response)) {
        emit("result", result);
      }
      return result;
    });
  }
};
|
|
498
|
+
/**
 * Route a raw OpenAI response to the matching post-processor by shape:
 * streams -> streamGenerator, Responses API -> processResponsesApi,
 * chat completions -> processChatCompletion, embedding lists ->
 * processEmbedding, image results -> processImage. Anything unrecognized
 * (including primitives) is returned unchanged.
 */
function processResponse(agent, response) {
  if (typeof response !== "object" || response === null) return response;
  if (isAsyncIterable(response)) {
    return streamGenerator(response);
  }
  const r = response;
  if (r.object === "response" && Array.isArray(r.output)) {
    return processResponsesApi(agent, r);
  }
  if (r.choices) {
    return processChatCompletion(agent, r);
  }
  if (r.data && r.object === "list") {
    return processEmbedding(r);
  }
  if (Array.isArray(r.data) && r.data.length > 0) {
    const first = r.data[0];
    if ("url" in first || "b64_json" in first) {
      return processImage(r);
    }
  }
  return response;
}
|
|
521
|
+
/** True when `value` is a non-null object implementing Symbol.asyncIterator. */
function isAsyncIterable(value) {
  if (value === null || typeof value !== "object") return false;
  return Symbol.asyncIterator in value;
}
|
|
524
|
+
/**
 * Re-yield a chat-completions stream as plain text deltas, accumulating any
 * tool-call fragments by index and yielding the completed tool calls (as
 * { id, name, arguments }, in index order) after the stream ends.
 * A refusal delta aborts the stream with an Error.
 */
async function* streamGenerator(response) {
  const pendingToolCalls = /* @__PURE__ */ new Map();
  for await (const chunk of response) {
    const delta = chunk.choices?.[0]?.delta;
    if (!delta) continue;
    if (delta.content != null) {
      yield delta.content;
    }
    for (const tcDelta of delta.tool_calls ?? []) {
      let acc = pendingToolCalls.get(tcDelta.index);
      if (!acc) {
        acc = { id: "", name: "", arguments: "" };
        pendingToolCalls.set(tcDelta.index, acc);
      }
      if (tcDelta.id) acc.id = tcDelta.id;
      if (tcDelta.function?.name) acc.name = tcDelta.function.name;
      // Argument JSON arrives in fragments; concatenate them.
      if (tcDelta.function?.arguments) acc.arguments += tcDelta.function.arguments;
    }
    if (delta.refusal != null) {
      throw new Error(`Model refused: ${delta.refusal}`);
    }
  }
  const orderedIndices = [...pendingToolCalls.keys()].sort((a, b) => a - b);
  for (const idx of orderedIndices) {
    const acc = pendingToolCalls.get(idx);
    yield { id: acc.id, name: acc.name, arguments: acc.arguments };
  }
}
|
|
561
|
+
/**
 * Post-process a Responses-API result. Priority order:
 * 1. function_call output items -> array of { id, name, arguments };
 * 2. the convenience `output_text` field;
 * 3. concatenated text parts from `message` output items;
 * 4. the raw response when none of the above produced anything.
 * When the agent declares outputs, textual results are JSON-parsed with a
 * fallback to the raw string on parse failure.
 */
function processResponsesApi(agent, response) {
  const output = response.output;
  const funcCalls = output
    .filter((item) => item.type === "function_call")
    .map((item) => ({
      id: item.call_id ?? item.id ?? "",
      name: item.name,
      arguments: item.arguments
    }));
  if (funcCalls.length > 0) {
    return funcCalls;
  }
  const maybeParse = (text) => {
    if (agent.outputs && agent.outputs.length > 0) {
      try {
        return JSON.parse(text);
      } catch {
        return text;
      }
    }
    return text;
  };
  const outputText = response.output_text;
  if (outputText !== void 0) {
    return maybeParse(outputText);
  }
  const texts = [];
  for (const item of output) {
    if (item.type !== "message" || !item.content) continue;
    for (const part of item.content) {
      if (part.type === "output_text" || part.type === "text") {
        texts.push(part.text);
      }
    }
  }
  if (texts.length > 0) {
    return maybeParse(texts.join(""));
  }
  return response;
}
|
|
613
|
+
/**
 * Post-process a chat-completions result from its first choice:
 * tool calls -> array of { id, name, arguments }; otherwise the message
 * content, JSON-parsed (with raw-string fallback) when the agent declares
 * outputs. Returns null for missing choices/message/content.
 */
function processChatCompletion(agent, response) {
  const choices = response.choices;
  if (!choices || choices.length === 0) return null;
  const message = choices[0].message;
  if (!message) return null;
  const toolCalls = message.tool_calls;
  if (toolCalls && toolCalls.length > 0) {
    return toolCalls.map((tc) => {
      const fn = tc.function;
      return {
        id: tc.id,
        name: fn.name,
        arguments: fn.arguments
      };
    });
  }
  const content = message.content;
  // Fix: content may be absent (undefined) as well as null — e.g. messages
  // with no textual payload. The old `content === null` check let undefined
  // fall through to JSON.parse(undefined); treat both nullish cases as null.
  if (content == null) return null;
  if (agent.outputs && agent.outputs.length > 0) {
    try {
      return JSON.parse(content);
    } catch {
      return content;
    }
  }
  return content;
}
|
|
641
|
+
/**
 * Extract embedding vectors from an embeddings response: a single vector
 * when exactly one item was returned, otherwise an array of vectors.
 */
function processEmbedding(response) {
  const vectors = response.data.map((d) => d.embedding);
  return vectors.length === 1 ? vectors[0] : vectors;
}
|
|
648
|
+
/**
 * Extract image results (URL, falling back to base64 payload) from an image
 * generation response: a single value when exactly one item was returned,
 * otherwise an array.
 */
function processImage(response) {
  const images = response.data.map((d) => d.url ?? d.b64_json);
  return images.length === 1 ? images[0] : images;
}
|
|
655
|
+
|
|
656
|
+
// src/index.ts
var import_core5 = require("@prompty/core");
// Import side effect: register this package's executor and processor under
// the "openai" key so the core runtime can resolve them by name.
(0, import_core5.registerExecutor)("openai", new OpenAIExecutor());
(0, import_core5.registerProcessor)("openai", new OpenAIProcessor());
// Annotate the CommonJS export names for ESM import in node:
0 && (module.exports = {
  OpenAIExecutor,
  OpenAIProcessor,
  buildChatArgs,
  buildEmbeddingArgs,
  buildImageArgs,
  buildResponsesArgs,
  messageToWire,
  processResponse
});
//# sourceMappingURL=index.cjs.map
|