@prompty/openai 2.0.0-alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js ADDED
@@ -0,0 +1,627 @@
1
+ // src/executor.ts
2
+ import OpenAI from "openai";
3
+ import { ApiKeyConnection, ReferenceConnection, PromptyStream } from "@prompty/core";
4
+ import { getConnection } from "@prompty/core";
5
+ import { traceSpan, sanitizeValue } from "@prompty/core";
6
+
7
+ // src/wire.ts
8
/**
 * Convert an internal message into an OpenAI chat wire message.
 * Metadata entries (except "role"/"content", which are reserved) are
 * copied onto the wire object; content is emitted as a plain string
 * when possible, otherwise as an array of wire content parts.
 * @param {object} msg - message with role, metadata, parts, toTextContent()
 * @returns {object} Chat Completions wire-format message
 */
function messageToWire(msg) {
  const wire = { role: msg.role };
  // Fix: tolerate messages without a metadata object instead of throwing.
  for (const [k, v] of Object.entries(msg.metadata ?? {})) {
    if (k !== "role" && k !== "content") {
      wire[k] = v;
    }
  }
  const content = msg.toTextContent();
  if (typeof content === "string") {
    wire.content = content;
  } else {
    // Multimodal message: serialize each part individually.
    wire.content = msg.parts.map(partToWire);
  }
  return wire;
}
23
/**
 * Convert one message part into its OpenAI content-part wire shape.
 * Unknown kinds yield undefined (mirrors the original switch with no
 * default branch).
 * @param {object} part - part with kind, value/source, detail, mediaType
 * @returns {object|undefined} wire content part
 */
function partToWire(part) {
  if (part.kind === "text") {
    return { type: "text", text: part.value };
  }
  if (part.kind === "image") {
    const imageUrl = { url: part.source };
    if (part.detail) imageUrl.detail = part.detail;
    return { type: "image_url", image_url: imageUrl };
  }
  if (part.kind === "audio") {
    const inputAudio = { data: part.source };
    if (part.mediaType) inputAudio.format = part.mediaType;
    return { type: "input_audio", input_audio: inputAudio };
  }
  if (part.kind === "file") {
    return { type: "file", file: { url: part.source } };
  }
  return undefined;
}
44
/**
 * Assemble the argument object for chat.completions.create from an
 * agent definition and a message list.
 * @param {object} agent - agent with model, tools and outputs
 * @param {Array} messages - conversation messages
 * @returns {object} request arguments (model, messages, options, tools, response_format)
 */
function buildChatArgs(agent, messages) {
  const args = {
    model: agent.model?.id || "gpt-4",
    messages: messages.map(messageToWire),
    ...buildOptions(agent)
  };
  const wireTools = toolsToWire(agent);
  if (wireTools.length > 0) {
    args.tools = wireTools;
  }
  const format = outputSchemaToWire(agent);
  if (format) {
    args.response_format = format;
  }
  return args;
}
62
/**
 * Assemble the argument object for embeddings.create. A scalar input is
 * wrapped in a single-element array. additionalProperties from the model
 * options are passed through.
 * @param {object} agent - agent with model id and options
 * @param {string|Array} data - text(s) to embed
 * @returns {object} request arguments
 */
function buildEmbeddingArgs(agent, data) {
  const model = agent.model?.id || "text-embedding-ada-002";
  const args = {
    input: Array.isArray(data) ? data : [data],
    model
  };
  const extra = agent.model?.options?.additionalProperties;
  if (extra) {
    for (const [k, v] of Object.entries(extra)) {
      // Fix: never let pass-through options clobber input/model
      // (consistent with the `!(k in result)` guard in buildOptions).
      if (!(k in args)) {
        args[k] = v;
      }
    }
  }
  return args;
}
76
/**
 * Assemble the argument object for images.generate. The prompt is taken
 * directly from a string, or assembled from message text/parts joined
 * with newlines, or stringified as a last resort.
 * @param {object} agent - agent with model id and options
 * @param {string|Array|*} data - prompt source
 * @returns {object} request arguments
 */
function buildImageArgs(agent, data) {
  const model = agent.model?.id || "dall-e-3";
  let prompt;
  if (typeof data === "string") {
    prompt = data;
  } else if (Array.isArray(data)) {
    // Concatenate the text of each message: prefer a plain .text field,
    // otherwise join the values of text-kind parts.
    prompt = data.map((m) => {
      if (typeof m.text === "string") return m.text;
      if (Array.isArray(m.parts)) {
        return m.parts.filter((p) => p.kind === "text").map((p) => p.value).join("");
      }
      return String(m);
    }).join("\n").trim();
  } else {
    prompt = String(data);
  }
  const args = { prompt, model };
  const extra = agent.model?.options?.additionalProperties;
  if (extra) {
    for (const [k, v] of Object.entries(extra)) {
      // Fix: never let pass-through options clobber prompt/model
      // (consistent with the `!(k in result)` guard in buildOptions).
      if (!(k in args)) {
        args[k] = v;
      }
    }
  }
  return args;
}
101
// Maps internal property "kind" names to JSON Schema type names.
// Note both "float" and "number" collapse to JSON Schema "number".
var KIND_TO_JSON_TYPE = {
  string: "string",
  integer: "integer",
  float: "number",
  number: "number",
  boolean: "boolean",
  array: "array",
  object: "object"
};
110
/**
 * Translate generic model options into Chat Completions parameter names.
 * Keys from additionalProperties are passed through verbatim but never
 * overwrite a value produced by the known-option mapping.
 * @param {object} agent - agent with model.options
 * @returns {object} wire-format options (empty when no options are set)
 */
function buildOptions(agent) {
  const opts = agent.model?.options;
  if (!opts) return {};
  const renames = [
    ["temperature", "temperature"],
    ["maxOutputTokens", "max_completion_tokens"],
    ["topP", "top_p"],
    ["frequencyPenalty", "frequency_penalty"],
    ["presencePenalty", "presence_penalty"],
    ["stopSequences", "stop"],
    ["seed", "seed"]
  ];
  const result = {};
  for (const [src, dst] of renames) {
    if (opts[src] !== void 0) {
      result[dst] = opts[src];
    }
  }
  if (opts.additionalProperties) {
    for (const [k, v] of Object.entries(opts.additionalProperties)) {
      if (!(k in result)) {
        result[k] = v;
      }
    }
  }
  return result;
}
130
/**
 * Build a flat JSON Schema object from a list of parameter properties.
 * Nameless entries are skipped; names of required entries are collected
 * into a `required` array (omitted when empty).
 * @param {Array} properties - property descriptors (name, kind, description, enumValues, required)
 * @returns {object} JSON Schema of type "object"
 */
function schemaToWire(properties) {
  const props = {};
  const requiredNames = [];
  for (const prop of properties) {
    if (!prop.name) continue;
    const entry = {
      type: KIND_TO_JSON_TYPE[prop.kind ?? "string"] ?? "string"
    };
    if (prop.description) entry.description = prop.description;
    if (prop.enumValues && prop.enumValues.length > 0) entry.enum = prop.enumValues;
    props[prop.name] = entry;
    if (prop.required) requiredNames.push(prop.name);
  }
  const schema = { type: "object", properties: props };
  if (requiredNames.length > 0) schema.required = requiredNames;
  return schema;
}
147
/**
 * Recursively convert a property descriptor into JSON Schema.
 * Arrays default their item schema to { type: "string" }; objects list
 * every named child as required and forbid additional properties
 * (the strict-schema convention).
 * @param {object} prop - property descriptor
 * @returns {object} JSON Schema fragment
 */
function propertyToJsonSchema(prop) {
  const kind = prop.kind ?? "string";
  const schema = { type: KIND_TO_JSON_TYPE[kind] ?? "string" };
  if (prop.description) schema.description = prop.description;
  if (prop.enumValues && prop.enumValues.length > 0) schema.enum = prop.enumValues;
  if (kind === "array") {
    schema.items = prop.items ? propertyToJsonSchema(prop.items) : { type: "string" };
  }
  if (kind === "object") {
    const childSchemas = {};
    const childNames = [];
    if (prop.properties) {
      for (const child of prop.properties) {
        if (!child.name) continue;
        childSchemas[child.name] = propertyToJsonSchema(child);
        childNames.push(child.name);
      }
    }
    schema.properties = childSchemas;
    schema.required = childNames;
    schema.additionalProperties = false;
  }
  return schema;
}
175
/**
 * Convert the agent's function tools into Chat Completions tool
 * definitions. Non-function tools are skipped. A strict tool also has
 * additionalProperties disabled on its parameter schema.
 * @param {object} agent - agent with optional tools array
 * @returns {Array} wire-format tool list (empty when no function tools)
 */
function toolsToWire(agent) {
  const tools = agent.tools;
  if (!tools || tools.length === 0) return [];
  const wire = [];
  for (const tool of tools) {
    if (tool.kind !== "function") continue;
    const funcDef = { name: tool.name };
    if (tool.description) funcDef.description = tool.description;
    if (Array.isArray(tool.parameters)) {
      funcDef.parameters = schemaToWire(tool.parameters);
    }
    if (tool.strict) {
      funcDef.strict = true;
      if (funcDef.parameters) {
        funcDef.parameters.additionalProperties = false;
      }
    }
    wire.push({ type: "function", function: funcDef });
  }
  return wire;
}
198
/**
 * Build a response_format (json_schema) payload from the agent's
 * declared outputs. Every named output becomes a required property;
 * the schema name is derived from the agent name (spaces/hyphens
 * replaced with underscores). Returns null when no outputs exist.
 * @param {object} agent - agent with optional outputs array
 * @returns {object|null} response_format payload
 */
function outputSchemaToWire(agent) {
  const outputs = agent.outputs;
  if (!outputs || outputs.length === 0) return null;
  const properties = {};
  const required = [];
  for (const output of outputs) {
    if (!output.name) continue;
    properties[output.name] = propertyToJsonSchema(output);
    required.push(output.name);
  }
  const schemaName = (agent.name || "response").toLowerCase().replace(/[\s-]/g, "_");
  return {
    type: "json_schema",
    json_schema: {
      name: schemaName,
      strict: true,
      schema: {
        type: "object",
        properties,
        required,
        additionalProperties: false
      }
    }
  };
}
223
/**
 * Assemble the argument object for responses.create. System and
 * developer messages are folded into the `instructions` string
 * (joined with blank lines); every other message becomes an input
 * item. Options, tools and text format are merged in afterwards.
 * @param {object} agent - agent definition
 * @param {Array} messages - conversation messages
 * @returns {object} request arguments
 */
function buildResponsesArgs(agent, messages) {
  const instructionParts = [];
  const inputItems = [];
  for (const msg of messages) {
    if (msg.role === "system" || msg.role === "developer") {
      instructionParts.push(msg.text);
    } else {
      inputItems.push(messageToResponsesInput(msg));
    }
  }
  const args = {
    model: agent.model?.id || "gpt-4o",
    input: inputItems
  };
  if (instructionParts.length > 0) {
    args.instructions = instructionParts.join("\n\n");
  }
  Object.assign(args, buildResponsesOptions(agent));
  const wireTools = responsesToolsToWire(agent);
  if (wireTools.length > 0) {
    args.tools = wireTools;
  }
  const textConfig = outputSchemaToResponsesWire(agent);
  if (textConfig) {
    args.text = textConfig;
  }
  return args;
}
253
/**
 * Convert a message into a Responses API input item. Tool results
 * (identified by metadata.tool_call_id) become function_call_output
 * items; any remaining tool-role message is downgraded to user role.
 * @param {object} msg - message with role, metadata, toTextContent()
 * @returns {object} Responses API input item
 */
function messageToResponsesInput(msg) {
  const content = msg.toTextContent();
  const callId = msg.metadata.tool_call_id;
  if (callId) {
    const output = typeof content === "string" ? content : JSON.stringify(content);
    return { type: "function_call_output", call_id: callId, output };
  }
  const role = msg.role === "tool" ? "user" : msg.role;
  return { role, content };
}
265
/**
 * Translate generic model options into Responses API parameter names.
 * Keys from additionalProperties are passed through but never overwrite
 * a value produced by the known-option mapping.
 * @param {object} agent - agent with model.options
 * @returns {object} wire-format options (empty when no options are set)
 */
function buildResponsesOptions(agent) {
  const opts = agent.model?.options;
  if (!opts) return {};
  const renames = [
    ["temperature", "temperature"],
    ["maxOutputTokens", "max_output_tokens"],
    ["topP", "top_p"]
  ];
  const result = {};
  for (const [src, dst] of renames) {
    if (opts[src] !== void 0) {
      result[dst] = opts[src];
    }
  }
  if (opts.additionalProperties) {
    for (const [k, v] of Object.entries(opts.additionalProperties)) {
      if (!(k in result)) {
        result[k] = v;
      }
    }
  }
  return result;
}
281
/**
 * Convert the agent's function tools into Responses API tool
 * definitions (flat shape: name/description/parameters at top level,
 * unlike the nested Chat Completions format). Non-function tools are
 * skipped; strict tools also disable additionalProperties.
 * @param {object} agent - agent with optional tools array
 * @returns {Array} wire-format tool list
 */
function responsesToolsToWire(agent) {
  const tools = agent.tools;
  if (!tools || tools.length === 0) return [];
  const wire = [];
  for (const t of tools) {
    if (t.kind !== "function") continue;
    const tool = { type: "function", name: t.name };
    if (t.description) tool.description = t.description;
    if (Array.isArray(t.parameters)) {
      tool.parameters = schemaToWire(t.parameters);
    }
    if (t.strict) {
      tool.strict = true;
      if (tool.parameters) {
        tool.parameters.additionalProperties = false;
      }
    }
    wire.push(tool);
  }
  return wire;
}
307
/**
 * Build a Responses API text.format (json_schema) payload from the
 * agent's declared outputs. Every named output becomes a required
 * property; the schema name is derived from the agent name. Returns
 * null when the agent declares no outputs.
 * @param {object} agent - agent with optional outputs array
 * @returns {object|null} text format configuration
 */
function outputSchemaToResponsesWire(agent) {
  const outputs = agent.outputs;
  if (!outputs || outputs.length === 0) return null;
  const properties = {};
  const required = [];
  for (const output of outputs) {
    if (!output.name) continue;
    properties[output.name] = propertyToJsonSchema(output);
    required.push(output.name);
  }
  const schemaName = (agent.name || "response").toLowerCase().replace(/[\s-]/g, "_");
  return {
    format: {
      type: "json_schema",
      name: schemaName,
      strict: true,
      schema: {
        type: "object",
        properties,
        required,
        additionalProperties: false
      }
    }
  };
}
332
+
333
+ // src/executor.ts
334
// Executes agent requests against the OpenAI SDK, wrapping every client
// construction and API call in trace spans (traceSpan semantics come from
// @prompty/core — presumably emit() records span attributes; confirm there).
var OpenAIExecutor = class {
  /**
   * Run the agent's configured API call and return the raw SDK result
   * (or a PromptyStream wrapper for streaming chat/responses calls).
   * @param agent - agent definition (model, connection, tools, outputs)
   * @param messages - conversation messages (or raw data for embedding/image)
   */
  async execute(agent, messages) {
    return traceSpan("OpenAIExecutor", async (emit) => {
      emit("signature", "prompty.openai.executor.OpenAIExecutor.invoke");
      emit("inputs", { data: messages });
      const client = this.resolveClient(agent);
      const clientName = client.constructor?.name ?? "OpenAI";
      // Nested span documents how the client was constructed; reference
      // connections only record the reference name, direct connections
      // record sanitized ctor kwargs (sanitizeValue redacts secrets —
      // NOTE(review): confirm against @prompty/core).
      await traceSpan(clientName, async (ctorEmit) => {
        ctorEmit("signature", `${clientName}.ctor`);
        const conn = agent.model?.connection;
        if (conn instanceof ReferenceConnection) {
          ctorEmit("inputs", { source: "reference", name: conn.name });
        } else {
          ctorEmit("inputs", sanitizeValue("ctor", this.clientKwargs(agent)));
        }
        ctorEmit("result", clientName);
      });
      // Default API is chat when the model does not specify one.
      const apiType = agent.model?.apiType ?? "chat";
      const result = await this.executeApiCall(client, clientName, agent, messages, apiType);
      emit("result", result);
      return result;
    });
  }
  /** Dispatch to the appropriate API and trace the call. */
  async executeApiCall(client, clientName, agent, messages, apiType) {
    switch (apiType) {
      // "agent" is treated identically to "chat".
      case "chat":
      case "agent": {
        const args = buildChatArgs(agent, messages);
        // Streaming is requested via an extra `stream` arg (from model
        // options additionalProperties); detect it before the call.
        const isStreaming = !!args.stream;
        return traceSpan("create", async (callEmit) => {
          callEmit("signature", `${clientName}.chat.completions.create`);
          callEmit("inputs", sanitizeValue("create", args));
          const result = await client.chat.completions.create(
            args
          );
          if (isStreaming) {
            // Streaming: wrap the SDK stream; no final result is emitted
            // to the span because the stream is consumed lazily.
            return new PromptyStream(`${clientName}Executor`, result);
          }
          callEmit("result", result);
          return result;
        });
      }
      case "embedding": {
        const args = buildEmbeddingArgs(agent, messages);
        return traceSpan("create", async (callEmit) => {
          callEmit("signature", `${clientName}.embeddings.create`);
          callEmit("inputs", sanitizeValue("create", args));
          const result = await client.embeddings.create(
            args
          );
          callEmit("result", result);
          return result;
        });
      }
      case "image": {
        const args = buildImageArgs(agent, messages);
        return traceSpan("generate", async (callEmit) => {
          callEmit("signature", `${clientName}.images.generate`);
          callEmit("inputs", sanitizeValue("generate", args));
          const result = await client.images.generate(
            args
          );
          callEmit("result", result);
          return result;
        });
      }
      case "responses": {
        const args = buildResponsesArgs(agent, messages);
        const isStreaming = !!args.stream;
        return traceSpan("create", async (callEmit) => {
          callEmit("signature", `${clientName}.responses.create`);
          callEmit("inputs", sanitizeValue("create", args));
          const result = await client.responses.create(
            args
          );
          if (isStreaming) {
            return new PromptyStream(`${clientName}Executor`, result);
          }
          callEmit("result", result);
          return result;
        });
      }
      default:
        throw new Error(`Unsupported apiType: ${apiType}`);
    }
  }
  /**
   * Resolve the OpenAI client: a ReferenceConnection looks up a
   * pre-registered client via getConnection; otherwise a new OpenAI
   * client is constructed from the connection's kwargs.
   */
  resolveClient(agent) {
    const conn = agent.model?.connection;
    if (conn instanceof ReferenceConnection) {
      return getConnection(conn.name);
    }
    const kwargs = this.clientKwargs(agent);
    return new OpenAI(kwargs);
  }
  /**
   * Build OpenAI constructor kwargs from an ApiKeyConnection.
   * Returns {} for any other connection type, deferring to the SDK's
   * own environment-based defaults.
   */
  clientKwargs(agent) {
    const kwargs = {};
    const conn = agent.model?.connection;
    if (conn instanceof ApiKeyConnection) {
      if (conn.apiKey) kwargs.apiKey = conn.apiKey;
      if (conn.endpoint) kwargs.baseURL = conn.endpoint;
    }
    return kwargs;
  }
};
439
+
440
+ // src/processor.ts
441
+ import { traceSpan as traceSpan2 } from "@prompty/core";
442
// Post-processes raw OpenAI responses into plain values inside a trace span.
var OpenAIProcessor = class {
  /**
   * Normalize a raw OpenAI response. Streaming responses yield an async
   * generator, in which case no final result is emitted to the span
   * (the stream is consumed lazily by the caller).
   * @param agent - agent definition (outputs drive JSON parsing)
   * @param response - raw SDK response or async-iterable stream
   */
  async process(agent, response) {
    return traceSpan2("OpenAIProcessor", async (emit) => {
      emit("signature", "prompty.openai.processor.OpenAIProcessor.invoke");
      emit("inputs", { data: response });
      const processed = processResponse(agent, response);
      const streaming = isAsyncIterable(response);
      if (!streaming) {
        emit("result", processed);
      }
      return processed;
    });
  }
};
455
/**
 * Dispatch a raw OpenAI response to the matching post-processor based
 * on its shape: Responses API payloads, chat completions, embedding
 * lists, then image payloads. Primitives and unrecognized shapes are
 * returned unchanged; async iterables become a streaming generator.
 * @param {object} agent - agent definition
 * @param {*} response - raw SDK response
 * @returns {*} processed value
 */
function processResponse(agent, response) {
  if (typeof response !== "object" || response === null) return response;
  if (isAsyncIterable(response)) return streamGenerator(response);
  const r = response;
  if (r.object === "response" && Array.isArray(r.output)) {
    return processResponsesApi(agent, r);
  }
  if (r.choices) {
    return processChatCompletion(agent, r);
  }
  if (r.data && r.object === "list") {
    return processEmbedding(r);
  }
  if (r.data && Array.isArray(r.data) && r.data.length > 0) {
    const first = r.data[0];
    if ("url" in first || "b64_json" in first) {
      return processImage(r);
    }
  }
  return response;
}
478
/** True when the value can be consumed with `for await` (has Symbol.asyncIterator). */
function isAsyncIterable(value) {
  if (value === null || typeof value !== "object") return false;
  return Symbol.asyncIterator in value;
}
481
/**
 * Adapt a Chat Completions stream into an async generator. Text deltas
 * are yielded as they arrive; tool-call fragments are accumulated per
 * index and yielded (in index order) after the stream ends. A refusal
 * delta aborts the stream with an Error.
 * @param {AsyncIterable} response - SDK chunk stream
 * @yields {string|object} text delta or {id, name, arguments}
 */
async function* streamGenerator(response) {
  const pendingCalls = /* @__PURE__ */ new Map();
  for await (const chunk of response) {
    // Only the first choice is consumed, matching non-streaming handling.
    const delta = chunk.choices?.[0]?.delta;
    if (!delta) continue;
    if (delta.content != null) {
      yield delta.content;
    }
    if (delta.tool_calls) {
      for (const tcDelta of delta.tool_calls) {
        let acc = pendingCalls.get(tcDelta.index);
        if (!acc) {
          acc = { id: "", name: "", arguments: "" };
          pendingCalls.set(tcDelta.index, acc);
        }
        if (tcDelta.id) acc.id = tcDelta.id;
        const fn = tcDelta.function;
        if (fn) {
          if (fn.name) acc.name = fn.name;
          // Argument JSON arrives in fragments; concatenate them.
          if (fn.arguments) acc.arguments += fn.arguments;
        }
      }
    }
    if (delta.refusal != null) {
      throw new Error(`Model refused: ${delta.refusal}`);
    }
  }
  const orderedIndices = [...pendingCalls.keys()].sort((a, b) => a - b);
  for (const idx of orderedIndices) {
    const acc = pendingCalls.get(idx);
    yield { id: acc.id, name: acc.name, arguments: acc.arguments };
  }
}
518
/**
 * Post-process a Responses API payload. Priority order: function calls,
 * then output_text, then concatenated message text, then the raw
 * response. Text is JSON-parsed when the agent declares structured
 * outputs, falling back to the raw string on parse failure.
 * @param {object} agent - agent definition (outputs drive JSON parsing)
 * @param {object} response - Responses API payload with an output array
 * @returns {*} processed value
 */
function processResponsesApi(agent, response) {
  const parseIfStructured = (text) => {
    if (agent.outputs && agent.outputs.length > 0) {
      try {
        return JSON.parse(text);
      } catch {
        return text;
      }
    }
    return text;
  };
  const calls = response.output
    .filter((item) => item.type === "function_call")
    .map((item) => ({
      id: item.call_id ?? item.id ?? "",
      name: item.name,
      arguments: item.arguments
    }));
  if (calls.length > 0) {
    return calls;
  }
  if (response.output_text !== void 0) {
    return parseIfStructured(response.output_text);
  }
  // No convenience output_text: gather text parts from message items.
  const texts = [];
  for (const item of response.output) {
    if (item.type !== "message" || !item.content) continue;
    for (const part of item.content) {
      if (part.type === "output_text" || part.type === "text") {
        texts.push(part.text);
      }
    }
  }
  if (texts.length > 0) {
    return parseIfStructured(texts.join(""));
  }
  return response;
}
570
/**
 * Post-process a Chat Completions payload. Tool calls take precedence
 * over text content; content is JSON-parsed when the agent declares
 * structured outputs (falling back to the raw string). Returns null
 * for missing choices/message/content.
 * @param {object} agent - agent definition (outputs drive JSON parsing)
 * @param {object} response - Chat Completions payload
 * @returns {*} processed value
 */
function processChatCompletion(agent, response) {
  const message = response.choices?.[0]?.message;
  if (!message) return null;
  const toolCalls = message.tool_calls;
  if (toolCalls && toolCalls.length > 0) {
    return toolCalls.map(({ id, function: fn }) => ({
      id,
      name: fn.name,
      arguments: fn.arguments
    }));
  }
  const content = message.content;
  if (content === null) return null;
  if (agent.outputs && agent.outputs.length > 0) {
    try {
      return JSON.parse(content);
    } catch {
      return content;
    }
  }
  return content;
}
598
/**
 * Unwrap embedding vectors from an embeddings response. A single-item
 * response returns the vector itself; multiple items return an array
 * of vectors.
 * @param {object} response - embeddings payload with a data array
 * @returns {Array} vector or array of vectors
 */
function processEmbedding(response) {
  const vectors = response.data.map((d) => d.embedding);
  return vectors.length === 1 ? vectors[0] : vectors;
}
605
/**
 * Unwrap image results from an images response, preferring the URL and
 * falling back to the base64 payload. A single-item response returns
 * the value itself; multiple items return an array.
 * @param {object} response - images payload with a data array
 * @returns {string|Array} image URL/base64 or array thereof
 */
function processImage(response) {
  const images = response.data.map((d) => d.url ?? d.b64_json);
  return images.length === 1 ? images[0] : images;
}
612
+
613
// src/index.ts
import { registerExecutor, registerProcessor } from "@prompty/core";

// Module side effect: register this provider under the "openai" key so
// @prompty/core can route OpenAI-model agents to these implementations.
registerExecutor("openai", new OpenAIExecutor());
registerProcessor("openai", new OpenAIProcessor());
// Public API: the executor/processor classes plus the pure argument
// builders and response post-processor (useful for testing/advanced use).
export {
  OpenAIExecutor,
  OpenAIProcessor,
  buildChatArgs,
  buildEmbeddingArgs,
  buildImageArgs,
  buildResponsesArgs,
  messageToWire,
  processResponse
};
627
+ //# sourceMappingURL=index.js.map