@prompty/foundry 2.0.0-alpha.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +86 -0
- package/dist/index.cjs +310 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +62 -0
- package/dist/index.d.ts +62 -0
- package/dist/index.js +287 -0
- package/dist/index.js.map +1 -0
- package/package.json +64 -0
package/README.md
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
1
|
+
# @prompty/foundry
|
|
2
|
+
|
|
3
|
+
Microsoft Foundry provider for Prompty — executor and processor for Azure AI Foundry (and Azure OpenAI).
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
npm install @prompty/core @prompty/foundry openai @azure/identity
|
|
9
|
+
```
|
|
10
|
+
|
|
11
|
+
## Usage
|
|
12
|
+
|
|
13
|
+
Import `@prompty/foundry` to auto-register the `foundry` provider (and the legacy `azure` alias):
|
|
14
|
+
|
|
15
|
+
```typescript
|
|
16
|
+
import "@prompty/foundry";
|
|
17
|
+
import { run } from "@prompty/core";
|
|
18
|
+
|
|
19
|
+
const result = await run("./chat.prompty", {
|
|
20
|
+
question: "What is Azure AI Foundry?",
|
|
21
|
+
});
|
|
22
|
+
```
|
|
23
|
+
|
|
24
|
+
## `.prompty` Configuration
|
|
25
|
+
|
|
26
|
+
### Azure AI Foundry
|
|
27
|
+
|
|
28
|
+
```prompty
|
|
29
|
+
---
|
|
30
|
+
name: foundry-prompt
|
|
31
|
+
model:
|
|
32
|
+
id: gpt-4o-mini
|
|
33
|
+
provider: foundry
|
|
34
|
+
apiType: chat
|
|
35
|
+
connection:
|
|
36
|
+
kind: key
|
|
37
|
+
endpoint: ${env:AZURE_OPENAI_ENDPOINT}
|
|
38
|
+
apiKey: ${env:AZURE_OPENAI_API_KEY}
|
|
39
|
+
options:
|
|
40
|
+
temperature: 0.7
|
|
41
|
+
---
|
|
42
|
+
system:
|
|
43
|
+
You are a helpful assistant.
|
|
44
|
+
|
|
45
|
+
user:
|
|
46
|
+
{{question}}
|
|
47
|
+
```
|
|
48
|
+
|
|
49
|
+
### Azure OpenAI (Legacy)
|
|
50
|
+
|
|
51
|
+
The `azure` provider name is supported as a backward-compatible alias:
|
|
52
|
+
|
|
53
|
+
```prompty
|
|
54
|
+
model:
|
|
55
|
+
provider: azure
|
|
56
|
+
# ... same connection config
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
## Authentication
|
|
60
|
+
|
|
61
|
+
The Foundry provider supports:
|
|
62
|
+
|
|
63
|
+
- **API key** — via `connection.kind: key` with `apiKey`
|
|
64
|
+
- **Azure Identity** — falls back to `DefaultAzureCredential` when no API key is provided
|
|
65
|
+
|
|
66
|
+
## Supported API Types
|
|
67
|
+
|
|
68
|
+
| `apiType` | Description |
|
|
69
|
+
|-----------|-------------|
|
|
70
|
+
| `chat` (default) | Chat completions |
|
|
71
|
+
| `embedding` | Embeddings |
|
|
72
|
+
| `image` | Image generation |
|
|
73
|
+
| `responses` | Responses API |
|
|
74
|
+
|
|
75
|
+
## Exports
|
|
76
|
+
|
|
77
|
+
| Export | Description |
|
|
78
|
+
|--------|-------------|
|
|
79
|
+
| `FoundryExecutor` | Executor for Azure AI Foundry |
|
|
80
|
+
| `FoundryProcessor` | Processor for Foundry responses |
|
|
81
|
+
| `AzureExecutor` | Backward-compatible alias |
|
|
82
|
+
| `AzureProcessor` | Backward-compatible alias |
|
|
83
|
+
|
|
84
|
+
## License
|
|
85
|
+
|
|
86
|
+
MIT
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,310 @@
|
|
|
1
|
+
"use strict";
// esbuild-generated CommonJS interop preamble: lazy-getter exports plus
// an __esModule-flagged namespace for `require()` consumers.
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Install each entry of `all` on `target` as an enumerable lazy getter.
var __export = (target, all) => {
  for (const exportName in all) {
    __defProp(target, exportName, { get: all[exportName], enumerable: true });
  }
};
// Copy own properties of `from` onto `to` as live getters, skipping `except`
// and any key `to` already owns; enumerability is preserved. Returns `to`.
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (!__hasOwnProp.call(to, key) && key !== except) {
        desc = __getOwnPropDesc(from, key);
        __defProp(to, key, {
          get: () => from[key],
          enumerable: !desc || desc.enumerable,
        });
      }
    }
  }
  return to;
};
// Wrap a module namespace object for CommonJS: tag it __esModule and
// mirror all of its members onto the wrapper.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
19
|
+
|
|
20
|
+
// src/index.ts
|
|
21
|
+
// Export map for the bundle's public surface. __export installs live
// getters, so the classes (declared later in this file) resolve lazily
// at access time rather than at this point in the bundle.
var index_exports = {};
__export(index_exports, {
  AzureExecutor: () => AzureExecutor,
  AzureProcessor: () => AzureProcessor,
  FoundryExecutor: () => FoundryExecutor,
  FoundryProcessor: () => FoundryProcessor
});
module.exports = __toCommonJS(index_exports);
|
|
29
|
+
|
|
30
|
+
// src/executor.ts
|
|
31
|
+
var import_core = require("@prompty/core");
|
|
32
|
+
var import_core2 = require("@prompty/core");
|
|
33
|
+
var import_openai = require("@prompty/openai");
|
|
34
|
+
var import_openai2 = require("@prompty/openai");
|
|
35
|
+
/**
 * Derive the resource base endpoint (scheme + host) from a Foundry
 * project endpoint, dropping any path such as "/api/projects/<name>".
 */
function getResourceEndpoint(projectEndpoint) {
  const { protocol, host } = new URL(projectEndpoint);
  return `${protocol}//${host}`;
}
|
|
39
|
+
/**
 * Executor for Azure AI Foundry. Extends the OpenAI executor but resolves
 * its own client: either a pre-registered one (ReferenceConnection) or an
 * AzureOpenAI client built from a FoundryConnection endpoint with
 * DefaultAzureCredential auth.
 */
var FoundryExecutor = class extends import_openai.OpenAIExecutor {
  /**
   * Execute the agent's request, tracing the overall invocation, the client
   * resolution, and the underlying API call.
   *
   * @param agent    Prompty agent definition (model id, connection, options).
   * @param messages Rendered messages to send to the model.
   * @returns The raw provider result, or a PromptyStream when streaming.
   */
  async execute(agent, messages) {
    return (0, import_core2.traceSpan)("FoundryExecutor", async (emit) => {
      emit("signature", "prompty.foundry.executor.FoundryExecutor.invoke");
      emit("inputs", { data: messages });
      const client = this.resolveClient(agent);
      const clientName = client.constructor?.name ?? "OpenAI";
      // Trace how the client was resolved (reference vs. Foundry endpoint);
      // sanitizeValue keeps sensitive constructor inputs out of the trace.
      await (0, import_core2.traceSpan)(clientName, async (ctorEmit) => {
        ctorEmit("signature", `${clientName}.ctor`);
        const conn = agent.model?.connection;
        if (conn instanceof import_core.ReferenceConnection) {
          ctorEmit("inputs", { source: "reference", name: conn.name });
        } else if (conn instanceof import_core.FoundryConnection) {
          ctorEmit("inputs", (0, import_core2.sanitizeValue)("ctor", {
            // Resource base endpoint derived from the project endpoint.
            endpoint: conn.endpoint ? getResourceEndpoint(conn.endpoint) : void 0,
            deployment: agent.model?.id,
            apiVersion: "2025-04-01-preview",
            auth: "DefaultAzureCredential"
          }));
        }
        ctorEmit("result", clientName);
      });
      const apiType = agent.model?.apiType ?? "chat"; // default: chat completions
      const result = await this.dispatchApiCall(client, clientName, agent, messages, apiType);
      emit("result", result);
      return result;
    });
  }
  /**
   * Route the request to the client surface matching `apiType`:
   * "chat"/"agent" → chat.completions, "embedding" → embeddings,
   * "image" → images.generate, "responses" → responses.
   * Streaming chat/responses results are wrapped in a PromptyStream and
   * returned without a "result" trace emit.
   *
   * @throws Error for an unsupported apiType.
   */
  async dispatchApiCall(client, clientName, agent, messages, apiType) {
    switch (apiType) {
      case "chat":
      case "agent": {
        const args = (0, import_openai2.buildChatArgs)(agent, messages);
        const isStreaming = !!args.stream;
        return (0, import_core2.traceSpan)("create", async (callEmit) => {
          callEmit("signature", `${clientName}.chat.completions.create`);
          callEmit("inputs", (0, import_core2.sanitizeValue)("create", args));
          const result = await client.chat.completions.create(
            args
          );
          if (isStreaming) {
            // Hand the stream off unconsumed; the caller iterates it.
            return new import_core.PromptyStream(`${clientName}Executor`, result);
          }
          callEmit("result", result);
          return result;
        });
      }
      case "embedding": {
        const args = (0, import_openai2.buildEmbeddingArgs)(agent, messages);
        return (0, import_core2.traceSpan)("create", async (callEmit) => {
          callEmit("signature", `${clientName}.embeddings.create`);
          callEmit("inputs", (0, import_core2.sanitizeValue)("create", args));
          const result = await client.embeddings.create(
            args
          );
          callEmit("result", result);
          return result;
        });
      }
      case "image": {
        const args = (0, import_openai2.buildImageArgs)(agent, messages);
        return (0, import_core2.traceSpan)("generate", async (callEmit) => {
          callEmit("signature", `${clientName}.images.generate`);
          callEmit("inputs", (0, import_core2.sanitizeValue)("generate", args));
          const result = await client.images.generate(
            args
          );
          callEmit("result", result);
          return result;
        });
      }
      case "responses": {
        const args = (0, import_openai2.buildResponsesArgs)(agent, messages);
        const isStreaming = !!args.stream;
        return (0, import_core2.traceSpan)("create", async (callEmit) => {
          callEmit("signature", `${clientName}.responses.create`);
          callEmit("inputs", (0, import_core2.sanitizeValue)("create", args));
          const result = await client.responses.create(
            args
          );
          if (isStreaming) {
            return new import_core.PromptyStream(`${clientName}Executor`, result);
          }
          callEmit("result", result);
          return result;
        });
      }
      default:
        throw new Error(`Unsupported apiType: ${apiType}`);
    }
  }
  /**
   * Resolve an OpenAI-compatible client for the agent:
   * - ReferenceConnection → pre-registered client via getConnection()
   *   (assumed OpenAI-compatible, as registered by the caller — not
   *   verifiable here).
   * - FoundryConnection with an endpoint → new AzureOpenAI client using
   *   DefaultAzureCredential with the Cognitive Services token scope.
   *
   * @throws Error when neither connection shape is usable.
   */
  resolveClient(agent) {
    const conn = agent.model?.connection;
    if (conn instanceof import_core.ReferenceConnection) {
      return (0, import_core2.getConnection)(conn.name);
    }
    if (conn instanceof import_core.FoundryConnection && conn.endpoint) {
      // Lazy require: only load "openai" and "@azure/identity" when this
      // path is actually taken.
      const { AzureOpenAI: AzureOpenAI2 } = require("openai");
      const { DefaultAzureCredential, getBearerTokenProvider } = require("@azure/identity");
      const credential = new DefaultAzureCredential();
      const scope = "https://cognitiveservices.azure.com/.default";
      const azureADTokenProvider = getBearerTokenProvider(credential, scope);
      const resourceEndpoint = getResourceEndpoint(conn.endpoint);
      return new AzureOpenAI2({
        endpoint: resourceEndpoint,
        azureADTokenProvider,
        deployment: agent.model?.id,
        apiVersion: "2025-04-01-preview"
      });
    }
    throw new Error(
      "Foundry executor requires a FoundryConnection (with endpoint) or a ReferenceConnection (with a pre-registered client). Set model.connection.kind to 'foundry' with an endpoint, or register a client with registerConnection()."
    );
  }
};
|
|
154
|
+
|
|
155
|
+
// src/processor.ts
|
|
156
|
+
var import_openai3 = require("@prompty/openai");
|
|
157
|
+
var import_core3 = require("@prompty/core");
|
|
158
|
+
/**
 * Processor for Foundry responses. Foundry returns OpenAI-compatible
 * payloads, so this simply delegates to the shared OpenAI response
 * processor, with tracing around the call.
 */
var FoundryProcessor = class {
  async process(agent, response) {
    return (0, import_core3.traceSpan)("FoundryProcessor", async (trace) => {
      trace("signature", "prompty.foundry.processor.FoundryProcessor.invoke");
      trace("inputs", { data: response });
      const processed = (0, import_openai3.processResponse)(agent, response);
      trace("result", processed);
      return processed;
    });
  }
};
|
|
169
|
+
|
|
170
|
+
// src/azure-executor.ts
|
|
171
|
+
var import_openai4 = require("openai");
|
|
172
|
+
var import_core4 = require("@prompty/core");
|
|
173
|
+
var import_core5 = require("@prompty/core");
|
|
174
|
+
var import_openai5 = require("@prompty/openai");
|
|
175
|
+
var import_openai6 = require("@prompty/openai");
|
|
176
|
+
/**
 * Deprecated "azure" alias executor. Same dispatch flow as the Foundry
 * executor, but builds an AzureOpenAI client directly from an
 * ApiKeyConnection (apiKey + endpoint) instead of a Foundry endpoint.
 */
var AzureExecutor = class extends import_openai5.OpenAIExecutor {
  /**
   * Execute the agent's request, tracing the invocation, the client
   * construction, and the underlying API call.
   *
   * @param agent    Prompty agent definition (model id, connection, options).
   * @param messages Rendered messages to send to the model.
   * @returns The raw provider result, or a PromptyStream when streaming.
   */
  async execute(agent, messages) {
    return (0, import_core5.traceSpan)("AzureExecutor", async (emit) => {
      emit("signature", "prompty.azure.executor.AzureExecutor.invoke");
      emit("inputs", { data: messages });
      const client = this.resolveClient(agent);
      const clientName = client.constructor?.name ?? "AzureOpenAI";
      // Trace client construction; sanitizeValue redacts sensitive kwargs
      // (e.g. the API key) before they reach the trace.
      await (0, import_core5.traceSpan)(clientName, async (ctorEmit) => {
        ctorEmit("signature", `${clientName}.ctor`);
        const conn = agent.model?.connection;
        if (conn instanceof import_core4.ReferenceConnection) {
          ctorEmit("inputs", { source: "reference", name: conn.name });
        } else {
          ctorEmit("inputs", (0, import_core5.sanitizeValue)("ctor", this.clientKwargs(agent)));
        }
        ctorEmit("result", clientName);
      });
      const apiType = agent.model?.apiType ?? "chat"; // default: chat completions
      const result = await this.dispatchApiCall(client, clientName, agent, messages, apiType);
      emit("result", result);
      return result;
    });
  }
  /**
   * Route the request to the client surface matching `apiType`:
   * "chat"/"agent" → chat.completions, "embedding" → embeddings,
   * "image" → images.generate, "responses" → responses.
   * Streaming chat/responses results are wrapped in a PromptyStream and
   * returned without a "result" trace emit.
   *
   * @throws Error for an unsupported apiType.
   */
  async dispatchApiCall(client, clientName, agent, messages, apiType) {
    switch (apiType) {
      case "chat":
      case "agent": {
        const args = (0, import_openai6.buildChatArgs)(agent, messages);
        const isStreaming = !!args.stream;
        return (0, import_core5.traceSpan)("create", async (callEmit) => {
          callEmit("signature", `${clientName}.chat.completions.create`);
          callEmit("inputs", (0, import_core5.sanitizeValue)("create", args));
          const result = await client.chat.completions.create(
            args
          );
          if (isStreaming) {
            // Hand the stream off unconsumed; the caller iterates it.
            return new import_core4.PromptyStream(`${clientName}Executor`, result);
          }
          callEmit("result", result);
          return result;
        });
      }
      case "embedding": {
        const args = (0, import_openai6.buildEmbeddingArgs)(agent, messages);
        return (0, import_core5.traceSpan)("create", async (callEmit) => {
          callEmit("signature", `${clientName}.embeddings.create`);
          callEmit("inputs", (0, import_core5.sanitizeValue)("create", args));
          const result = await client.embeddings.create(
            args
          );
          callEmit("result", result);
          return result;
        });
      }
      case "image": {
        const args = (0, import_openai6.buildImageArgs)(agent, messages);
        return (0, import_core5.traceSpan)("generate", async (callEmit) => {
          callEmit("signature", `${clientName}.images.generate`);
          callEmit("inputs", (0, import_core5.sanitizeValue)("generate", args));
          const result = await client.images.generate(
            args
          );
          callEmit("result", result);
          return result;
        });
      }
      case "responses": {
        const args = (0, import_openai6.buildResponsesArgs)(agent, messages);
        const isStreaming = !!args.stream;
        return (0, import_core5.traceSpan)("create", async (callEmit) => {
          callEmit("signature", `${clientName}.responses.create`);
          callEmit("inputs", (0, import_core5.sanitizeValue)("create", args));
          const result = await client.responses.create(
            args
          );
          if (isStreaming) {
            return new import_core4.PromptyStream(`${clientName}Executor`, result);
          }
          callEmit("result", result);
          return result;
        });
      }
      default:
        throw new Error(`Unsupported apiType: ${apiType}`);
    }
  }
  /**
   * Resolve a client: a pre-registered one for a ReferenceConnection
   * (assumed OpenAI-compatible — registered by the caller), otherwise a
   * fresh AzureOpenAI built from clientKwargs().
   */
  resolveClient(agent) {
    const conn = agent.model?.connection;
    if (conn instanceof import_core4.ReferenceConnection) {
      return (0, import_core5.getConnection)(conn.name);
    }
    const kwargs = this.clientKwargs(agent);
    return new import_openai4.AzureOpenAI(kwargs);
  }
  /**
   * Build AzureOpenAI constructor options from the agent's connection:
   * apiKey/endpoint from an ApiKeyConnection when present, plus the model
   * id as the deployment name. Absent values are simply omitted.
   * NOTE(review): omitted values presumably let the AzureOpenAI client fall
   * back to its own environment-variable defaults — confirm against the
   * "openai" package docs.
   */
  clientKwargs(agent) {
    const kwargs = {};
    const conn = agent.model?.connection;
    if (conn instanceof import_core4.ApiKeyConnection) {
      if (conn.apiKey) kwargs.apiKey = conn.apiKey;
      if (conn.endpoint) kwargs.endpoint = conn.endpoint;
    }
    kwargs.deployment = agent.model?.id;
    return kwargs;
  }
};
|
|
281
|
+
|
|
282
|
+
// src/azure-processor.ts
|
|
283
|
+
var import_openai7 = require("@prompty/openai");
|
|
284
|
+
var import_core6 = require("@prompty/core");
|
|
285
|
+
/**
 * Deprecated "azure" alias processor. Delegates to the shared OpenAI
 * response processor, with tracing around the call.
 */
var AzureProcessor = class {
  async process(agent, response) {
    return (0, import_core6.traceSpan)("AzureProcessor", async (trace) => {
      trace("signature", "prompty.azure.processor.AzureProcessor.invoke");
      trace("inputs", { data: response });
      const processed = (0, import_openai7.processResponse)(agent, response);
      trace("result", processed);
      return processed;
    });
  }
};
|
|
296
|
+
|
|
297
|
+
// src/index.ts
var import_core7 = require("@prompty/core");
// Side effects on import: register the primary "foundry" executor/processor
// pair and the backward-compatible "azure" alias with the core registries.
(0, import_core7.registerExecutor)("foundry", new FoundryExecutor());
(0, import_core7.registerProcessor)("foundry", new FoundryProcessor());
(0, import_core7.registerExecutor)("azure", new AzureExecutor());
(0, import_core7.registerProcessor)("azure", new AzureProcessor());
// Annotate the CommonJS export names for ESM import in node:
// (never executed — the `0 &&` guard makes this a static hint only)
0 && (module.exports = {
  AzureExecutor,
  AzureProcessor,
  FoundryExecutor,
  FoundryProcessor
});
//# sourceMappingURL=index.cjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts","../src/executor.ts","../src/processor.ts","../src/azure-executor.ts","../src/azure-processor.ts"],"sourcesContent":["/**\r\n * @prompty/foundry — Microsoft Foundry and Azure OpenAI provider for Prompty.\r\n *\r\n * Importing this package auto-registers:\r\n * - \"foundry\" executor and processor (primary)\r\n * - \"azure\" executor and processor (deprecated alias for backward compatibility)\r\n *\r\n * @module @prompty/foundry\r\n */\r\n\r\nexport { FoundryExecutor } from \"./executor.js\";\r\nexport { FoundryProcessor } from \"./processor.js\";\r\nexport { AzureExecutor } from \"./azure-executor.js\";\r\nexport { AzureProcessor } from \"./azure-processor.js\";\r\n\r\n// Auto-register on import\r\nimport { registerExecutor, registerProcessor } from \"@prompty/core\";\r\nimport { FoundryExecutor } from \"./executor.js\";\r\nimport { FoundryProcessor } from \"./processor.js\";\r\nimport { AzureExecutor } from \"./azure-executor.js\";\r\nimport { AzureProcessor } from \"./azure-processor.js\";\r\n\r\n// Primary registration\r\nregisterExecutor(\"foundry\", new FoundryExecutor());\r\nregisterProcessor(\"foundry\", new FoundryProcessor());\r\n\r\n// Deprecated backward-compat alias\r\nregisterExecutor(\"azure\", new AzureExecutor());\r\nregisterProcessor(\"azure\", new AzureProcessor());\r\n","/**\r\n * Foundry executor — extends OpenAI executor with Azure AI Foundry client resolution.\r\n *\r\n * For Chat Completions: builds an AzureOpenAI client from the Foundry resource\r\n * endpoint (derived from the project endpoint) with DefaultAzureCredential.\r\n *\r\n * The Foundry project endpoint is:\r\n * https://<resource>.services.ai.azure.com/api/projects/<project>\r\n * The AzureOpenAI endpoint (for Chat Completions) is:\r\n * https://<resource>.services.ai.azure.com\r\n *\r\n * @module\r\n */\r\n\r\nimport type OpenAI from \"openai\";\r\nimport type { Prompty, Message } from \"@prompty/core\";\r\nimport { FoundryConnection, 
ReferenceConnection, PromptyStream } from \"@prompty/core\";\r\nimport { getConnection, traceSpan, sanitizeValue } from \"@prompty/core\";\r\nimport { OpenAIExecutor } from \"@prompty/openai\";\r\nimport { buildChatArgs, buildEmbeddingArgs, buildImageArgs, buildResponsesArgs } from \"@prompty/openai\";\r\n\r\n/**\r\n * Extract the resource base endpoint from a Foundry project endpoint.\r\n * e.g. \"https://foo.services.ai.azure.com/api/projects/bar\" → \"https://foo.services.ai.azure.com\"\r\n */\r\nfunction getResourceEndpoint(projectEndpoint: string): string {\r\n const url = new URL(projectEndpoint);\r\n return `${url.protocol}//${url.host}`;\r\n}\r\n\r\nexport class FoundryExecutor extends OpenAIExecutor {\r\n override async execute(agent: Prompty, messages: Message[]): Promise<unknown> {\r\n return traceSpan(\"FoundryExecutor\", async (emit) => {\r\n emit(\"signature\", \"prompty.foundry.executor.FoundryExecutor.invoke\");\r\n emit(\"inputs\", { data: messages });\r\n\r\n const client = this.resolveClient(agent);\r\n const clientName = client.constructor?.name ?? \"OpenAI\";\r\n\r\n // Trace what client we resolved and how\r\n await traceSpan(clientName, async (ctorEmit) => {\r\n ctorEmit(\"signature\", `${clientName}.ctor`);\r\n const conn = agent.model?.connection;\r\n if (conn instanceof ReferenceConnection) {\r\n ctorEmit(\"inputs\", { source: \"reference\", name: conn.name });\r\n } else if (conn instanceof FoundryConnection) {\r\n ctorEmit(\"inputs\", sanitizeValue(\"ctor\", {\r\n endpoint: conn.endpoint ? getResourceEndpoint(conn.endpoint) : undefined,\r\n deployment: agent.model?.id,\r\n apiVersion: \"2025-04-01-preview\",\r\n auth: \"DefaultAzureCredential\",\r\n }));\r\n }\r\n ctorEmit(\"result\", clientName);\r\n });\r\n\r\n const apiType = agent.model?.apiType ?? 
\"chat\";\r\n const result = await this.dispatchApiCall(client, clientName, agent, messages, apiType);\r\n emit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n\r\n private async dispatchApiCall(\r\n client: OpenAI,\r\n clientName: string,\r\n agent: Prompty,\r\n messages: Message[],\r\n apiType: string,\r\n ): Promise<unknown> {\r\n switch (apiType) {\r\n case \"chat\":\r\n case \"agent\": {\r\n const args = buildChatArgs(agent, messages);\r\n const isStreaming = !!args.stream;\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.chat.completions.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.chat.completions.create(\r\n args as unknown as Parameters<typeof client.chat.completions.create>[0],\r\n );\r\n if (isStreaming) {\r\n return new PromptyStream(`${clientName}Executor`, result as unknown as AsyncIterable<unknown>);\r\n }\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"embedding\": {\r\n const args = buildEmbeddingArgs(agent, messages);\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.embeddings.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.embeddings.create(\r\n args as unknown as Parameters<typeof client.embeddings.create>[0],\r\n );\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"image\": {\r\n const args = buildImageArgs(agent, messages);\r\n return traceSpan(\"generate\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.images.generate`);\r\n callEmit(\"inputs\", sanitizeValue(\"generate\", args));\r\n const result = await client.images.generate(\r\n args as unknown as Parameters<typeof client.images.generate>[0],\r\n );\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"responses\": {\r\n const args = buildResponsesArgs(agent, messages);\r\n 
const isStreaming = !!args.stream;\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.responses.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.responses.create(\r\n args as unknown as Parameters<typeof client.responses.create>[0],\r\n );\r\n if (isStreaming) {\r\n return new PromptyStream(`${clientName}Executor`, result as unknown as AsyncIterable<unknown>);\r\n }\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n default:\r\n throw new Error(`Unsupported apiType: ${apiType}`);\r\n }\r\n }\r\n\r\n protected override resolveClient(agent: Prompty): OpenAI {\r\n const conn = agent.model?.connection;\r\n\r\n // Pre-registered client by name\r\n if (conn instanceof ReferenceConnection) {\r\n return getConnection(conn.name) as OpenAI;\r\n }\r\n\r\n // Build an AzureOpenAI client from the FoundryConnection endpoint\r\n if (conn instanceof FoundryConnection && conn.endpoint) {\r\n const { AzureOpenAI } = require(\"openai\");\r\n const { DefaultAzureCredential, getBearerTokenProvider } = require(\"@azure/identity\");\r\n\r\n const credential = new DefaultAzureCredential();\r\n const scope = \"https://cognitiveservices.azure.com/.default\";\r\n const azureADTokenProvider = getBearerTokenProvider(credential, scope);\r\n const resourceEndpoint = getResourceEndpoint(conn.endpoint);\r\n\r\n return new AzureOpenAI({\r\n endpoint: resourceEndpoint,\r\n azureADTokenProvider,\r\n deployment: agent.model?.id,\r\n apiVersion: \"2025-04-01-preview\",\r\n }) as OpenAI;\r\n }\r\n\r\n throw new Error(\r\n \"Foundry executor requires a FoundryConnection (with endpoint) \" +\r\n \"or a ReferenceConnection (with a pre-registered client). 
\" +\r\n \"Set model.connection.kind to 'foundry' with an endpoint, \" +\r\n \"or register a client with registerConnection().\",\r\n );\r\n }\r\n}\r\n","/**\r\n * Foundry processor — identical to OpenAI processor.\r\n *\r\n * Foundry returns OpenAI-compatible responses via getOpenAIClient().\r\n *\r\n * @module\r\n */\r\n\r\nimport type { Prompty } from \"@prompty/core\";\r\nimport type { Processor } from \"@prompty/core\";\r\nimport { processResponse } from \"@prompty/openai\";\r\nimport { traceSpan } from \"@prompty/core\";\r\n\r\nexport class FoundryProcessor implements Processor {\r\n async process(agent: Prompty, response: unknown): Promise<unknown> {\r\n return traceSpan(\"FoundryProcessor\", async (emit) => {\r\n emit(\"signature\", \"prompty.foundry.processor.FoundryProcessor.invoke\");\r\n emit(\"inputs\", { data: response });\r\n const result = processResponse(agent, response);\r\n emit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n}\r\n","/**\r\n * Azure OpenAI executor — extends OpenAI executor with Azure-specific client.\r\n *\r\n * @module\r\n */\r\n\r\nimport OpenAI, { AzureOpenAI } from \"openai\";\r\nimport type { Prompty, Message } from \"@prompty/core\";\r\nimport { ApiKeyConnection, ReferenceConnection, PromptyStream } from \"@prompty/core\";\r\nimport { getConnection, traceSpan, sanitizeValue } from \"@prompty/core\";\r\nimport { OpenAIExecutor } from \"@prompty/openai\";\r\nimport { buildChatArgs, buildEmbeddingArgs, buildImageArgs, buildResponsesArgs } from \"@prompty/openai\";\r\n\r\nexport class AzureExecutor extends OpenAIExecutor {\r\n override async execute(agent: Prompty, messages: Message[]): Promise<unknown> {\r\n return traceSpan(\"AzureExecutor\", async (emit) => {\r\n emit(\"signature\", \"prompty.azure.executor.AzureExecutor.invoke\");\r\n emit(\"inputs\", { data: messages });\r\n\r\n const client = this.resolveClient(agent);\r\n const clientName = client.constructor?.name ?? 
\"AzureOpenAI\";\r\n\r\n // Trace what client we resolved and how\r\n await traceSpan(clientName, async (ctorEmit) => {\r\n ctorEmit(\"signature\", `${clientName}.ctor`);\r\n const conn = agent.model?.connection;\r\n if (conn instanceof ReferenceConnection) {\r\n ctorEmit(\"inputs\", { source: \"reference\", name: conn.name });\r\n } else {\r\n ctorEmit(\"inputs\", sanitizeValue(\"ctor\", this.clientKwargs(agent)));\r\n }\r\n ctorEmit(\"result\", clientName);\r\n });\r\n\r\n const apiType = agent.model?.apiType ?? \"chat\";\r\n const result = await this.dispatchApiCall(client, clientName, agent, messages, apiType);\r\n emit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n\r\n private async dispatchApiCall(\r\n client: OpenAI,\r\n clientName: string,\r\n agent: Prompty,\r\n messages: Message[],\r\n apiType: string,\r\n ): Promise<unknown> {\r\n switch (apiType) {\r\n case \"chat\":\r\n case \"agent\": {\r\n const args = buildChatArgs(agent, messages);\r\n const isStreaming = !!args.stream;\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.chat.completions.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.chat.completions.create(\r\n args as unknown as Parameters<typeof client.chat.completions.create>[0],\r\n );\r\n if (isStreaming) {\r\n return new PromptyStream(`${clientName}Executor`, result as unknown as AsyncIterable<unknown>);\r\n }\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"embedding\": {\r\n const args = buildEmbeddingArgs(agent, messages);\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.embeddings.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.embeddings.create(\r\n args as unknown as Parameters<typeof client.embeddings.create>[0],\r\n );\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case 
\"image\": {\r\n const args = buildImageArgs(agent, messages);\r\n return traceSpan(\"generate\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.images.generate`);\r\n callEmit(\"inputs\", sanitizeValue(\"generate\", args));\r\n const result = await client.images.generate(\r\n args as unknown as Parameters<typeof client.images.generate>[0],\r\n );\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"responses\": {\r\n const args = buildResponsesArgs(agent, messages);\r\n const isStreaming = !!args.stream;\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.responses.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.responses.create(\r\n args as unknown as Parameters<typeof client.responses.create>[0],\r\n );\r\n if (isStreaming) {\r\n return new PromptyStream(`${clientName}Executor`, result as unknown as AsyncIterable<unknown>);\r\n }\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n default:\r\n throw new Error(`Unsupported apiType: ${apiType}`);\r\n }\r\n }\r\n\r\n protected override resolveClient(agent: Prompty): OpenAI {\r\n const conn = agent.model?.connection;\r\n\r\n if (conn instanceof ReferenceConnection) {\r\n return getConnection(conn.name) as OpenAI;\r\n }\r\n\r\n const kwargs = this.clientKwargs(agent);\r\n return new AzureOpenAI(kwargs as ConstructorParameters<typeof AzureOpenAI>[0]);\r\n }\r\n\r\n protected override clientKwargs(agent: Prompty): Record<string, unknown> {\r\n const kwargs: Record<string, unknown> = {};\r\n const conn = agent.model?.connection;\r\n\r\n if (conn instanceof ApiKeyConnection) {\r\n if (conn.apiKey) kwargs.apiKey = conn.apiKey;\r\n if (conn.endpoint) kwargs.endpoint = conn.endpoint;\r\n }\r\n\r\n // Azure requires deployment = model id\r\n kwargs.deployment = agent.model?.id;\r\n\r\n return kwargs;\r\n }\r\n}\r\n","/**\r\n * Azure OpenAI processor — identical to 
OpenAI processor.\r\n *\r\n * Azure uses the same response format as OpenAI.\r\n *\r\n * @module\r\n */\r\n\r\nimport type { Prompty } from \"@prompty/core\";\r\nimport type { Processor } from \"@prompty/core\";\r\nimport { processResponse } from \"@prompty/openai\";\r\nimport { traceSpan } from \"@prompty/core\";\r\n\r\nexport class AzureProcessor implements Processor {\r\n async process(agent: Prompty, response: unknown): Promise<unknown> {\r\n return traceSpan(\"AzureProcessor\", async (emit) => {\r\n emit(\"signature\", \"prompty.azure.processor.AzureProcessor.invoke\");\r\n emit(\"inputs\", { data: response });\r\n const result = processResponse(agent, response);\r\n emit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n}\r\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACgBA,kBAAsE;AACtE,IAAAA,eAAwD;AACxD,oBAA+B;AAC/B,IAAAC,iBAAsF;AAMtF,SAAS,oBAAoB,iBAAiC;AAC5D,QAAM,MAAM,IAAI,IAAI,eAAe;AACnC,SAAO,GAAG,IAAI,QAAQ,KAAK,IAAI,IAAI;AACrC;AAEO,IAAM,kBAAN,cAA8B,6BAAe;AAAA,EAClD,MAAe,QAAQ,OAAgB,UAAuC;AAC5E,eAAO,wBAAU,mBAAmB,OAAO,SAAS;AAClD,WAAK,aAAa,iDAAiD;AACnE,WAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AAEjC,YAAM,SAAS,KAAK,cAAc,KAAK;AACvC,YAAM,aAAa,OAAO,aAAa,QAAQ;AAG/C,gBAAM,wBAAU,YAAY,OAAO,aAAa;AAC9C,iBAAS,aAAa,GAAG,UAAU,OAAO;AAC1C,cAAM,OAAO,MAAM,OAAO;AAC1B,YAAI,gBAAgB,iCAAqB;AACvC,mBAAS,UAAU,EAAE,QAAQ,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,QAC7D,WAAW,gBAAgB,+BAAmB;AAC5C,mBAAS,cAAU,4BAAc,QAAQ;AAAA,YACvC,UAAU,KAAK,WAAW,oBAAoB,KAAK,QAAQ,IAAI;AAAA,YAC/D,YAAY,MAAM,OAAO;AAAA,YACzB,YAAY;AAAA,YACZ,MAAM;AAAA,UACR,CAAC,CAAC;AAAA,QACJ;AACA,iBAAS,UAAU,UAAU;AAAA,MAC/B,CAAC;AAED,YAAM,UAAU,MAAM,OAAO,WAAW;AACxC,YAAM,SAAS,MAAM,KAAK,gBAAgB,QAAQ,YAAY,OAAO,UAAU,OAAO;AACtF,WAAK,UAAU,MAAM;AACrB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,gBACZ,QACA,YACA,OACA,UACA,SACkB;AAClB,YAAQ,SAAS;AAAA,MACf,KAAK;AAAA,MACL,KAAK,SAAS;AACZ,cAAM,WAAO,8BAAc,OAAO,QAAQ;AAC1C,cAAM,cAAc,CAAC,CAAC,KAAK;AAC3B,mBAAO,wBAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,0BAA0B;AAC7D,mBAAS,cAAU,4BAAc,
UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,KAAK,YAAY;AAAA,YAC3C;AAAA,UACF;AACA,cAAI,aAAa;AACf,mBAAO,IAAI,0BAAc,GAAG,UAAU,YAAY,MAA2C;AAAA,UAC/F;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,aAAa;AAChB,cAAM,WAAO,mCAAmB,OAAO,QAAQ;AAC/C,mBAAO,wBAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,oBAAoB;AACvD,mBAAS,cAAU,4BAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,WAAW;AAAA,YACrC;AAAA,UACF;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,SAAS;AACZ,cAAM,WAAO,+BAAe,OAAO,QAAQ;AAC3C,mBAAO,wBAAU,YAAY,OAAO,aAAa;AAC/C,mBAAS,aAAa,GAAG,UAAU,kBAAkB;AACrD,mBAAS,cAAU,4BAAc,YAAY,IAAI,CAAC;AAClD,gBAAM,SAAS,MAAM,OAAO,OAAO;AAAA,YACjC;AAAA,UACF;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,aAAa;AAChB,cAAM,WAAO,mCAAmB,OAAO,QAAQ;AAC/C,cAAM,cAAc,CAAC,CAAC,KAAK;AAC3B,mBAAO,wBAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,mBAAmB;AACtD,mBAAS,cAAU,4BAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,UAAU;AAAA,YACpC;AAAA,UACF;AACA,cAAI,aAAa;AACf,mBAAO,IAAI,0BAAc,GAAG,UAAU,YAAY,MAA2C;AAAA,UAC/F;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA;AACE,cAAM,IAAI,MAAM,wBAAwB,OAAO,EAAE;AAAA,IACrD;AAAA,EACF;AAAA,EAEmB,cAAc,OAAwB;AACvD,UAAM,OAAO,MAAM,OAAO;AAG1B,QAAI,gBAAgB,iCAAqB;AACvC,iBAAO,4BAAc,KAAK,IAAI;AAAA,IAChC;AAGA,QAAI,gBAAgB,iCAAqB,KAAK,UAAU;AACtD,YAAM,EAAE,aAAAC,aAAY,IAAI,QAAQ,QAAQ;AACxC,YAAM,EAAE,wBAAwB,uBAAuB,IAAI,QAAQ,iBAAiB;AAEpF,YAAM,aAAa,IAAI,uBAAuB;AAC9C,YAAM,QAAQ;AACd,YAAM,uBAAuB,uBAAuB,YAAY,KAAK;AACrE,YAAM,mBAAmB,oBAAoB,KAAK,QAAQ;AAE1D,aAAO,IAAIA,aAAY;AAAA,QACrB,UAAU;AAAA,QACV;AAAA,QACA,YAAY,MAAM,OAAO;AAAA,QACzB,YAAY;AAAA,MACd,CAAC;AAAA,IACH;AAEA,UAAM,IAAI;AAAA,MACR;AAAA,IAIF;AAAA,EACF;AACF;;;AC5JA,IAAAC,iBAAgC;AAChC,IAAAC,eAA0B;AAEnB,IAAM,mBAAN,MAA4C;AAAA,EACjD,MAAM,QAAQ,OAAgB,UAAqC;AACjE,eAAO,wBAAU,oBAAoB,OAAO,SAAS;AACnD,WAAK,aAAa,mDAAmD;AACrE,WAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AACjC,YAAM,aAAS,gCAAgB,OAAO,QAAQ;AAC9C,WAAK,UAAU,MAAM;AACrB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;;;ACjBA,IAAAC,iBAAoC;AAEpC,IAAAC
,eAAqE;AACrE,IAAAA,eAAwD;AACxD,IAAAD,iBAA+B;AAC/B,IAAAA,iBAAsF;AAE/E,IAAM,gBAAN,cAA4B,8BAAe;AAAA,EAChD,MAAe,QAAQ,OAAgB,UAAuC;AAC5E,eAAO,wBAAU,iBAAiB,OAAO,SAAS;AAChD,WAAK,aAAa,6CAA6C;AAC/D,WAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AAEjC,YAAM,SAAS,KAAK,cAAc,KAAK;AACvC,YAAM,aAAa,OAAO,aAAa,QAAQ;AAG/C,gBAAM,wBAAU,YAAY,OAAO,aAAa;AAC9C,iBAAS,aAAa,GAAG,UAAU,OAAO;AAC1C,cAAM,OAAO,MAAM,OAAO;AAC1B,YAAI,gBAAgB,kCAAqB;AACvC,mBAAS,UAAU,EAAE,QAAQ,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,QAC7D,OAAO;AACL,mBAAS,cAAU,4BAAc,QAAQ,KAAK,aAAa,KAAK,CAAC,CAAC;AAAA,QACpE;AACA,iBAAS,UAAU,UAAU;AAAA,MAC/B,CAAC;AAED,YAAM,UAAU,MAAM,OAAO,WAAW;AACxC,YAAM,SAAS,MAAM,KAAK,gBAAgB,QAAQ,YAAY,OAAO,UAAU,OAAO;AACtF,WAAK,UAAU,MAAM;AACrB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,gBACZ,QACA,YACA,OACA,UACA,SACkB;AAClB,YAAQ,SAAS;AAAA,MACf,KAAK;AAAA,MACL,KAAK,SAAS;AACZ,cAAM,WAAO,8BAAc,OAAO,QAAQ;AAC1C,cAAM,cAAc,CAAC,CAAC,KAAK;AAC3B,mBAAO,wBAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,0BAA0B;AAC7D,mBAAS,cAAU,4BAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,KAAK,YAAY;AAAA,YAC3C;AAAA,UACF;AACA,cAAI,aAAa;AACf,mBAAO,IAAI,2BAAc,GAAG,UAAU,YAAY,MAA2C;AAAA,UAC/F;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,aAAa;AAChB,cAAM,WAAO,mCAAmB,OAAO,QAAQ;AAC/C,mBAAO,wBAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,oBAAoB;AACvD,mBAAS,cAAU,4BAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,WAAW;AAAA,YACrC;AAAA,UACF;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,SAAS;AACZ,cAAM,WAAO,+BAAe,OAAO,QAAQ;AAC3C,mBAAO,wBAAU,YAAY,OAAO,aAAa;AAC/C,mBAAS,aAAa,GAAG,UAAU,kBAAkB;AACrD,mBAAS,cAAU,4BAAc,YAAY,IAAI,CAAC;AAClD,gBAAM,SAAS,MAAM,OAAO,OAAO;AAAA,YACjC;AAAA,UACF;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,aAAa;AAChB,cAAM,WAAO,mCAAmB,OAAO,QAAQ;AAC/C,cAAM,cAAc,CAAC,CAAC,KAAK;AAC3B,mBAAO,wBAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,mBAAmB;AACtD,mBAAS,cAAU,4BAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,UAAU;AAAA,YACpC;AAAA,UACF;AACA,cAAI,aAAa;AACf,mBAAO,IAAI,2BAAc,GAAG,UAAU,YAAY,MAA2C;AAAA,
UAC/F;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA;AACE,cAAM,IAAI,MAAM,wBAAwB,OAAO,EAAE;AAAA,IACrD;AAAA,EACF;AAAA,EAEmB,cAAc,OAAwB;AACvD,UAAM,OAAO,MAAM,OAAO;AAE1B,QAAI,gBAAgB,kCAAqB;AACvC,iBAAO,4BAAc,KAAK,IAAI;AAAA,IAChC;AAEA,UAAM,SAAS,KAAK,aAAa,KAAK;AACtC,WAAO,IAAI,2BAAY,MAAsD;AAAA,EAC/E;AAAA,EAEmB,aAAa,OAAyC;AACvE,UAAM,SAAkC,CAAC;AACzC,UAAM,OAAO,MAAM,OAAO;AAE1B,QAAI,gBAAgB,+BAAkB;AACpC,UAAI,KAAK,OAAQ,QAAO,SAAS,KAAK;AACtC,UAAI,KAAK,SAAU,QAAO,WAAW,KAAK;AAAA,IAC5C;AAGA,WAAO,aAAa,MAAM,OAAO;AAEjC,WAAO;AAAA,EACT;AACF;;;AC9HA,IAAAE,iBAAgC;AAChC,IAAAC,eAA0B;AAEnB,IAAM,iBAAN,MAA0C;AAAA,EAC/C,MAAM,QAAQ,OAAgB,UAAqC;AACjE,eAAO,wBAAU,kBAAkB,OAAO,SAAS;AACjD,WAAK,aAAa,+CAA+C;AACjE,WAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AACjC,YAAM,aAAS,gCAAgB,OAAO,QAAQ;AAC9C,WAAK,UAAU,MAAM;AACrB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;;;AJPA,IAAAC,eAAoD;IAOpD,+BAAiB,WAAW,IAAI,gBAAgB,CAAC;AAAA,IACjD,gCAAkB,WAAW,IAAI,iBAAiB,CAAC;AAAA,IAGnD,+BAAiB,SAAS,IAAI,cAAc,CAAC;AAAA,IAC7C,gCAAkB,SAAS,IAAI,eAAe,CAAC;","names":["import_core","import_openai","AzureOpenAI","import_openai","import_core","import_openai","import_core","import_openai","import_core","import_core"]}
|
package/dist/index.d.cts
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import OpenAI from 'openai';
|
|
2
|
+
import { Prompty, Message, Processor } from '@prompty/core';
|
|
3
|
+
import { OpenAIExecutor } from '@prompty/openai';
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Foundry executor — extends OpenAI executor with Azure AI Foundry client resolution.
|
|
7
|
+
*
|
|
8
|
+
* For Chat Completions: builds an AzureOpenAI client from the Foundry resource
|
|
9
|
+
* endpoint (derived from the project endpoint) with DefaultAzureCredential.
|
|
10
|
+
*
|
|
11
|
+
* The Foundry project endpoint is:
|
|
12
|
+
* https://<resource>.services.ai.azure.com/api/projects/<project>
|
|
13
|
+
* The AzureOpenAI endpoint (for Chat Completions) is:
|
|
14
|
+
* https://<resource>.services.ai.azure.com
|
|
15
|
+
*
|
|
16
|
+
* @module
|
|
17
|
+
*/
|
|
18
|
+
|
|
19
|
+
declare class FoundryExecutor extends OpenAIExecutor {
|
|
20
|
+
execute(agent: Prompty, messages: Message[]): Promise<unknown>;
|
|
21
|
+
private dispatchApiCall;
|
|
22
|
+
protected resolveClient(agent: Prompty): OpenAI;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* Foundry processor — identical to OpenAI processor.
|
|
27
|
+
*
|
|
28
|
+
* Foundry returns OpenAI-compatible responses via getOpenAIClient().
|
|
29
|
+
*
|
|
30
|
+
* @module
|
|
31
|
+
*/
|
|
32
|
+
|
|
33
|
+
declare class FoundryProcessor implements Processor {
|
|
34
|
+
process(agent: Prompty, response: unknown): Promise<unknown>;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Azure OpenAI executor — extends OpenAI executor with Azure-specific client.
|
|
39
|
+
*
|
|
40
|
+
* @module
|
|
41
|
+
*/
|
|
42
|
+
|
|
43
|
+
declare class AzureExecutor extends OpenAIExecutor {
|
|
44
|
+
execute(agent: Prompty, messages: Message[]): Promise<unknown>;
|
|
45
|
+
private dispatchApiCall;
|
|
46
|
+
protected resolveClient(agent: Prompty): OpenAI;
|
|
47
|
+
protected clientKwargs(agent: Prompty): Record<string, unknown>;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
/**
|
|
51
|
+
* Azure OpenAI processor — identical to OpenAI processor.
|
|
52
|
+
*
|
|
53
|
+
* Azure uses the same response format as OpenAI.
|
|
54
|
+
*
|
|
55
|
+
* @module
|
|
56
|
+
*/
|
|
57
|
+
|
|
58
|
+
declare class AzureProcessor implements Processor {
|
|
59
|
+
process(agent: Prompty, response: unknown): Promise<unknown>;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
export { AzureExecutor, AzureProcessor, FoundryExecutor, FoundryProcessor };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import OpenAI from 'openai';
|
|
2
|
+
import { Prompty, Message, Processor } from '@prompty/core';
|
|
3
|
+
import { OpenAIExecutor } from '@prompty/openai';
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* Foundry executor — extends OpenAI executor with Azure AI Foundry client resolution.
|
|
7
|
+
*
|
|
8
|
+
* For Chat Completions: builds an AzureOpenAI client from the Foundry resource
|
|
9
|
+
* endpoint (derived from the project endpoint) with DefaultAzureCredential.
|
|
10
|
+
*
|
|
11
|
+
* The Foundry project endpoint is:
|
|
12
|
+
* https://<resource>.services.ai.azure.com/api/projects/<project>
|
|
13
|
+
* The AzureOpenAI endpoint (for Chat Completions) is:
|
|
14
|
+
* https://<resource>.services.ai.azure.com
|
|
15
|
+
*
|
|
16
|
+
* @module
|
|
17
|
+
*/
|
|
18
|
+
|
|
19
|
+
declare class FoundryExecutor extends OpenAIExecutor {
|
|
20
|
+
execute(agent: Prompty, messages: Message[]): Promise<unknown>;
|
|
21
|
+
private dispatchApiCall;
|
|
22
|
+
protected resolveClient(agent: Prompty): OpenAI;
|
|
23
|
+
}
|
|
24
|
+
|
|
25
|
+
/**
|
|
26
|
+
* Foundry processor — identical to OpenAI processor.
|
|
27
|
+
*
|
|
28
|
+
* Foundry returns OpenAI-compatible responses via getOpenAIClient().
|
|
29
|
+
*
|
|
30
|
+
* @module
|
|
31
|
+
*/
|
|
32
|
+
|
|
33
|
+
declare class FoundryProcessor implements Processor {
|
|
34
|
+
process(agent: Prompty, response: unknown): Promise<unknown>;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Azure OpenAI executor — extends OpenAI executor with Azure-specific client.
|
|
39
|
+
*
|
|
40
|
+
* @module
|
|
41
|
+
*/
|
|
42
|
+
|
|
43
|
+
declare class AzureExecutor extends OpenAIExecutor {
|
|
44
|
+
execute(agent: Prompty, messages: Message[]): Promise<unknown>;
|
|
45
|
+
private dispatchApiCall;
|
|
46
|
+
protected resolveClient(agent: Prompty): OpenAI;
|
|
47
|
+
protected clientKwargs(agent: Prompty): Record<string, unknown>;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
/**
|
|
51
|
+
* Azure OpenAI processor — identical to OpenAI processor.
|
|
52
|
+
*
|
|
53
|
+
* Azure uses the same response format as OpenAI.
|
|
54
|
+
*
|
|
55
|
+
* @module
|
|
56
|
+
*/
|
|
57
|
+
|
|
58
|
+
declare class AzureProcessor implements Processor {
|
|
59
|
+
process(agent: Prompty, response: unknown): Promise<unknown>;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
export { AzureExecutor, AzureProcessor, FoundryExecutor, FoundryProcessor };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,287 @@
|
|
|
1
|
+
var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
|
|
2
|
+
get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
|
|
3
|
+
}) : x)(function(x) {
|
|
4
|
+
if (typeof require !== "undefined") return require.apply(this, arguments);
|
|
5
|
+
throw Error('Dynamic require of "' + x + '" is not supported');
|
|
6
|
+
});
|
|
7
|
+
|
|
8
|
+
// src/executor.ts
|
|
9
|
+
import { FoundryConnection, ReferenceConnection, PromptyStream } from "@prompty/core";
|
|
10
|
+
import { getConnection, traceSpan, sanitizeValue } from "@prompty/core";
|
|
11
|
+
import { OpenAIExecutor } from "@prompty/openai";
|
|
12
|
+
import { buildChatArgs, buildEmbeddingArgs, buildImageArgs, buildResponsesArgs } from "@prompty/openai";
|
|
13
|
+
function getResourceEndpoint(projectEndpoint) {
|
|
14
|
+
const url = new URL(projectEndpoint);
|
|
15
|
+
return `${url.protocol}//${url.host}`;
|
|
16
|
+
}
|
|
17
|
+
var FoundryExecutor = class extends OpenAIExecutor {
|
|
18
|
+
async execute(agent, messages) {
|
|
19
|
+
return traceSpan("FoundryExecutor", async (emit) => {
|
|
20
|
+
emit("signature", "prompty.foundry.executor.FoundryExecutor.invoke");
|
|
21
|
+
emit("inputs", { data: messages });
|
|
22
|
+
const client = this.resolveClient(agent);
|
|
23
|
+
const clientName = client.constructor?.name ?? "OpenAI";
|
|
24
|
+
await traceSpan(clientName, async (ctorEmit) => {
|
|
25
|
+
ctorEmit("signature", `${clientName}.ctor`);
|
|
26
|
+
const conn = agent.model?.connection;
|
|
27
|
+
if (conn instanceof ReferenceConnection) {
|
|
28
|
+
ctorEmit("inputs", { source: "reference", name: conn.name });
|
|
29
|
+
} else if (conn instanceof FoundryConnection) {
|
|
30
|
+
ctorEmit("inputs", sanitizeValue("ctor", {
|
|
31
|
+
endpoint: conn.endpoint ? getResourceEndpoint(conn.endpoint) : void 0,
|
|
32
|
+
deployment: agent.model?.id,
|
|
33
|
+
apiVersion: "2025-04-01-preview",
|
|
34
|
+
auth: "DefaultAzureCredential"
|
|
35
|
+
}));
|
|
36
|
+
}
|
|
37
|
+
ctorEmit("result", clientName);
|
|
38
|
+
});
|
|
39
|
+
const apiType = agent.model?.apiType ?? "chat";
|
|
40
|
+
const result = await this.dispatchApiCall(client, clientName, agent, messages, apiType);
|
|
41
|
+
emit("result", result);
|
|
42
|
+
return result;
|
|
43
|
+
});
|
|
44
|
+
}
|
|
45
|
+
async dispatchApiCall(client, clientName, agent, messages, apiType) {
|
|
46
|
+
switch (apiType) {
|
|
47
|
+
case "chat":
|
|
48
|
+
case "agent": {
|
|
49
|
+
const args = buildChatArgs(agent, messages);
|
|
50
|
+
const isStreaming = !!args.stream;
|
|
51
|
+
return traceSpan("create", async (callEmit) => {
|
|
52
|
+
callEmit("signature", `${clientName}.chat.completions.create`);
|
|
53
|
+
callEmit("inputs", sanitizeValue("create", args));
|
|
54
|
+
const result = await client.chat.completions.create(
|
|
55
|
+
args
|
|
56
|
+
);
|
|
57
|
+
if (isStreaming) {
|
|
58
|
+
return new PromptyStream(`${clientName}Executor`, result);
|
|
59
|
+
}
|
|
60
|
+
callEmit("result", result);
|
|
61
|
+
return result;
|
|
62
|
+
});
|
|
63
|
+
}
|
|
64
|
+
case "embedding": {
|
|
65
|
+
const args = buildEmbeddingArgs(agent, messages);
|
|
66
|
+
return traceSpan("create", async (callEmit) => {
|
|
67
|
+
callEmit("signature", `${clientName}.embeddings.create`);
|
|
68
|
+
callEmit("inputs", sanitizeValue("create", args));
|
|
69
|
+
const result = await client.embeddings.create(
|
|
70
|
+
args
|
|
71
|
+
);
|
|
72
|
+
callEmit("result", result);
|
|
73
|
+
return result;
|
|
74
|
+
});
|
|
75
|
+
}
|
|
76
|
+
case "image": {
|
|
77
|
+
const args = buildImageArgs(agent, messages);
|
|
78
|
+
return traceSpan("generate", async (callEmit) => {
|
|
79
|
+
callEmit("signature", `${clientName}.images.generate`);
|
|
80
|
+
callEmit("inputs", sanitizeValue("generate", args));
|
|
81
|
+
const result = await client.images.generate(
|
|
82
|
+
args
|
|
83
|
+
);
|
|
84
|
+
callEmit("result", result);
|
|
85
|
+
return result;
|
|
86
|
+
});
|
|
87
|
+
}
|
|
88
|
+
case "responses": {
|
|
89
|
+
const args = buildResponsesArgs(agent, messages);
|
|
90
|
+
const isStreaming = !!args.stream;
|
|
91
|
+
return traceSpan("create", async (callEmit) => {
|
|
92
|
+
callEmit("signature", `${clientName}.responses.create`);
|
|
93
|
+
callEmit("inputs", sanitizeValue("create", args));
|
|
94
|
+
const result = await client.responses.create(
|
|
95
|
+
args
|
|
96
|
+
);
|
|
97
|
+
if (isStreaming) {
|
|
98
|
+
return new PromptyStream(`${clientName}Executor`, result);
|
|
99
|
+
}
|
|
100
|
+
callEmit("result", result);
|
|
101
|
+
return result;
|
|
102
|
+
});
|
|
103
|
+
}
|
|
104
|
+
default:
|
|
105
|
+
throw new Error(`Unsupported apiType: ${apiType}`);
|
|
106
|
+
}
|
|
107
|
+
}
|
|
108
|
+
resolveClient(agent) {
|
|
109
|
+
const conn = agent.model?.connection;
|
|
110
|
+
if (conn instanceof ReferenceConnection) {
|
|
111
|
+
return getConnection(conn.name);
|
|
112
|
+
}
|
|
113
|
+
if (conn instanceof FoundryConnection && conn.endpoint) {
|
|
114
|
+
const { AzureOpenAI: AzureOpenAI2 } = __require("openai");
|
|
115
|
+
const { DefaultAzureCredential, getBearerTokenProvider } = __require("@azure/identity");
|
|
116
|
+
const credential = new DefaultAzureCredential();
|
|
117
|
+
const scope = "https://cognitiveservices.azure.com/.default";
|
|
118
|
+
const azureADTokenProvider = getBearerTokenProvider(credential, scope);
|
|
119
|
+
const resourceEndpoint = getResourceEndpoint(conn.endpoint);
|
|
120
|
+
return new AzureOpenAI2({
|
|
121
|
+
endpoint: resourceEndpoint,
|
|
122
|
+
azureADTokenProvider,
|
|
123
|
+
deployment: agent.model?.id,
|
|
124
|
+
apiVersion: "2025-04-01-preview"
|
|
125
|
+
});
|
|
126
|
+
}
|
|
127
|
+
throw new Error(
|
|
128
|
+
"Foundry executor requires a FoundryConnection (with endpoint) or a ReferenceConnection (with a pre-registered client). Set model.connection.kind to 'foundry' with an endpoint, or register a client with registerConnection()."
|
|
129
|
+
);
|
|
130
|
+
}
|
|
131
|
+
};
|
|
132
|
+
|
|
133
|
+
// src/processor.ts
|
|
134
|
+
import { processResponse } from "@prompty/openai";
|
|
135
|
+
import { traceSpan as traceSpan2 } from "@prompty/core";
|
|
136
|
+
var FoundryProcessor = class {
|
|
137
|
+
async process(agent, response) {
|
|
138
|
+
return traceSpan2("FoundryProcessor", async (emit) => {
|
|
139
|
+
emit("signature", "prompty.foundry.processor.FoundryProcessor.invoke");
|
|
140
|
+
emit("inputs", { data: response });
|
|
141
|
+
const result = processResponse(agent, response);
|
|
142
|
+
emit("result", result);
|
|
143
|
+
return result;
|
|
144
|
+
});
|
|
145
|
+
}
|
|
146
|
+
};
|
|
147
|
+
|
|
148
|
+
// src/azure-executor.ts
|
|
149
|
+
import { AzureOpenAI } from "openai";
|
|
150
|
+
import { ApiKeyConnection, ReferenceConnection as ReferenceConnection2, PromptyStream as PromptyStream2 } from "@prompty/core";
|
|
151
|
+
import { getConnection as getConnection2, traceSpan as traceSpan3, sanitizeValue as sanitizeValue2 } from "@prompty/core";
|
|
152
|
+
import { OpenAIExecutor as OpenAIExecutor2 } from "@prompty/openai";
|
|
153
|
+
import { buildChatArgs as buildChatArgs2, buildEmbeddingArgs as buildEmbeddingArgs2, buildImageArgs as buildImageArgs2, buildResponsesArgs as buildResponsesArgs2 } from "@prompty/openai";
|
|
154
|
+
var AzureExecutor = class extends OpenAIExecutor2 {
|
|
155
|
+
async execute(agent, messages) {
|
|
156
|
+
return traceSpan3("AzureExecutor", async (emit) => {
|
|
157
|
+
emit("signature", "prompty.azure.executor.AzureExecutor.invoke");
|
|
158
|
+
emit("inputs", { data: messages });
|
|
159
|
+
const client = this.resolveClient(agent);
|
|
160
|
+
const clientName = client.constructor?.name ?? "AzureOpenAI";
|
|
161
|
+
await traceSpan3(clientName, async (ctorEmit) => {
|
|
162
|
+
ctorEmit("signature", `${clientName}.ctor`);
|
|
163
|
+
const conn = agent.model?.connection;
|
|
164
|
+
if (conn instanceof ReferenceConnection2) {
|
|
165
|
+
ctorEmit("inputs", { source: "reference", name: conn.name });
|
|
166
|
+
} else {
|
|
167
|
+
ctorEmit("inputs", sanitizeValue2("ctor", this.clientKwargs(agent)));
|
|
168
|
+
}
|
|
169
|
+
ctorEmit("result", clientName);
|
|
170
|
+
});
|
|
171
|
+
const apiType = agent.model?.apiType ?? "chat";
|
|
172
|
+
const result = await this.dispatchApiCall(client, clientName, agent, messages, apiType);
|
|
173
|
+
emit("result", result);
|
|
174
|
+
return result;
|
|
175
|
+
});
|
|
176
|
+
}
|
|
177
|
+
async dispatchApiCall(client, clientName, agent, messages, apiType) {
|
|
178
|
+
switch (apiType) {
|
|
179
|
+
case "chat":
|
|
180
|
+
case "agent": {
|
|
181
|
+
const args = buildChatArgs2(agent, messages);
|
|
182
|
+
const isStreaming = !!args.stream;
|
|
183
|
+
return traceSpan3("create", async (callEmit) => {
|
|
184
|
+
callEmit("signature", `${clientName}.chat.completions.create`);
|
|
185
|
+
callEmit("inputs", sanitizeValue2("create", args));
|
|
186
|
+
const result = await client.chat.completions.create(
|
|
187
|
+
args
|
|
188
|
+
);
|
|
189
|
+
if (isStreaming) {
|
|
190
|
+
return new PromptyStream2(`${clientName}Executor`, result);
|
|
191
|
+
}
|
|
192
|
+
callEmit("result", result);
|
|
193
|
+
return result;
|
|
194
|
+
});
|
|
195
|
+
}
|
|
196
|
+
case "embedding": {
|
|
197
|
+
const args = buildEmbeddingArgs2(agent, messages);
|
|
198
|
+
return traceSpan3("create", async (callEmit) => {
|
|
199
|
+
callEmit("signature", `${clientName}.embeddings.create`);
|
|
200
|
+
callEmit("inputs", sanitizeValue2("create", args));
|
|
201
|
+
const result = await client.embeddings.create(
|
|
202
|
+
args
|
|
203
|
+
);
|
|
204
|
+
callEmit("result", result);
|
|
205
|
+
return result;
|
|
206
|
+
});
|
|
207
|
+
}
|
|
208
|
+
case "image": {
|
|
209
|
+
const args = buildImageArgs2(agent, messages);
|
|
210
|
+
return traceSpan3("generate", async (callEmit) => {
|
|
211
|
+
callEmit("signature", `${clientName}.images.generate`);
|
|
212
|
+
callEmit("inputs", sanitizeValue2("generate", args));
|
|
213
|
+
const result = await client.images.generate(
|
|
214
|
+
args
|
|
215
|
+
);
|
|
216
|
+
callEmit("result", result);
|
|
217
|
+
return result;
|
|
218
|
+
});
|
|
219
|
+
}
|
|
220
|
+
case "responses": {
|
|
221
|
+
const args = buildResponsesArgs2(agent, messages);
|
|
222
|
+
const isStreaming = !!args.stream;
|
|
223
|
+
return traceSpan3("create", async (callEmit) => {
|
|
224
|
+
callEmit("signature", `${clientName}.responses.create`);
|
|
225
|
+
callEmit("inputs", sanitizeValue2("create", args));
|
|
226
|
+
const result = await client.responses.create(
|
|
227
|
+
args
|
|
228
|
+
);
|
|
229
|
+
if (isStreaming) {
|
|
230
|
+
return new PromptyStream2(`${clientName}Executor`, result);
|
|
231
|
+
}
|
|
232
|
+
callEmit("result", result);
|
|
233
|
+
return result;
|
|
234
|
+
});
|
|
235
|
+
}
|
|
236
|
+
default:
|
|
237
|
+
throw new Error(`Unsupported apiType: ${apiType}`);
|
|
238
|
+
}
|
|
239
|
+
}
|
|
240
|
+
resolveClient(agent) {
|
|
241
|
+
const conn = agent.model?.connection;
|
|
242
|
+
if (conn instanceof ReferenceConnection2) {
|
|
243
|
+
return getConnection2(conn.name);
|
|
244
|
+
}
|
|
245
|
+
const kwargs = this.clientKwargs(agent);
|
|
246
|
+
return new AzureOpenAI(kwargs);
|
|
247
|
+
}
|
|
248
|
+
clientKwargs(agent) {
|
|
249
|
+
const kwargs = {};
|
|
250
|
+
const conn = agent.model?.connection;
|
|
251
|
+
if (conn instanceof ApiKeyConnection) {
|
|
252
|
+
if (conn.apiKey) kwargs.apiKey = conn.apiKey;
|
|
253
|
+
if (conn.endpoint) kwargs.endpoint = conn.endpoint;
|
|
254
|
+
}
|
|
255
|
+
kwargs.deployment = agent.model?.id;
|
|
256
|
+
return kwargs;
|
|
257
|
+
}
|
|
258
|
+
};
|
|
259
|
+
|
|
260
|
+
// src/azure-processor.ts
|
|
261
|
+
import { processResponse as processResponse2 } from "@prompty/openai";
|
|
262
|
+
import { traceSpan as traceSpan4 } from "@prompty/core";
|
|
263
|
+
var AzureProcessor = class {
|
|
264
|
+
async process(agent, response) {
|
|
265
|
+
return traceSpan4("AzureProcessor", async (emit) => {
|
|
266
|
+
emit("signature", "prompty.azure.processor.AzureProcessor.invoke");
|
|
267
|
+
emit("inputs", { data: response });
|
|
268
|
+
const result = processResponse2(agent, response);
|
|
269
|
+
emit("result", result);
|
|
270
|
+
return result;
|
|
271
|
+
});
|
|
272
|
+
}
|
|
273
|
+
};
|
|
274
|
+
|
|
275
|
+
// src/index.ts
|
|
276
|
+
import { registerExecutor, registerProcessor } from "@prompty/core";
|
|
277
|
+
registerExecutor("foundry", new FoundryExecutor());
|
|
278
|
+
registerProcessor("foundry", new FoundryProcessor());
|
|
279
|
+
registerExecutor("azure", new AzureExecutor());
|
|
280
|
+
registerProcessor("azure", new AzureProcessor());
|
|
281
|
+
export {
|
|
282
|
+
AzureExecutor,
|
|
283
|
+
AzureProcessor,
|
|
284
|
+
FoundryExecutor,
|
|
285
|
+
FoundryProcessor
|
|
286
|
+
};
|
|
287
|
+
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/executor.ts","../src/processor.ts","../src/azure-executor.ts","../src/azure-processor.ts","../src/index.ts"],"sourcesContent":["/**\r\n * Foundry executor — extends OpenAI executor with Azure AI Foundry client resolution.\r\n *\r\n * For Chat Completions: builds an AzureOpenAI client from the Foundry resource\r\n * endpoint (derived from the project endpoint) with DefaultAzureCredential.\r\n *\r\n * The Foundry project endpoint is:\r\n * https://<resource>.services.ai.azure.com/api/projects/<project>\r\n * The AzureOpenAI endpoint (for Chat Completions) is:\r\n * https://<resource>.services.ai.azure.com\r\n *\r\n * @module\r\n */\r\n\r\nimport type OpenAI from \"openai\";\r\nimport type { Prompty, Message } from \"@prompty/core\";\r\nimport { FoundryConnection, ReferenceConnection, PromptyStream } from \"@prompty/core\";\r\nimport { getConnection, traceSpan, sanitizeValue } from \"@prompty/core\";\r\nimport { OpenAIExecutor } from \"@prompty/openai\";\r\nimport { buildChatArgs, buildEmbeddingArgs, buildImageArgs, buildResponsesArgs } from \"@prompty/openai\";\r\n\r\n/**\r\n * Extract the resource base endpoint from a Foundry project endpoint.\r\n * e.g. \"https://foo.services.ai.azure.com/api/projects/bar\" → \"https://foo.services.ai.azure.com\"\r\n */\r\nfunction getResourceEndpoint(projectEndpoint: string): string {\r\n const url = new URL(projectEndpoint);\r\n return `${url.protocol}//${url.host}`;\r\n}\r\n\r\nexport class FoundryExecutor extends OpenAIExecutor {\r\n override async execute(agent: Prompty, messages: Message[]): Promise<unknown> {\r\n return traceSpan(\"FoundryExecutor\", async (emit) => {\r\n emit(\"signature\", \"prompty.foundry.executor.FoundryExecutor.invoke\");\r\n emit(\"inputs\", { data: messages });\r\n\r\n const client = this.resolveClient(agent);\r\n const clientName = client.constructor?.name ?? 
\"OpenAI\";\r\n\r\n // Trace what client we resolved and how\r\n await traceSpan(clientName, async (ctorEmit) => {\r\n ctorEmit(\"signature\", `${clientName}.ctor`);\r\n const conn = agent.model?.connection;\r\n if (conn instanceof ReferenceConnection) {\r\n ctorEmit(\"inputs\", { source: \"reference\", name: conn.name });\r\n } else if (conn instanceof FoundryConnection) {\r\n ctorEmit(\"inputs\", sanitizeValue(\"ctor\", {\r\n endpoint: conn.endpoint ? getResourceEndpoint(conn.endpoint) : undefined,\r\n deployment: agent.model?.id,\r\n apiVersion: \"2025-04-01-preview\",\r\n auth: \"DefaultAzureCredential\",\r\n }));\r\n }\r\n ctorEmit(\"result\", clientName);\r\n });\r\n\r\n const apiType = agent.model?.apiType ?? \"chat\";\r\n const result = await this.dispatchApiCall(client, clientName, agent, messages, apiType);\r\n emit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n\r\n private async dispatchApiCall(\r\n client: OpenAI,\r\n clientName: string,\r\n agent: Prompty,\r\n messages: Message[],\r\n apiType: string,\r\n ): Promise<unknown> {\r\n switch (apiType) {\r\n case \"chat\":\r\n case \"agent\": {\r\n const args = buildChatArgs(agent, messages);\r\n const isStreaming = !!args.stream;\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.chat.completions.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.chat.completions.create(\r\n args as unknown as Parameters<typeof client.chat.completions.create>[0],\r\n );\r\n if (isStreaming) {\r\n return new PromptyStream(`${clientName}Executor`, result as unknown as AsyncIterable<unknown>);\r\n }\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"embedding\": {\r\n const args = buildEmbeddingArgs(agent, messages);\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.embeddings.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", 
args));\r\n const result = await client.embeddings.create(\r\n args as unknown as Parameters<typeof client.embeddings.create>[0],\r\n );\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"image\": {\r\n const args = buildImageArgs(agent, messages);\r\n return traceSpan(\"generate\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.images.generate`);\r\n callEmit(\"inputs\", sanitizeValue(\"generate\", args));\r\n const result = await client.images.generate(\r\n args as unknown as Parameters<typeof client.images.generate>[0],\r\n );\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"responses\": {\r\n const args = buildResponsesArgs(agent, messages);\r\n const isStreaming = !!args.stream;\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.responses.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.responses.create(\r\n args as unknown as Parameters<typeof client.responses.create>[0],\r\n );\r\n if (isStreaming) {\r\n return new PromptyStream(`${clientName}Executor`, result as unknown as AsyncIterable<unknown>);\r\n }\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n default:\r\n throw new Error(`Unsupported apiType: ${apiType}`);\r\n }\r\n }\r\n\r\n protected override resolveClient(agent: Prompty): OpenAI {\r\n const conn = agent.model?.connection;\r\n\r\n // Pre-registered client by name\r\n if (conn instanceof ReferenceConnection) {\r\n return getConnection(conn.name) as OpenAI;\r\n }\r\n\r\n // Build an AzureOpenAI client from the FoundryConnection endpoint\r\n if (conn instanceof FoundryConnection && conn.endpoint) {\r\n const { AzureOpenAI } = require(\"openai\");\r\n const { DefaultAzureCredential, getBearerTokenProvider } = require(\"@azure/identity\");\r\n\r\n const credential = new DefaultAzureCredential();\r\n const scope = 
\"https://cognitiveservices.azure.com/.default\";\r\n const azureADTokenProvider = getBearerTokenProvider(credential, scope);\r\n const resourceEndpoint = getResourceEndpoint(conn.endpoint);\r\n\r\n return new AzureOpenAI({\r\n endpoint: resourceEndpoint,\r\n azureADTokenProvider,\r\n deployment: agent.model?.id,\r\n apiVersion: \"2025-04-01-preview\",\r\n }) as OpenAI;\r\n }\r\n\r\n throw new Error(\r\n \"Foundry executor requires a FoundryConnection (with endpoint) \" +\r\n \"or a ReferenceConnection (with a pre-registered client). \" +\r\n \"Set model.connection.kind to 'foundry' with an endpoint, \" +\r\n \"or register a client with registerConnection().\",\r\n );\r\n }\r\n}\r\n","/**\r\n * Foundry processor — identical to OpenAI processor.\r\n *\r\n * Foundry returns OpenAI-compatible responses via getOpenAIClient().\r\n *\r\n * @module\r\n */\r\n\r\nimport type { Prompty } from \"@prompty/core\";\r\nimport type { Processor } from \"@prompty/core\";\r\nimport { processResponse } from \"@prompty/openai\";\r\nimport { traceSpan } from \"@prompty/core\";\r\n\r\nexport class FoundryProcessor implements Processor {\r\n async process(agent: Prompty, response: unknown): Promise<unknown> {\r\n return traceSpan(\"FoundryProcessor\", async (emit) => {\r\n emit(\"signature\", \"prompty.foundry.processor.FoundryProcessor.invoke\");\r\n emit(\"inputs\", { data: response });\r\n const result = processResponse(agent, response);\r\n emit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n}\r\n","/**\r\n * Azure OpenAI executor — extends OpenAI executor with Azure-specific client.\r\n *\r\n * @module\r\n */\r\n\r\nimport OpenAI, { AzureOpenAI } from \"openai\";\r\nimport type { Prompty, Message } from \"@prompty/core\";\r\nimport { ApiKeyConnection, ReferenceConnection, PromptyStream } from \"@prompty/core\";\r\nimport { getConnection, traceSpan, sanitizeValue } from \"@prompty/core\";\r\nimport { OpenAIExecutor } from \"@prompty/openai\";\r\nimport { buildChatArgs, 
buildEmbeddingArgs, buildImageArgs, buildResponsesArgs } from \"@prompty/openai\";\r\n\r\nexport class AzureExecutor extends OpenAIExecutor {\r\n override async execute(agent: Prompty, messages: Message[]): Promise<unknown> {\r\n return traceSpan(\"AzureExecutor\", async (emit) => {\r\n emit(\"signature\", \"prompty.azure.executor.AzureExecutor.invoke\");\r\n emit(\"inputs\", { data: messages });\r\n\r\n const client = this.resolveClient(agent);\r\n const clientName = client.constructor?.name ?? \"AzureOpenAI\";\r\n\r\n // Trace what client we resolved and how\r\n await traceSpan(clientName, async (ctorEmit) => {\r\n ctorEmit(\"signature\", `${clientName}.ctor`);\r\n const conn = agent.model?.connection;\r\n if (conn instanceof ReferenceConnection) {\r\n ctorEmit(\"inputs\", { source: \"reference\", name: conn.name });\r\n } else {\r\n ctorEmit(\"inputs\", sanitizeValue(\"ctor\", this.clientKwargs(agent)));\r\n }\r\n ctorEmit(\"result\", clientName);\r\n });\r\n\r\n const apiType = agent.model?.apiType ?? 
\"chat\";\r\n const result = await this.dispatchApiCall(client, clientName, agent, messages, apiType);\r\n emit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n\r\n private async dispatchApiCall(\r\n client: OpenAI,\r\n clientName: string,\r\n agent: Prompty,\r\n messages: Message[],\r\n apiType: string,\r\n ): Promise<unknown> {\r\n switch (apiType) {\r\n case \"chat\":\r\n case \"agent\": {\r\n const args = buildChatArgs(agent, messages);\r\n const isStreaming = !!args.stream;\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.chat.completions.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.chat.completions.create(\r\n args as unknown as Parameters<typeof client.chat.completions.create>[0],\r\n );\r\n if (isStreaming) {\r\n return new PromptyStream(`${clientName}Executor`, result as unknown as AsyncIterable<unknown>);\r\n }\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"embedding\": {\r\n const args = buildEmbeddingArgs(agent, messages);\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.embeddings.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.embeddings.create(\r\n args as unknown as Parameters<typeof client.embeddings.create>[0],\r\n );\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"image\": {\r\n const args = buildImageArgs(agent, messages);\r\n return traceSpan(\"generate\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.images.generate`);\r\n callEmit(\"inputs\", sanitizeValue(\"generate\", args));\r\n const result = await client.images.generate(\r\n args as unknown as Parameters<typeof client.images.generate>[0],\r\n );\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n case \"responses\": {\r\n const args = buildResponsesArgs(agent, messages);\r\n 
const isStreaming = !!args.stream;\r\n return traceSpan(\"create\", async (callEmit) => {\r\n callEmit(\"signature\", `${clientName}.responses.create`);\r\n callEmit(\"inputs\", sanitizeValue(\"create\", args));\r\n const result = await client.responses.create(\r\n args as unknown as Parameters<typeof client.responses.create>[0],\r\n );\r\n if (isStreaming) {\r\n return new PromptyStream(`${clientName}Executor`, result as unknown as AsyncIterable<unknown>);\r\n }\r\n callEmit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n default:\r\n throw new Error(`Unsupported apiType: ${apiType}`);\r\n }\r\n }\r\n\r\n protected override resolveClient(agent: Prompty): OpenAI {\r\n const conn = agent.model?.connection;\r\n\r\n if (conn instanceof ReferenceConnection) {\r\n return getConnection(conn.name) as OpenAI;\r\n }\r\n\r\n const kwargs = this.clientKwargs(agent);\r\n return new AzureOpenAI(kwargs as ConstructorParameters<typeof AzureOpenAI>[0]);\r\n }\r\n\r\n protected override clientKwargs(agent: Prompty): Record<string, unknown> {\r\n const kwargs: Record<string, unknown> = {};\r\n const conn = agent.model?.connection;\r\n\r\n if (conn instanceof ApiKeyConnection) {\r\n if (conn.apiKey) kwargs.apiKey = conn.apiKey;\r\n if (conn.endpoint) kwargs.endpoint = conn.endpoint;\r\n }\r\n\r\n // Azure requires deployment = model id\r\n kwargs.deployment = agent.model?.id;\r\n\r\n return kwargs;\r\n }\r\n}\r\n","/**\r\n * Azure OpenAI processor — identical to OpenAI processor.\r\n *\r\n * Azure uses the same response format as OpenAI.\r\n *\r\n * @module\r\n */\r\n\r\nimport type { Prompty } from \"@prompty/core\";\r\nimport type { Processor } from \"@prompty/core\";\r\nimport { processResponse } from \"@prompty/openai\";\r\nimport { traceSpan } from \"@prompty/core\";\r\n\r\nexport class AzureProcessor implements Processor {\r\n async process(agent: Prompty, response: unknown): Promise<unknown> {\r\n return traceSpan(\"AzureProcessor\", async (emit) => {\r\n 
emit(\"signature\", \"prompty.azure.processor.AzureProcessor.invoke\");\r\n emit(\"inputs\", { data: response });\r\n const result = processResponse(agent, response);\r\n emit(\"result\", result);\r\n return result;\r\n });\r\n }\r\n}\r\n","/**\r\n * @prompty/foundry — Microsoft Foundry and Azure OpenAI provider for Prompty.\r\n *\r\n * Importing this package auto-registers:\r\n * - \"foundry\" executor and processor (primary)\r\n * - \"azure\" executor and processor (deprecated alias for backward compatibility)\r\n *\r\n * @module @prompty/foundry\r\n */\r\n\r\nexport { FoundryExecutor } from \"./executor.js\";\r\nexport { FoundryProcessor } from \"./processor.js\";\r\nexport { AzureExecutor } from \"./azure-executor.js\";\r\nexport { AzureProcessor } from \"./azure-processor.js\";\r\n\r\n// Auto-register on import\r\nimport { registerExecutor, registerProcessor } from \"@prompty/core\";\r\nimport { FoundryExecutor } from \"./executor.js\";\r\nimport { FoundryProcessor } from \"./processor.js\";\r\nimport { AzureExecutor } from \"./azure-executor.js\";\r\nimport { AzureProcessor } from \"./azure-processor.js\";\r\n\r\n// Primary registration\r\nregisterExecutor(\"foundry\", new FoundryExecutor());\r\nregisterProcessor(\"foundry\", new FoundryProcessor());\r\n\r\n// Deprecated backward-compat alias\r\nregisterExecutor(\"azure\", new AzureExecutor());\r\nregisterProcessor(\"azure\", new 
AzureProcessor());\r\n"],"mappings":";;;;;;;;AAgBA,SAAS,mBAAmB,qBAAqB,qBAAqB;AACtE,SAAS,eAAe,WAAW,qBAAqB;AACxD,SAAS,sBAAsB;AAC/B,SAAS,eAAe,oBAAoB,gBAAgB,0BAA0B;AAMtF,SAAS,oBAAoB,iBAAiC;AAC5D,QAAM,MAAM,IAAI,IAAI,eAAe;AACnC,SAAO,GAAG,IAAI,QAAQ,KAAK,IAAI,IAAI;AACrC;AAEO,IAAM,kBAAN,cAA8B,eAAe;AAAA,EAClD,MAAe,QAAQ,OAAgB,UAAuC;AAC5E,WAAO,UAAU,mBAAmB,OAAO,SAAS;AAClD,WAAK,aAAa,iDAAiD;AACnE,WAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AAEjC,YAAM,SAAS,KAAK,cAAc,KAAK;AACvC,YAAM,aAAa,OAAO,aAAa,QAAQ;AAG/C,YAAM,UAAU,YAAY,OAAO,aAAa;AAC9C,iBAAS,aAAa,GAAG,UAAU,OAAO;AAC1C,cAAM,OAAO,MAAM,OAAO;AAC1B,YAAI,gBAAgB,qBAAqB;AACvC,mBAAS,UAAU,EAAE,QAAQ,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,QAC7D,WAAW,gBAAgB,mBAAmB;AAC5C,mBAAS,UAAU,cAAc,QAAQ;AAAA,YACvC,UAAU,KAAK,WAAW,oBAAoB,KAAK,QAAQ,IAAI;AAAA,YAC/D,YAAY,MAAM,OAAO;AAAA,YACzB,YAAY;AAAA,YACZ,MAAM;AAAA,UACR,CAAC,CAAC;AAAA,QACJ;AACA,iBAAS,UAAU,UAAU;AAAA,MAC/B,CAAC;AAED,YAAM,UAAU,MAAM,OAAO,WAAW;AACxC,YAAM,SAAS,MAAM,KAAK,gBAAgB,QAAQ,YAAY,OAAO,UAAU,OAAO;AACtF,WAAK,UAAU,MAAM;AACrB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,gBACZ,QACA,YACA,OACA,UACA,SACkB;AAClB,YAAQ,SAAS;AAAA,MACf,KAAK;AAAA,MACL,KAAK,SAAS;AACZ,cAAM,OAAO,cAAc,OAAO,QAAQ;AAC1C,cAAM,cAAc,CAAC,CAAC,KAAK;AAC3B,eAAO,UAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,0BAA0B;AAC7D,mBAAS,UAAU,cAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,KAAK,YAAY;AAAA,YAC3C;AAAA,UACF;AACA,cAAI,aAAa;AACf,mBAAO,IAAI,cAAc,GAAG,UAAU,YAAY,MAA2C;AAAA,UAC/F;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,aAAa;AAChB,cAAM,OAAO,mBAAmB,OAAO,QAAQ;AAC/C,eAAO,UAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,oBAAoB;AACvD,mBAAS,UAAU,cAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,WAAW;AAAA,YACrC;AAAA,UACF;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,SAAS;AACZ,cAAM,OAAO,eAAe,OAAO,QAAQ;AAC3C,eAAO,UAAU,YAAY,OAAO,aAAa;AAC/C,mBAAS,aAAa,GAAG,UAAU,kBAAkB;AACrD,mBAAS,UAAU,cAAc,YAAY,IAAI,CAAC;AAClD,gBAAM,SAAS,MAAM,OAAO,OAAO;AAAA,YACjC;AAAA,UACF;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA
,KAAK,aAAa;AAChB,cAAM,OAAO,mBAAmB,OAAO,QAAQ;AAC/C,cAAM,cAAc,CAAC,CAAC,KAAK;AAC3B,eAAO,UAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,mBAAmB;AACtD,mBAAS,UAAU,cAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,UAAU;AAAA,YACpC;AAAA,UACF;AACA,cAAI,aAAa;AACf,mBAAO,IAAI,cAAc,GAAG,UAAU,YAAY,MAA2C;AAAA,UAC/F;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA;AACE,cAAM,IAAI,MAAM,wBAAwB,OAAO,EAAE;AAAA,IACrD;AAAA,EACF;AAAA,EAEmB,cAAc,OAAwB;AACvD,UAAM,OAAO,MAAM,OAAO;AAG1B,QAAI,gBAAgB,qBAAqB;AACvC,aAAO,cAAc,KAAK,IAAI;AAAA,IAChC;AAGA,QAAI,gBAAgB,qBAAqB,KAAK,UAAU;AACtD,YAAM,EAAE,aAAAA,aAAY,IAAI,UAAQ,QAAQ;AACxC,YAAM,EAAE,wBAAwB,uBAAuB,IAAI,UAAQ,iBAAiB;AAEpF,YAAM,aAAa,IAAI,uBAAuB;AAC9C,YAAM,QAAQ;AACd,YAAM,uBAAuB,uBAAuB,YAAY,KAAK;AACrE,YAAM,mBAAmB,oBAAoB,KAAK,QAAQ;AAE1D,aAAO,IAAIA,aAAY;AAAA,QACrB,UAAU;AAAA,QACV;AAAA,QACA,YAAY,MAAM,OAAO;AAAA,QACzB,YAAY;AAAA,MACd,CAAC;AAAA,IACH;AAEA,UAAM,IAAI;AAAA,MACR;AAAA,IAIF;AAAA,EACF;AACF;;;AC5JA,SAAS,uBAAuB;AAChC,SAAS,aAAAC,kBAAiB;AAEnB,IAAM,mBAAN,MAA4C;AAAA,EACjD,MAAM,QAAQ,OAAgB,UAAqC;AACjE,WAAOA,WAAU,oBAAoB,OAAO,SAAS;AACnD,WAAK,aAAa,mDAAmD;AACrE,WAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AACjC,YAAM,SAAS,gBAAgB,OAAO,QAAQ;AAC9C,WAAK,UAAU,MAAM;AACrB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;;;ACjBA,SAAiB,mBAAmB;AAEpC,SAAS,kBAAkB,uBAAAC,sBAAqB,iBAAAC,sBAAqB;AACrE,SAAS,iBAAAC,gBAAe,aAAAC,YAAW,iBAAAC,sBAAqB;AACxD,SAAS,kBAAAC,uBAAsB;AAC/B,SAAS,iBAAAC,gBAAe,sBAAAC,qBAAoB,kBAAAC,iBAAgB,sBAAAC,2BAA0B;AAE/E,IAAM,gBAAN,cAA4BJ,gBAAe;AAAA,EAChD,MAAe,QAAQ,OAAgB,UAAuC;AAC5E,WAAOF,WAAU,iBAAiB,OAAO,SAAS;AAChD,WAAK,aAAa,6CAA6C;AAC/D,WAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AAEjC,YAAM,SAAS,KAAK,cAAc,KAAK;AACvC,YAAM,aAAa,OAAO,aAAa,QAAQ;AAG/C,YAAMA,WAAU,YAAY,OAAO,aAAa;AAC9C,iBAAS,aAAa,GAAG,UAAU,OAAO;AAC1C,cAAM,OAAO,MAAM,OAAO;AAC1B,YAAI,gBAAgBH,sBAAqB;AACvC,mBAAS,UAAU,EAAE,QAAQ,aAAa,MAAM,KAAK,KAAK,CAAC;AAAA,QAC7D,OAAO;AACL,mBAAS,UAAUI,eAAc,QAAQ,KAAK,aAAa,KAAK,CAAC,CAAC;AAAA,QACpE;AACA,iBAAS,UAAU,UAAU;AAAA,MAC/B,CAAC;AAED,YAAM,UAAU,MAAM,OAAO,WAAW;AACxC,YAAM,SAAS,MAAM,KAAK,gBAAgB,QAAQ,Y
AAY,OAAO,UAAU,OAAO;AACtF,WAAK,UAAU,MAAM;AACrB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AAAA,EAEA,MAAc,gBACZ,QACA,YACA,OACA,UACA,SACkB;AAClB,YAAQ,SAAS;AAAA,MACf,KAAK;AAAA,MACL,KAAK,SAAS;AACZ,cAAM,OAAOE,eAAc,OAAO,QAAQ;AAC1C,cAAM,cAAc,CAAC,CAAC,KAAK;AAC3B,eAAOH,WAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,0BAA0B;AAC7D,mBAAS,UAAUC,eAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,KAAK,YAAY;AAAA,YAC3C;AAAA,UACF;AACA,cAAI,aAAa;AACf,mBAAO,IAAIH,eAAc,GAAG,UAAU,YAAY,MAA2C;AAAA,UAC/F;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,aAAa;AAChB,cAAM,OAAOM,oBAAmB,OAAO,QAAQ;AAC/C,eAAOJ,WAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,oBAAoB;AACvD,mBAAS,UAAUC,eAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,WAAW;AAAA,YACrC;AAAA,UACF;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,SAAS;AACZ,cAAM,OAAOI,gBAAe,OAAO,QAAQ;AAC3C,eAAOL,WAAU,YAAY,OAAO,aAAa;AAC/C,mBAAS,aAAa,GAAG,UAAU,kBAAkB;AACrD,mBAAS,UAAUC,eAAc,YAAY,IAAI,CAAC;AAClD,gBAAM,SAAS,MAAM,OAAO,OAAO;AAAA,YACjC;AAAA,UACF;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA,KAAK,aAAa;AAChB,cAAM,OAAOK,oBAAmB,OAAO,QAAQ;AAC/C,cAAM,cAAc,CAAC,CAAC,KAAK;AAC3B,eAAON,WAAU,UAAU,OAAO,aAAa;AAC7C,mBAAS,aAAa,GAAG,UAAU,mBAAmB;AACtD,mBAAS,UAAUC,eAAc,UAAU,IAAI,CAAC;AAChD,gBAAM,SAAS,MAAM,OAAO,UAAU;AAAA,YACpC;AAAA,UACF;AACA,cAAI,aAAa;AACf,mBAAO,IAAIH,eAAc,GAAG,UAAU,YAAY,MAA2C;AAAA,UAC/F;AACA,mBAAS,UAAU,MAAM;AACzB,iBAAO;AAAA,QACT,CAAC;AAAA,MACH;AAAA,MACA;AACE,cAAM,IAAI,MAAM,wBAAwB,OAAO,EAAE;AAAA,IACrD;AAAA,EACF;AAAA,EAEmB,cAAc,OAAwB;AACvD,UAAM,OAAO,MAAM,OAAO;AAE1B,QAAI,gBAAgBD,sBAAqB;AACvC,aAAOE,eAAc,KAAK,IAAI;AAAA,IAChC;AAEA,UAAM,SAAS,KAAK,aAAa,KAAK;AACtC,WAAO,IAAI,YAAY,MAAsD;AAAA,EAC/E;AAAA,EAEmB,aAAa,OAAyC;AACvE,UAAM,SAAkC,CAAC;AACzC,UAAM,OAAO,MAAM,OAAO;AAE1B,QAAI,gBAAgB,kBAAkB;AACpC,UAAI,KAAK,OAAQ,QAAO,SAAS,KAAK;AACtC,UAAI,KAAK,SAAU,QAAO,WAAW,KAAK;AAAA,IAC5C;AAGA,WAAO,aAAa,MAAM,OAAO;AAEjC,WAAO;AAAA,EACT;AACF;;;AC9HA,SAAS,mBAAAQ,wBAAuB;AAChC,SAAS,aAAAC,kBAAiB;AAEnB,IAAM,iBAAN,MAA0C;AAAA,EAC/C,MAAM,QAA
Q,OAAgB,UAAqC;AACjE,WAAOA,WAAU,kBAAkB,OAAO,SAAS;AACjD,WAAK,aAAa,+CAA+C;AACjE,WAAK,UAAU,EAAE,MAAM,SAAS,CAAC;AACjC,YAAM,SAASD,iBAAgB,OAAO,QAAQ;AAC9C,WAAK,UAAU,MAAM;AACrB,aAAO;AAAA,IACT,CAAC;AAAA,EACH;AACF;;;ACPA,SAAS,kBAAkB,yBAAyB;AAOpD,iBAAiB,WAAW,IAAI,gBAAgB,CAAC;AACjD,kBAAkB,WAAW,IAAI,iBAAiB,CAAC;AAGnD,iBAAiB,SAAS,IAAI,cAAc,CAAC;AAC7C,kBAAkB,SAAS,IAAI,eAAe,CAAC;","names":["AzureOpenAI","traceSpan","ReferenceConnection","PromptyStream","getConnection","traceSpan","sanitizeValue","OpenAIExecutor","buildChatArgs","buildEmbeddingArgs","buildImageArgs","buildResponsesArgs","processResponse","traceSpan"]}
|
package/package.json
ADDED
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@prompty/foundry",
|
|
3
|
+
"version": "2.0.0-alpha.1",
|
|
4
|
+
"description": "Microsoft Foundry provider for Prompty — executor and processor for Azure AI Foundry",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"main": "./dist/index.cjs",
|
|
7
|
+
"module": "./dist/index.js",
|
|
8
|
+
"types": "./dist/index.d.ts",
|
|
9
|
+
"exports": {
|
|
10
|
+
".": {
|
|
11
|
+
"types": "./dist/index.d.ts",
|
|
12
|
+
"import": "./dist/index.js",
|
|
13
|
+
"require": "./dist/index.cjs"
|
|
14
|
+
}
|
|
15
|
+
},
|
|
16
|
+
"files": [
|
|
17
|
+
"dist"
|
|
18
|
+
],
|
|
19
|
+
"scripts": {
|
|
20
|
+
"build": "tsup",
|
|
21
|
+
"dev": "tsup --watch",
|
|
22
|
+
"test": "vitest run --passWithNoTests",
|
|
23
|
+
"test:watch": "vitest",
|
|
24
|
+
"lint": "tsc --noEmit",
|
|
25
|
+
"clean": "rimraf dist"
|
|
26
|
+
},
|
|
27
|
+
"keywords": [
|
|
28
|
+
"prompty",
|
|
29
|
+
"azure",
|
|
30
|
+
"foundry",
|
|
31
|
+
"llm",
|
|
32
|
+
"ai"
|
|
33
|
+
],
|
|
34
|
+
"author": "Microsoft",
|
|
35
|
+
"license": "MIT",
|
|
36
|
+
"publishConfig": {
|
|
37
|
+
"access": "public"
|
|
38
|
+
},
|
|
39
|
+
"repository": {
|
|
40
|
+
"type": "git",
|
|
41
|
+
"url": "git+https://github.com/microsoft/prompty.git",
|
|
42
|
+
"directory": "runtime/typescript/packages/foundry"
|
|
43
|
+
},
|
|
44
|
+
"engines": {
|
|
45
|
+
"node": ">=18.0.0"
|
|
46
|
+
},
|
|
47
|
+
"peerDependencies": {
|
|
48
|
+
"@prompty/core": "^2.0.0-alpha.1",
|
|
49
|
+
"@prompty/openai": "^2.0.0-alpha.1"
|
|
50
|
+
},
|
|
51
|
+
"dependencies": {
|
|
52
|
+
"@azure/ai-projects": "^2.0.1",
|
|
53
|
+
"@azure/identity": "^4.13.1",
|
|
54
|
+
"openai": "^4.80.0"
|
|
55
|
+
},
|
|
56
|
+
"devDependencies": {
|
|
57
|
+
"@prompty/core": "^2.0.0-alpha.1",
|
|
58
|
+
"@prompty/openai": "^2.0.0-alpha.1",
|
|
59
|
+
"@types/node": "^20.11.0",
|
|
60
|
+
"tsup": "^8.4.0",
|
|
61
|
+
"typescript": "^5.7.0",
|
|
62
|
+
"vitest": "^3.0.0"
|
|
63
|
+
}
|
|
64
|
+
}
|