@effect/ai-openai 0.37.2 → 4.0.0-beta.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (117) hide show
  1. package/dist/Generated.d.ts +70887 -0
  2. package/dist/Generated.d.ts.map +1 -0
  3. package/dist/Generated.js +4 -0
  4. package/dist/Generated.js.map +1 -0
  5. package/dist/OpenAiClient.d.ts +124 -0
  6. package/dist/OpenAiClient.d.ts.map +1 -0
  7. package/dist/OpenAiClient.js +128 -0
  8. package/dist/OpenAiClient.js.map +1 -0
  9. package/dist/{dts/OpenAiConfig.d.ts → OpenAiConfig.d.ts} +9 -9
  10. package/dist/OpenAiConfig.d.ts.map +1 -0
  11. package/dist/{esm/OpenAiConfig.js → OpenAiConfig.js} +8 -5
  12. package/dist/OpenAiConfig.js.map +1 -0
  13. package/dist/OpenAiError.d.ts +98 -0
  14. package/dist/OpenAiError.d.ts.map +1 -0
  15. package/dist/OpenAiError.js +10 -0
  16. package/dist/OpenAiError.js.map +1 -0
  17. package/dist/OpenAiLanguageModel.d.ts +318 -0
  18. package/dist/OpenAiLanguageModel.d.ts.map +1 -0
  19. package/dist/OpenAiLanguageModel.js +2207 -0
  20. package/dist/OpenAiLanguageModel.js.map +1 -0
  21. package/dist/{dts/OpenAiTelemetry.d.ts → OpenAiTelemetry.d.ts} +31 -13
  22. package/dist/OpenAiTelemetry.d.ts.map +1 -0
  23. package/dist/{esm/OpenAiTelemetry.js → OpenAiTelemetry.js} +11 -6
  24. package/dist/OpenAiTelemetry.js.map +1 -0
  25. package/dist/OpenAiTool.d.ts +479 -0
  26. package/dist/OpenAiTool.d.ts.map +1 -0
  27. package/dist/OpenAiTool.js +231 -0
  28. package/dist/OpenAiTool.js.map +1 -0
  29. package/dist/index.d.ts +58 -0
  30. package/dist/index.d.ts.map +1 -0
  31. package/dist/index.js +59 -0
  32. package/dist/index.js.map +1 -0
  33. package/dist/internal/errors.d.ts +2 -0
  34. package/dist/internal/errors.d.ts.map +1 -0
  35. package/dist/internal/errors.js +316 -0
  36. package/dist/internal/errors.js.map +1 -0
  37. package/dist/{dts/internal → internal}/utilities.d.ts.map +1 -1
  38. package/dist/{esm/internal → internal}/utilities.js +4 -3
  39. package/dist/internal/utilities.js.map +1 -0
  40. package/package.json +45 -97
  41. package/src/Generated.ts +28521 -20036
  42. package/src/OpenAiClient.ts +220 -1816
  43. package/src/OpenAiConfig.ts +20 -34
  44. package/src/OpenAiError.ts +107 -0
  45. package/src/OpenAiLanguageModel.ts +1807 -638
  46. package/src/OpenAiTelemetry.ts +24 -19
  47. package/src/OpenAiTool.ts +216 -70
  48. package/src/index.ts +35 -8
  49. package/src/internal/errors.ts +347 -0
  50. package/src/internal/utilities.ts +7 -5
  51. package/Generated/package.json +0 -6
  52. package/OpenAiClient/package.json +0 -6
  53. package/OpenAiConfig/package.json +0 -6
  54. package/OpenAiEmbeddingModel/package.json +0 -6
  55. package/OpenAiLanguageModel/package.json +0 -6
  56. package/OpenAiTelemetry/package.json +0 -6
  57. package/OpenAiTokenizer/package.json +0 -6
  58. package/OpenAiTool/package.json +0 -6
  59. package/README.md +0 -5
  60. package/dist/cjs/Generated.js +0 -7150
  61. package/dist/cjs/Generated.js.map +0 -1
  62. package/dist/cjs/OpenAiClient.js +0 -1567
  63. package/dist/cjs/OpenAiClient.js.map +0 -1
  64. package/dist/cjs/OpenAiConfig.js +0 -30
  65. package/dist/cjs/OpenAiConfig.js.map +0 -1
  66. package/dist/cjs/OpenAiEmbeddingModel.js +0 -155
  67. package/dist/cjs/OpenAiEmbeddingModel.js.map +0 -1
  68. package/dist/cjs/OpenAiLanguageModel.js +0 -1147
  69. package/dist/cjs/OpenAiLanguageModel.js.map +0 -1
  70. package/dist/cjs/OpenAiTelemetry.js +0 -38
  71. package/dist/cjs/OpenAiTelemetry.js.map +0 -1
  72. package/dist/cjs/OpenAiTokenizer.js +0 -83
  73. package/dist/cjs/OpenAiTokenizer.js.map +0 -1
  74. package/dist/cjs/OpenAiTool.js +0 -93
  75. package/dist/cjs/OpenAiTool.js.map +0 -1
  76. package/dist/cjs/index.js +0 -24
  77. package/dist/cjs/index.js.map +0 -1
  78. package/dist/cjs/internal/utilities.js +0 -32
  79. package/dist/cjs/internal/utilities.js.map +0 -1
  80. package/dist/dts/Generated.d.ts +0 -40661
  81. package/dist/dts/Generated.d.ts.map +0 -1
  82. package/dist/dts/OpenAiClient.d.ts +0 -3120
  83. package/dist/dts/OpenAiClient.d.ts.map +0 -1
  84. package/dist/dts/OpenAiConfig.d.ts.map +0 -1
  85. package/dist/dts/OpenAiEmbeddingModel.d.ts +0 -109
  86. package/dist/dts/OpenAiEmbeddingModel.d.ts.map +0 -1
  87. package/dist/dts/OpenAiLanguageModel.d.ts +0 -235
  88. package/dist/dts/OpenAiLanguageModel.d.ts.map +0 -1
  89. package/dist/dts/OpenAiTelemetry.d.ts.map +0 -1
  90. package/dist/dts/OpenAiTokenizer.d.ts +0 -17
  91. package/dist/dts/OpenAiTokenizer.d.ts.map +0 -1
  92. package/dist/dts/OpenAiTool.d.ts +0 -200
  93. package/dist/dts/OpenAiTool.d.ts.map +0 -1
  94. package/dist/dts/index.d.ts +0 -33
  95. package/dist/dts/index.d.ts.map +0 -1
  96. package/dist/esm/Generated.js +0 -7150
  97. package/dist/esm/Generated.js.map +0 -1
  98. package/dist/esm/OpenAiClient.js +0 -1504
  99. package/dist/esm/OpenAiClient.js.map +0 -1
  100. package/dist/esm/OpenAiConfig.js.map +0 -1
  101. package/dist/esm/OpenAiEmbeddingModel.js +0 -143
  102. package/dist/esm/OpenAiEmbeddingModel.js.map +0 -1
  103. package/dist/esm/OpenAiLanguageModel.js +0 -1134
  104. package/dist/esm/OpenAiLanguageModel.js.map +0 -1
  105. package/dist/esm/OpenAiTelemetry.js.map +0 -1
  106. package/dist/esm/OpenAiTokenizer.js +0 -73
  107. package/dist/esm/OpenAiTokenizer.js.map +0 -1
  108. package/dist/esm/OpenAiTool.js +0 -84
  109. package/dist/esm/OpenAiTool.js.map +0 -1
  110. package/dist/esm/index.js +0 -33
  111. package/dist/esm/index.js.map +0 -1
  112. package/dist/esm/internal/utilities.js.map +0 -1
  113. package/dist/esm/package.json +0 -4
  114. package/index/package.json +0 -6
  115. package/src/OpenAiEmbeddingModel.ts +0 -243
  116. package/src/OpenAiTokenizer.ts +0 -70
  117. /package/dist/{dts/internal → internal}/utilities.d.ts +0 -0
@@ -1,1147 +0,0 @@
1
- "use strict";
2
-
3
- Object.defineProperty(exports, "__esModule", {
4
- value: true
5
- });
6
- exports.withConfigOverride = exports.modelWithTokenizer = exports.model = exports.make = exports.layerWithTokenizer = exports.layer = exports.Config = void 0;
7
- var AiError = _interopRequireWildcard(require("@effect/ai/AiError"));
8
- var IdGenerator = _interopRequireWildcard(require("@effect/ai/IdGenerator"));
9
- var LanguageModel = _interopRequireWildcard(require("@effect/ai/LanguageModel"));
10
- var AiModel = _interopRequireWildcard(require("@effect/ai/Model"));
11
- var Tool = _interopRequireWildcard(require("@effect/ai/Tool"));
12
- var Context = _interopRequireWildcard(require("effect/Context"));
13
- var DateTime = _interopRequireWildcard(require("effect/DateTime"));
14
- var Effect = _interopRequireWildcard(require("effect/Effect"));
15
- var Encoding = _interopRequireWildcard(require("effect/Encoding"));
16
- var _Function = require("effect/Function");
17
- var Layer = _interopRequireWildcard(require("effect/Layer"));
18
- var Predicate = _interopRequireWildcard(require("effect/Predicate"));
19
- var Stream = _interopRequireWildcard(require("effect/Stream"));
20
- var InternalUtilities = _interopRequireWildcard(require("./internal/utilities.js"));
21
- var _OpenAiClient = require("./OpenAiClient.js");
22
- var _OpenAiTelemetry = require("./OpenAiTelemetry.js");
23
- var OpenAiTokenizer = _interopRequireWildcard(require("./OpenAiTokenizer.js"));
24
- var OpenAiTool = _interopRequireWildcard(require("./OpenAiTool.js"));
25
- function _interopRequireWildcard(e, t) { if ("function" == typeof WeakMap) var r = new WeakMap(), n = new WeakMap(); return (_interopRequireWildcard = function (e, t) { if (!t && e && e.__esModule) return e; var o, i, f = { __proto__: null, default: e }; if (null === e || "object" != typeof e && "function" != typeof e) return f; if (o = t ? n : r) { if (o.has(e)) return o.get(e); o.set(e, f); } for (const t in e) "default" !== t && {}.hasOwnProperty.call(e, t) && ((i = (o = Object.defineProperty) && Object.getOwnPropertyDescriptor(e, t)) && (i.get || i.set) ? o(f, t, i) : f[t] = e[t]); return f; })(e, t); }
26
- /**
27
- * @since 1.0.0
28
- */
29
-
30
- // =============================================================================
31
- // Configuration
32
- // =============================================================================
33
- /**
34
- * @since 1.0.0
35
- * @category Context
36
- */
37
- class Config extends /*#__PURE__*/Context.Tag("@effect/ai-openai/OpenAiLanguageModel/Config")() {
38
- /**
39
- * @since 1.0.0
40
- */
41
- static getOrUndefined = /*#__PURE__*/Effect.map(/*#__PURE__*/Effect.context(), context => context.unsafeMap.get(Config.key));
42
- }
43
- // =============================================================================
44
- // OpenAI Language Model
45
- // =============================================================================
46
- /**
47
- * @since 1.0.0
48
- * @category Ai Models
49
- */
50
- exports.Config = Config;
51
- const model = (model, config) => AiModel.make("openai", layer({
52
- model,
53
- config
54
- }));
55
- /**
56
- * @since 1.0.0
57
- * @category Ai Models
58
- */
59
- exports.model = model;
60
- const modelWithTokenizer = (model, config) => AiModel.make("openai", layerWithTokenizer({
61
- model,
62
- config
63
- }));
64
- /**
65
- * @since 1.0.0
66
- * @category Constructors
67
- */
68
- exports.modelWithTokenizer = modelWithTokenizer;
69
- const make = exports.make = /*#__PURE__*/Effect.fnUntraced(function* (options) {
70
- const client = yield* _OpenAiClient.OpenAiClient;
71
- const makeRequest = Effect.fnUntraced(function* (providerOptions) {
72
- const context = yield* Effect.context();
73
- const config = {
74
- model: options.model,
75
- ...options.config,
76
- ...context.unsafeMap.get(Config.key)
77
- };
78
- const messages = yield* prepareMessages(providerOptions, config);
79
- const {
80
- toolChoice,
81
- tools
82
- } = yield* prepareTools(providerOptions);
83
- const include = prepareInclude(providerOptions, config);
84
- const responseFormat = prepareResponseFormat(providerOptions);
85
- const verbosity = config.text?.verbosity;
86
- const request = {
87
- ...config,
88
- input: messages,
89
- include,
90
- text: {
91
- format: responseFormat,
92
- verbosity
93
- },
94
- tools,
95
- tool_choice: toolChoice
96
- };
97
- return request;
98
- });
99
- return yield* LanguageModel.make({
100
- generateText: Effect.fnUntraced(function* (options) {
101
- const request = yield* makeRequest(options);
102
- annotateRequest(options.span, request);
103
- const rawResponse = yield* client.createResponse(request);
104
- annotateResponse(options.span, rawResponse);
105
- return yield* makeResponse(rawResponse, options);
106
- }),
107
- streamText: Effect.fnUntraced(function* (options) {
108
- const request = yield* makeRequest(options);
109
- annotateRequest(options.span, request);
110
- return client.createResponseStream(request);
111
- }, (effect, options) => effect.pipe(Effect.flatMap(stream => makeStreamResponse(stream, options)), Stream.unwrap, Stream.map(response => {
112
- annotateStreamResponse(options.span, response);
113
- return response;
114
- })))
115
- });
116
- });
117
- /**
118
- * @since 1.0.0
119
- * @category Layers
120
- */
121
- const layer = options => Layer.effect(LanguageModel.LanguageModel, make({
122
- model: options.model,
123
- config: options.config
124
- }));
125
- /**
126
- * @since 1.0.0
127
- * @category Layers
128
- */
129
- exports.layer = layer;
130
- const layerWithTokenizer = options => Layer.merge(layer(options), OpenAiTokenizer.layer(options));
131
- /**
132
- * @since 1.0.0
133
- * @category Configuration
134
- */
135
- exports.layerWithTokenizer = layerWithTokenizer;
136
- const withConfigOverride = exports.withConfigOverride = /*#__PURE__*/(0, _Function.dual)(2, (self, overrides) => Effect.flatMap(Config.getOrUndefined, config => Effect.provideService(self, Config, {
137
- ...config,
138
- ...overrides
139
- })));
140
- // =============================================================================
141
- // Prompt Conversion
142
- // =============================================================================
143
- const getSystemMessageMode = model => model.startsWith("o") || model.startsWith("gpt-5") || model.startsWith("codex-") || model.startsWith("computer-use") ? "developer" : "system";
144
- const prepareMessages = /*#__PURE__*/Effect.fnUntraced(function* (options, config) {
145
- const messages = [];
146
- for (const message of options.prompt.content) {
147
- switch (message.role) {
148
- case "system":
149
- {
150
- messages.push({
151
- role: getSystemMessageMode(config.model),
152
- content: message.content
153
- });
154
- break;
155
- }
156
- case "user":
157
- {
158
- const content = [];
159
- for (let index = 0; index < message.content.length; index++) {
160
- const part = message.content[index];
161
- switch (part.type) {
162
- case "text":
163
- {
164
- content.push({
165
- type: "input_text",
166
- text: part.text
167
- });
168
- break;
169
- }
170
- case "file":
171
- {
172
- if (part.mediaType.startsWith("image/")) {
173
- const detail = getImageDetail(part);
174
- const mediaType = part.mediaType === "image/*" ? "image/jpeg" : part.mediaType;
175
- if (typeof part.data === "string" && isFileId(part.data, config)) {
176
- content.push({
177
- type: "input_image",
178
- file_id: part.data,
179
- detail
180
- });
181
- }
182
- if (part.data instanceof URL) {
183
- content.push({
184
- type: "input_image",
185
- image_url: part.data.toString(),
186
- detail
187
- });
188
- }
189
- if (part.data instanceof Uint8Array) {
190
- const base64 = Encoding.encodeBase64(part.data);
191
- const imageUrl = `data:${mediaType};base64,${base64}`;
192
- content.push({
193
- type: "input_image",
194
- image_url: imageUrl,
195
- detail
196
- });
197
- }
198
- } else if (part.mediaType === "application/pdf") {
199
- if (typeof part.data === "string" && isFileId(part.data, config)) {
200
- content.push({
201
- type: "input_file",
202
- file_id: part.data
203
- });
204
- }
205
- if (part.data instanceof URL) {
206
- content.push({
207
- type: "input_file",
208
- file_url: part.data.toString()
209
- });
210
- }
211
- if (part.data instanceof Uint8Array) {
212
- const base64 = Encoding.encodeBase64(part.data);
213
- const fileName = part.fileName ?? `part-${index}.pdf`;
214
- const fileData = `data:application/pdf;base64,${base64}`;
215
- content.push({
216
- type: "input_file",
217
- filename: fileName,
218
- file_data: fileData
219
- });
220
- }
221
- } else {
222
- return yield* new AiError.MalformedInput({
223
- module: "OpenAiLanguageModel",
224
- method: "prepareMessages",
225
- description: `Detected unsupported media type for file: '${part.mediaType}'`
226
- });
227
- }
228
- }
229
- }
230
- }
231
- messages.push({
232
- role: "user",
233
- content
234
- });
235
- break;
236
- }
237
- case "assistant":
238
- {
239
- const reasoningMessages = {};
240
- for (const part of message.content) {
241
- switch (part.type) {
242
- case "text":
243
- {
244
- messages.push({
245
- role: "assistant",
246
- content: [{
247
- type: "output_text",
248
- text: part.text
249
- }],
250
- id: getItemId(part)
251
- });
252
- break;
253
- }
254
- case "reasoning":
255
- {
256
- const options = part.options.openai;
257
- if (Predicate.isNotUndefined(options?.itemId)) {
258
- const reasoningMessage = reasoningMessages[options.itemId];
259
- const summaryParts = [];
260
- if (part.text.length > 0) {
261
- summaryParts.push({
262
- type: "summary_text",
263
- text: part.text
264
- });
265
- }
266
- if (Predicate.isUndefined(reasoningMessage)) {
267
- reasoningMessages[options.itemId] = {
268
- id: options.itemId,
269
- type: "reasoning",
270
- summary: summaryParts,
271
- encrypted_content: options.encryptedContent
272
- };
273
- messages.push(reasoningMessages[options.itemId]);
274
- } else {
275
- for (const summaryPart of summaryParts) {
276
- reasoningMessage.summary.push(summaryPart);
277
- }
278
- }
279
- }
280
- break;
281
- }
282
- case "tool-call":
283
- {
284
- if (!part.providerExecuted) {
285
- messages.push({
286
- id: getItemId(part),
287
- type: "function_call",
288
- call_id: part.id,
289
- name: part.name,
290
- arguments: JSON.stringify(part.params)
291
- });
292
- }
293
- break;
294
- }
295
- }
296
- }
297
- break;
298
- }
299
- case "tool":
300
- {
301
- for (const part of message.content) {
302
- messages.push({
303
- type: "function_call_output",
304
- call_id: part.id,
305
- output: JSON.stringify(part.result)
306
- });
307
- }
308
- break;
309
- }
310
- }
311
- }
312
- return messages;
313
- });
314
- // =============================================================================
315
- // Response Conversion
316
- // =============================================================================
317
- const makeResponse = /*#__PURE__*/Effect.fnUntraced(function* (response, options) {
318
- const idGenerator = yield* IdGenerator.IdGenerator;
319
- const webSearchTool = options.tools.find(tool => Tool.isProviderDefined(tool) && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview"));
320
- let hasToolCalls = false;
321
- const parts = [];
322
- const createdAt = new Date(response.created_at * 1000);
323
- parts.push({
324
- type: "response-metadata",
325
- id: response.id,
326
- modelId: response.model,
327
- timestamp: DateTime.formatIso(DateTime.unsafeFromDate(createdAt))
328
- });
329
- for (const part of response.output) {
330
- switch (part.type) {
331
- case "message":
332
- {
333
- for (const contentPart of part.content) {
334
- switch (contentPart.type) {
335
- case "output_text":
336
- {
337
- parts.push({
338
- type: "text",
339
- text: contentPart.text,
340
- metadata: {
341
- openai: {
342
- itemId: part.id
343
- }
344
- }
345
- });
346
- for (const annotation of contentPart.annotations) {
347
- if (annotation.type === "file_citation") {
348
- const metadata = {
349
- type: annotation.type,
350
- index: annotation.index
351
- };
352
- parts.push({
353
- type: "source",
354
- sourceType: "document",
355
- id: yield* idGenerator.generateId(),
356
- mediaType: "text/plain",
357
- title: annotation.filename ?? "Untitled Document",
358
- metadata: {
359
- openai: metadata
360
- }
361
- });
362
- }
363
- if (annotation.type === "url_citation") {
364
- const metadata = {
365
- type: annotation.type,
366
- startIndex: annotation.start_index,
367
- endIndex: annotation.end_index
368
- };
369
- parts.push({
370
- type: "source",
371
- sourceType: "url",
372
- id: yield* idGenerator.generateId(),
373
- url: annotation.url,
374
- title: annotation.title,
375
- metadata: {
376
- openai: metadata
377
- }
378
- });
379
- }
380
- }
381
- break;
382
- }
383
- case "refusal":
384
- {
385
- parts.push({
386
- type: "text",
387
- text: "",
388
- metadata: {
389
- openai: {
390
- refusal: contentPart.refusal
391
- }
392
- }
393
- });
394
- break;
395
- }
396
- }
397
- }
398
- break;
399
- }
400
- case "function_call":
401
- {
402
- hasToolCalls = true;
403
- const toolName = part.name;
404
- const toolParams = part.arguments;
405
- const params = yield* Effect.try({
406
- try: () => Tool.unsafeSecureJsonParse(toolParams),
407
- catch: cause => new AiError.MalformedOutput({
408
- module: "OpenAiLanguageModel",
409
- method: "makeResponse",
410
- description: "Failed to securely parse tool call parameters " + `for tool '${toolName}':\nParameters: ${toolParams}`,
411
- cause
412
- })
413
- });
414
- parts.push({
415
- type: "tool-call",
416
- id: part.call_id,
417
- name: toolName,
418
- params,
419
- metadata: {
420
- openai: {
421
- itemId: part.id
422
- }
423
- }
424
- });
425
- break;
426
- }
427
- case "code_interpreter_call":
428
- {
429
- parts.push({
430
- type: "tool-call",
431
- id: part.id,
432
- name: "OpenAiCodeInterpreter",
433
- params: {
434
- code: part.code,
435
- container_id: part.container_id
436
- },
437
- providerName: "code_interpreter",
438
- providerExecuted: true
439
- });
440
- parts.push({
441
- type: "tool-result",
442
- id: part.id,
443
- name: "OpenAiCodeInterpreter",
444
- isFailure: false,
445
- result: part.outputs,
446
- providerName: "code_interpreter",
447
- providerExecuted: true
448
- });
449
- break;
450
- }
451
- case "file_search_call":
452
- {
453
- parts.push({
454
- type: "tool-call",
455
- id: part.id,
456
- name: "OpenAiFileSearch",
457
- params: {},
458
- providerName: "file_search",
459
- providerExecuted: true
460
- });
461
- parts.push({
462
- type: "tool-result",
463
- id: part.id,
464
- name: "OpenAiFileSearch",
465
- isFailure: false,
466
- result: {
467
- status: part.status,
468
- queries: part.queries,
469
- ...(part.results && {
470
- results: part.results
471
- })
472
- },
473
- providerName: "file_search",
474
- providerExecuted: true
475
- });
476
- break;
477
- }
478
- case "web_search_call":
479
- {
480
- parts.push({
481
- type: "tool-call",
482
- id: part.id,
483
- name: webSearchTool?.name ?? "OpenAiWebSearch",
484
- params: {
485
- action: part.action
486
- },
487
- providerName: webSearchTool?.providerName ?? "web_search",
488
- providerExecuted: true
489
- });
490
- parts.push({
491
- type: "tool-result",
492
- id: part.id,
493
- name: webSearchTool?.name ?? "OpenAiWebSearch",
494
- isFailure: false,
495
- result: {
496
- status: part.status
497
- },
498
- providerName: webSearchTool?.providerName ?? "web_search",
499
- providerExecuted: true
500
- });
501
- break;
502
- }
503
- // TODO(Max): support computer use
504
- // case "computer_call": {
505
- // parts.push({
506
- // type: "tool-call",
507
- // id: part.id,
508
- // name: "OpenAiComputerUse",
509
- // params: { action: part.action },
510
- // providerName: webSearchTool?.providerName ?? "web_search",
511
- // providerExecuted: true
512
- // })
513
- //
514
- // parts.push({
515
- // type: "tool-result",
516
- // id: part.id,
517
- // name: webSearchTool?.name ?? "OpenAiWebSearch",
518
- // result: { status: part.status },
519
- // providerName: webSearchTool?.providerName ?? "web_search",
520
- // providerExecuted: true
521
- // })
522
- // break
523
- // }
524
- case "reasoning":
525
- {
526
- // If there are no summary parts, we have to add an empty one to
527
- // propagate the part identifier
528
- if (part.summary.length === 0) {
529
- parts.push({
530
- type: "reasoning",
531
- text: "",
532
- metadata: {
533
- openai: {
534
- itemId: part.id
535
- }
536
- }
537
- });
538
- } else {
539
- for (const summary of part.summary) {
540
- const metadata = {
541
- itemId: part.id,
542
- encryptedContent: part.encrypted_content ?? undefined
543
- };
544
- parts.push({
545
- type: "reasoning",
546
- text: summary.text,
547
- metadata: {
548
- openai: metadata
549
- }
550
- });
551
- }
552
- }
553
- break;
554
- }
555
- }
556
- }
557
- const finishReason = InternalUtilities.resolveFinishReason(response.incomplete_details?.reason, hasToolCalls);
558
- const metadata = {
559
- serviceTier: response.service_tier
560
- };
561
- parts.push({
562
- type: "finish",
563
- reason: finishReason,
564
- usage: {
565
- inputTokens: response.usage?.input_tokens,
566
- outputTokens: response.usage?.output_tokens,
567
- totalTokens: (response.usage?.input_tokens ?? 0) + (response.usage?.output_tokens ?? 0),
568
- reasoningTokens: response.usage?.output_tokens_details?.reasoning_tokens,
569
- cachedInputTokens: response.usage?.input_tokens_details?.cached_tokens
570
- },
571
- metadata: {
572
- openai: metadata
573
- }
574
- });
575
- return parts;
576
- });
577
- const makeStreamResponse = /*#__PURE__*/Effect.fnUntraced(function* (stream, options) {
578
- const idGenerator = yield* IdGenerator.IdGenerator;
579
- let hasToolCalls = false;
580
- const activeReasoning = {};
581
- const activeToolCalls = {};
582
- const webSearchTool = options.tools.find(tool => Tool.isProviderDefined(tool) && (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview"));
583
- return stream.pipe(Stream.mapEffect(Effect.fnUntraced(function* (event) {
584
- const parts = [];
585
- switch (event.type) {
586
- case "response.created":
587
- {
588
- const createdAt = new Date(event.response.created_at * 1000);
589
- parts.push({
590
- type: "response-metadata",
591
- id: event.response.id,
592
- modelId: event.response.model,
593
- timestamp: DateTime.formatIso(DateTime.unsafeFromDate(createdAt))
594
- });
595
- break;
596
- }
597
- case "error":
598
- {
599
- parts.push({
600
- type: "error",
601
- error: event
602
- });
603
- break;
604
- }
605
- case "response.completed":
606
- case "response.incomplete":
607
- case "response.failed":
608
- {
609
- parts.push({
610
- type: "finish",
611
- reason: InternalUtilities.resolveFinishReason(event.response.incomplete_details?.reason, hasToolCalls),
612
- usage: {
613
- inputTokens: event.response.usage?.input_tokens,
614
- outputTokens: event.response.usage?.output_tokens,
615
- totalTokens: (event.response.usage?.input_tokens ?? 0) + (event.response.usage?.output_tokens ?? 0),
616
- reasoningTokens: event.response.usage?.output_tokens_details?.reasoning_tokens,
617
- cachedInputTokens: event.response.usage?.input_tokens_details?.cached_tokens
618
- },
619
- metadata: {
620
- openai: {
621
- serviceTier: event.response.service_tier
622
- }
623
- }
624
- });
625
- break;
626
- }
627
- case "response.output_item.added":
628
- {
629
- switch (event.item.type) {
630
- case "computer_call":
631
- {
632
- // TODO(Max): support computer use
633
- break;
634
- }
635
- case "file_search_call":
636
- {
637
- activeToolCalls[event.output_index] = {
638
- id: event.item.id,
639
- name: "OpenAiFileSearch"
640
- };
641
- parts.push({
642
- type: "tool-params-start",
643
- id: event.item.id,
644
- name: "OpenAiFileSearch",
645
- providerName: "file_search",
646
- providerExecuted: true
647
- });
648
- break;
649
- }
650
- case "function_call":
651
- {
652
- activeToolCalls[event.output_index] = {
653
- id: event.item.call_id,
654
- name: event.item.name
655
- };
656
- parts.push({
657
- type: "tool-params-start",
658
- id: event.item.call_id,
659
- name: event.item.name
660
- });
661
- break;
662
- }
663
- case "message":
664
- {
665
- parts.push({
666
- type: "text-start",
667
- id: event.item.id,
668
- metadata: {
669
- openai: {
670
- itemId: event.item.id
671
- }
672
- }
673
- });
674
- break;
675
- }
676
- case "reasoning":
677
- {
678
- activeReasoning[event.item.id] = {
679
- summaryParts: [0],
680
- encryptedContent: event.item.encrypted_content
681
- };
682
- parts.push({
683
- type: "reasoning-start",
684
- id: `${event.item.id}:0`,
685
- metadata: {
686
- openai: {
687
- itemId: event.item.id,
688
- encryptedContent: event.item.encrypted_content
689
- }
690
- }
691
- });
692
- break;
693
- }
694
- case "web_search_call":
695
- {
696
- activeToolCalls[event.output_index] = {
697
- id: event.item.id,
698
- name: webSearchTool?.name ?? "OpenAiWebSearch"
699
- };
700
- parts.push({
701
- type: "tool-params-start",
702
- id: event.item.id,
703
- name: webSearchTool?.name ?? "OpenAiWebSearch",
704
- providerName: webSearchTool?.providerName ?? "web_search",
705
- providerExecuted: true
706
- });
707
- break;
708
- }
709
- }
710
- break;
711
- }
712
- case "response.output_item.done":
713
- {
714
- switch (event.item.type) {
715
- case "code_interpreter_call":
716
- {
717
- parts.push({
718
- type: "tool-call",
719
- id: event.item.id,
720
- name: "OpenAiCodeInterpreter",
721
- params: {
722
- code: event.item.code,
723
- container_id: event.item.container_id
724
- },
725
- providerName: "code_interpreter",
726
- providerExecuted: true
727
- });
728
- parts.push({
729
- type: "tool-result",
730
- id: event.item.id,
731
- name: "OpenAiCodeInterpreter",
732
- isFailure: false,
733
- result: {
734
- outputs: event.item.outputs
735
- },
736
- providerName: "code_interpreter",
737
- providerExecuted: true
738
- });
739
- break;
740
- }
741
- // TODO(Max): support computer use
742
- case "computer_call":
743
- {
744
- break;
745
- }
746
- case "file_search_call":
747
- {
748
- delete activeToolCalls[event.output_index];
749
- parts.push({
750
- type: "tool-params-end",
751
- id: event.item.id
752
- });
753
- parts.push({
754
- type: "tool-call",
755
- id: event.item.id,
756
- name: "OpenAiFileSearch",
757
- params: {},
758
- providerName: "file_search",
759
- providerExecuted: true
760
- });
761
- parts.push({
762
- type: "tool-result",
763
- id: event.item.id,
764
- name: "OpenAiFileSearch",
765
- isFailure: false,
766
- result: {
767
- status: event.item.status,
768
- queries: event.item.queries,
769
- ...(event.item.results && {
770
- results: event.item.results
771
- })
772
- },
773
- providerName: "file_search",
774
- providerExecuted: true
775
- });
776
- break;
777
- }
778
- case "function_call":
779
- {
780
- hasToolCalls = true;
781
- const toolName = event.item.name;
782
- const toolParams = event.item.arguments;
783
- const params = yield* Effect.try({
784
- try: () => Tool.unsafeSecureJsonParse(toolParams),
785
- catch: cause => new AiError.MalformedOutput({
786
- module: "OpenAiLanguageModel",
787
- method: "makeStreamResponse",
788
- description: "Failed to securely parse tool call parameters " + `for tool '${toolName}':\nParameters: ${toolParams}`,
789
- cause
790
- })
791
- });
792
- parts.push({
793
- type: "tool-params-end",
794
- id: event.item.call_id
795
- });
796
- parts.push({
797
- type: "tool-call",
798
- id: event.item.call_id,
799
- name: toolName,
800
- params,
801
- metadata: {
802
- openai: {
803
- itemId: event.item.id
804
- }
805
- }
806
- });
807
- delete activeToolCalls[event.output_index];
808
- break;
809
- }
810
- case "message":
811
- {
812
- parts.push({
813
- type: "text-end",
814
- id: event.item.id
815
- });
816
- break;
817
- }
818
- case "reasoning":
819
- {
820
- const reasoningPart = activeReasoning[event.item.id];
821
- for (const summaryIndex of reasoningPart.summaryParts) {
822
- parts.push({
823
- type: "reasoning-end",
824
- id: `${event.item.id}:${summaryIndex}`,
825
- metadata: {
826
- openai: {
827
- itemId: event.item.id,
828
- encryptedContent: event.item.encrypted_content
829
- }
830
- }
831
- });
832
- }
833
- delete activeReasoning[event.item.id];
834
- break;
835
- }
836
- case "web_search_call":
837
- {
838
- delete activeToolCalls[event.output_index];
839
- parts.push({
840
- type: "tool-params-end",
841
- id: event.item.id
842
- });
843
- parts.push({
844
- type: "tool-call",
845
- id: event.item.id,
846
- name: "OpenAiWebSearch",
847
- params: {
848
- action: event.item.action
849
- },
850
- providerName: "web_search",
851
- providerExecuted: true
852
- });
853
- parts.push({
854
- type: "tool-result",
855
- id: event.item.id,
856
- name: "OpenAiWebSearch",
857
- isFailure: false,
858
- result: {
859
- status: event.item.status
860
- },
861
- providerName: "web_search",
862
- providerExecuted: true
863
- });
864
- break;
865
- }
866
- }
867
- break;
868
- }
869
- case "response.output_text.delta":
870
- {
871
- parts.push({
872
- type: "text-delta",
873
- id: event.item_id,
874
- delta: event.delta
875
- });
876
- break;
877
- }
878
- case "response.output_text.annotation.added":
879
- {
880
- if (event.annotation.type === "file_citation") {
881
- parts.push({
882
- type: "source",
883
- sourceType: "document",
884
- id: yield* idGenerator.generateId(),
885
- mediaType: "text/plain",
886
- title: event.annotation.filename ?? "Untitled Document",
887
- fileName: event.annotation.filename ?? event.annotation.file_id
888
- });
889
- }
890
- if (event.annotation.type === "url_citation") {
891
- parts.push({
892
- type: "source",
893
- sourceType: "url",
894
- id: yield* idGenerator.generateId(),
895
- url: event.annotation.url,
896
- title: event.annotation.title
897
- });
898
- }
899
- break;
900
- }
901
- case "response.function_call_arguments.delta":
902
- {
903
- const toolCallPart = activeToolCalls[event.output_index];
904
- if (Predicate.isNotUndefined(toolCallPart)) {
905
- parts.push({
906
- type: "tool-params-delta",
907
- id: toolCallPart.id,
908
- delta: event.delta
909
- });
910
- }
911
- break;
912
- }
913
- case "response.reasoning_summary_part.added":
914
- {
915
- // The first reasoning start is pushed in the `response.output_item.added` block
916
- if (event.summary_index > 0) {
917
- const reasoningPart = activeReasoning[event.item_id];
918
- if (Predicate.isNotUndefined(reasoningPart)) {
919
- reasoningPart.summaryParts.push(event.summary_index);
920
- }
921
- parts.push({
922
- type: "reasoning-start",
923
- id: `${event.item_id}:${event.summary_index}`,
924
- metadata: {
925
- openai: {
926
- itemId: event.item_id,
927
- encryptedContent: reasoningPart?.encryptedContent
928
- }
929
- }
930
- });
931
- }
932
- break;
933
- }
934
- case "response.reasoning_summary_text.delta":
935
- {
936
- parts.push({
937
- type: "reasoning-delta",
938
- id: `${event.item_id}:${event.summary_index}`,
939
- delta: event.delta,
940
- metadata: {
941
- openai: {
942
- itemId: event.item_id
943
- }
944
- }
945
- });
946
- break;
947
- }
948
- }
949
- return parts;
950
- })), Stream.flattenIterables);
951
- });
952
- // =============================================================================
953
- // Telemetry
954
- // =============================================================================
955
/**
 * Annotates the given tracing span with GenAI attributes describing an
 * outgoing OpenAI request (model, sampling parameters, and OpenAI-specific
 * request options such as response format and service tier).
 */
const annotateRequest = (span, request) => {
  // Core GenAI request attributes shared across providers.
  const requestAttributes = {
    model: request.model,
    temperature: request.temperature,
    topP: request.top_p,
    maxTokens: request.max_output_tokens
  };
  // OpenAI-specific request attributes.
  const openaiAttributes = {
    request: {
      responseFormat: request.text?.format?.type,
      serviceTier: request.service_tier
    }
  };
  (0, _OpenAiTelemetry.addGenAIAnnotations)(span, {
    system: "openai",
    operation: {
      name: "chat"
    },
    request: requestAttributes,
    openai: openaiAttributes
  });
};
975
/**
 * Annotates the given tracing span with GenAI attributes describing a
 * completed (non-streaming) OpenAI response: identifiers, finish reason,
 * token usage, and the OpenAI service tier actually used.
 */
const annotateResponse = (span, response) => {
  const reason = response.incomplete_details?.reason;
  (0, _OpenAiTelemetry.addGenAIAnnotations)(span, {
    response: {
      id: response.id,
      model: response.model,
      // Only emit finish reasons when the response reported one.
      finishReasons: reason !== undefined ? [reason] : undefined
    },
    usage: {
      inputTokens: response.usage?.input_tokens,
      outputTokens: response.usage?.output_tokens
    },
    openai: {
      response: {
        serviceTier: response.service_tier
      }
    }
  });
};
994
/**
 * Annotates the given tracing span from individual streaming response parts.
 * Response id/model come from the `"response-metadata"` part; finish reason,
 * usage, and service tier come from the terminal `"finish"` part. All other
 * part types are ignored.
 */
const annotateStreamResponse = (span, part) => {
  switch (part.type) {
    case "response-metadata": {
      (0, _OpenAiTelemetry.addGenAIAnnotations)(span, {
        response: {
          id: part.id,
          model: part.modelId
        }
      });
      break;
    }
    case "finish": {
      (0, _OpenAiTelemetry.addGenAIAnnotations)(span, {
        response: {
          finishReasons: [part.reason]
        },
        usage: {
          inputTokens: part.usage.inputTokens,
          outputTokens: part.usage.outputTokens
        },
        openai: {
          response: {
            serviceTier: part.metadata?.openai?.serviceTier
          }
        }
      });
      break;
    }
  }
};
1021
/**
 * Converts the toolkit's tools and tool-choice directive into the shape
 * expected by the OpenAI Responses API.
 *
 * - User-defined tools become `type: "function"` entries with their JSON
 *   schema and `strict: true`.
 * - Provider-defined tools are mapped to their OpenAI built-in tool type
 *   (code interpreter, file search, web search / preview).
 * - An unknown provider-defined tool id fails with `AiError.MalformedInput`.
 *
 * Returns `{ tools, toolChoice }`, both `undefined` when the toolkit is empty.
 */
const prepareTools = /*#__PURE__*/Effect.fnUntraced(function* (options) {
  // Return immediately if no tools are in the toolkit
  if (options.tools.length === 0) {
    return {
      tools: undefined,
      toolChoice: undefined
    };
  }
  const tools = [];
  let toolChoice = undefined;
  // Filter the incoming tools down to the set of allowed tools as indicated by
  // the tool choice. This must be done here given that there is no tool name
  // in OpenAI's provider-defined tools, so there would be no way to perform
  // this filter otherwise
  let allowedTools = options.tools;
  if (typeof options.toolChoice === "object" && "oneOf" in options.toolChoice) {
    const allowedToolNames = new Set(options.toolChoice.oneOf);
    allowedTools = options.tools.filter(tool => allowedToolNames.has(tool.name));
    toolChoice = options.toolChoice.mode === "required" ? "required" : "auto";
  }
  // Convert the tools in the toolkit to the provider-defined format
  for (const tool of allowedTools) {
    if (Tool.isUserDefined(tool)) {
      tools.push({
        type: "function",
        name: tool.name,
        description: Tool.getDescription(tool),
        parameters: Tool.getJsonSchema(tool),
        strict: true
      });
    }
    if (Tool.isProviderDefined(tool)) {
      switch (tool.id) {
        case "openai.code_interpreter": {
          tools.push({
            ...tool.args,
            type: "code_interpreter"
          });
          break;
        }
        case "openai.file_search": {
          tools.push({
            ...tool.args,
            type: "file_search"
          });
          break;
        }
        case "openai.web_search": {
          tools.push({
            ...tool.args,
            type: "web_search"
          });
          break;
        }
        case "openai.web_search_preview": {
          tools.push({
            ...tool.args,
            type: "web_search_preview"
          });
          break;
        }
        default: {
          return yield* new AiError.MalformedInput({
            // FIX: previously reported "AnthropicLanguageModel" — a
            // copy-paste leftover from the Anthropic provider package.
            module: "OpenAiLanguageModel",
            method: "prepareTools",
            description: `Received request to call unknown provider-defined tool '${tool.name}'`
          });
        }
      }
    }
  }
  if (options.toolChoice === "auto" || options.toolChoice === "none" || options.toolChoice === "required") {
    toolChoice = options.toolChoice;
  }
  if (typeof options.toolChoice === "object" && "tool" in options.toolChoice) {
    // A named user-defined tool is forced via `{ type: "function", name }`;
    // a provider-defined tool is forced via its OpenAI tool type.
    toolChoice = Predicate.isUndefined(OpenAiTool.getProviderDefinedToolName(options.toolChoice.tool)) ? {
      type: "function",
      name: options.toolChoice.tool
    } : {
      type: options.toolChoice.tool
    };
  }
  return {
    tools,
    toolChoice
  };
});
1113
- // =============================================================================
1114
- // Utilities
1115
- // =============================================================================
1116
/**
 * Returns `true` when `data` begins with one of the configured OpenAI file-id
 * prefixes; `false` when no prefixes are configured or none match.
 */
const isFileId = (data, config) => {
  const prefixes = config.fileIdPrefixes;
  if (prefixes === undefined) {
    return false;
  }
  return prefixes.some((prefix) => data.startsWith(prefix));
};
1117
/** Reads the OpenAI item id from a part's provider options, if present. */
const getItemId = (part) => {
  const providerOptions = part.options.openai;
  return providerOptions?.itemId;
};
1118
/**
 * Reads the OpenAI image-detail setting from a part's provider options,
 * falling back to `"auto"` when unset.
 */
const getImageDetail = (part) => {
  const detail = part.options.openai?.imageDetail;
  return detail ?? "auto";
};
1119
/**
 * Builds the `include` list for an OpenAI Responses API request by merging
 * the user-configured values with the extra output items required by any
 * provider-defined tools present in the toolkit.
 */
const prepareInclude = (options, config) => {
  const include = new Set(config.include ?? []);
  // Code interpreter results are only returned when explicitly included.
  const usesCodeInterpreter = options.tools.some(
    (tool) => Tool.isProviderDefined(tool) && tool.id === "openai.code_interpreter"
  );
  if (usesCodeInterpreter) {
    include.add("code_interpreter_call.outputs");
  }
  // Likewise, web search sources must be requested explicitly.
  const usesWebSearch = options.tools.some(
    (tool) =>
      Tool.isProviderDefined(tool) &&
      (tool.id === "openai.web_search" || tool.id === "openai.web_search_preview")
  );
  if (usesWebSearch) {
    include.add("web_search_call.action.sources");
  }
  return [...include];
};
1131
/**
 * Maps the requested response format onto the OpenAI `text.format` request
 * parameter: a strict JSON schema for structured output, plain text otherwise.
 */
const prepareResponseFormat = (options) => {
  if (options.responseFormat.type !== "json") {
    return {
      type: "text"
    };
  }
  const { objectName, schema } = options.responseFormat;
  return {
    type: "json_schema",
    name: objectName,
    description: Tool.getDescriptionFromSchemaAst(schema.ast) ?? "Response with a JSON object",
    schema: Tool.getJsonSchemaFromSchemaAst(schema.ast),
    strict: true
  };
};
1147
- //# sourceMappingURL=OpenAiLanguageModel.js.map