promptlayer 1.1.0 → 1.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +22 -0
  2. package/dist/claude-agents.d.mts +20 -0
  3. package/dist/claude-agents.d.ts +20 -0
  4. package/dist/claude-agents.js +2 -0
  5. package/dist/claude-agents.js.map +1 -0
  6. package/dist/esm/{chunk-SWBNW72U.js → chunk-UKSCOWKT.js} +2 -2
  7. package/dist/esm/{chunk-SWBNW72U.js.map → chunk-UKSCOWKT.js.map} +1 -1
  8. package/dist/esm/claude-agents.js +2 -0
  9. package/dist/esm/claude-agents.js.map +1 -0
  10. package/dist/esm/index.js +1 -1
  11. package/dist/esm/openai-agents.js +2 -2
  12. package/dist/esm/openai-agents.js.map +1 -1
  13. package/dist/index.js +2 -2
  14. package/dist/index.js.map +1 -1
  15. package/dist/openai-agents.js +2 -2
  16. package/dist/openai-agents.js.map +1 -1
  17. package/package.json +18 -1
  18. package/vendor/claude-agents/trace/.claude-plugin/plugin.json +8 -0
  19. package/vendor/claude-agents/trace/hooks/hook_utils.py +38 -0
  20. package/vendor/claude-agents/trace/hooks/hooks.json +60 -0
  21. package/vendor/claude-agents/trace/hooks/lib.sh +577 -0
  22. package/vendor/claude-agents/trace/hooks/parse_stop_transcript.py +375 -0
  23. package/vendor/claude-agents/trace/hooks/post_tool_use.sh +41 -0
  24. package/vendor/claude-agents/trace/hooks/session_end.sh +37 -0
  25. package/vendor/claude-agents/trace/hooks/session_start.sh +57 -0
  26. package/vendor/claude-agents/trace/hooks/stop_hook.sh +123 -0
  27. package/vendor/claude-agents/trace/hooks/user_prompt_submit.sh +25 -0
  28. package/vendor/claude-agents/vendor_metadata.json +5 -0
  29. package/.github/CODEOWNERS +0 -1
  30. package/.github/workflows/node.js.yml +0 -30
  31. package/.github/workflows/npm-publish.yml +0 -35
  32. package/src/groups.ts +0 -16
  33. package/src/index.ts +0 -383
  34. package/src/integrations/openai-agents/helpers.test.ts +0 -254
  35. package/src/integrations/openai-agents/ids.ts +0 -27
  36. package/src/integrations/openai-agents/index.ts +0 -8
  37. package/src/integrations/openai-agents/instrumentation.test.ts +0 -46
  38. package/src/integrations/openai-agents/instrumentation.ts +0 -47
  39. package/src/integrations/openai-agents/mapping.ts +0 -714
  40. package/src/integrations/openai-agents/otlp-json.ts +0 -120
  41. package/src/integrations/openai-agents/processor.test.ts +0 -509
  42. package/src/integrations/openai-agents/processor.ts +0 -388
  43. package/src/integrations/openai-agents/time.ts +0 -56
  44. package/src/integrations/openai-agents/types.ts +0 -49
  45. package/src/integrations/openai-agents/url.ts +0 -9
  46. package/src/openai-agents.ts +0 -1
  47. package/src/promptlayer.ts +0 -125
  48. package/src/run-error-tracking.test.ts +0 -146
  49. package/src/span-exporter.ts +0 -120
  50. package/src/span-wrapper.ts +0 -51
  51. package/src/templates.ts +0 -37
  52. package/src/tracing.ts +0 -20
  53. package/src/track.ts +0 -84
  54. package/src/types.ts +0 -689
  55. package/src/utils/blueprint-builder.test.ts +0 -727
  56. package/src/utils/blueprint-builder.ts +0 -1453
  57. package/src/utils/errors.test.ts +0 -68
  58. package/src/utils/errors.ts +0 -62
  59. package/src/utils/streaming.test.ts +0 -498
  60. package/src/utils/streaming.ts +0 -1402
  61. package/src/utils/utils.ts +0 -1228
  62. package/tsconfig.json +0 -115
  63. package/tsup.config.ts +0 -20
  64. package/vitest.config.ts +0 -9
@@ -1,1402 +0,0 @@
1
- import {
2
- Completion as AnthropicCompletion,
3
- Message,
4
- } from "@anthropic-ai/sdk/resources";
5
- import { MessageStreamEvent } from "@anthropic-ai/sdk/resources/messages";
6
- import {
7
- ChatCompletion,
8
- ChatCompletionChunk,
9
- Completion,
10
- } from "openai/resources";
11
- import {
12
- buildPromptBlueprintFromAnthropicEvent,
13
- buildPromptBlueprintFromBedrockEvent,
14
- buildPromptBlueprintFromGoogleEvent,
15
- buildPromptBlueprintFromOpenAIEvent,
16
- buildPromptBlueprintFromOpenAIImagesEvent,
17
- buildPromptBlueprintFromOpenAIResponsesEvent,
18
- } from "./blueprint-builder";
19
-
20
- export const STREAMING_PROVIDERS_WITH_USAGE = ["openai", "openai.azure"] as const;
21
-
22
-
23
- export const openaiResponsesStreamChat = (results: any[]) => {
24
- const response_data: any = {
25
- id: null,
26
- object: "response",
27
- created_at: null,
28
- status: null,
29
- error: null,
30
- incomplete_details: null,
31
- instructions: null,
32
- max_output_tokens: null,
33
- model: null,
34
- output: [],
35
- parallel_tool_calls: true,
36
- previous_response_id: null,
37
- reasoning: {},
38
- store: true,
39
- temperature: 1,
40
- text: undefined,
41
- tool_choice: "auto",
42
- tools: [],
43
- top_p: 1,
44
- truncation: "disabled",
45
- usage: null,
46
- user: null,
47
- metadata: {},
48
- };
49
-
50
- const current_items: Record<string, any> = {};
51
- const output_index_to_item_id: Record<number, string> = {};
52
-
53
- for (const chunk of results as any[]) {
54
- const event_type = chunk?.type;
55
-
56
- if (event_type === "response.created") {
57
- const response = chunk.response || {};
58
- response_data.id = response.id ?? response_data.id;
59
- response_data.created_at = response.created_at ?? response_data.created_at;
60
- response_data.model = response.model ?? response_data.model;
61
- response_data.status = response.status ?? response_data.status;
62
- response_data.parallel_tool_calls =
63
- response.parallel_tool_calls ?? response_data.parallel_tool_calls;
64
- response_data.temperature =
65
- response.temperature ?? response_data.temperature;
66
- response_data.tool_choice = response.tool_choice ?? response_data.tool_choice;
67
- response_data.tools = response.tools ?? response_data.tools;
68
- response_data.top_p = response.top_p ?? response_data.top_p;
69
- response_data.truncation = response.truncation ?? response_data.truncation;
70
- response_data.max_output_tokens =
71
- response.max_output_tokens ?? response_data.max_output_tokens;
72
- response_data.previous_response_id =
73
- response.previous_response_id ?? response_data.previous_response_id;
74
- response_data.store = response.store ?? response_data.store;
75
- response_data.user = response.user ?? response_data.user;
76
- response_data.metadata = response.metadata ?? response_data.metadata;
77
-
78
- const text_config = response.text;
79
- if (text_config) {
80
- response_data.text = text_config;
81
- }
82
- const reasoning = response.reasoning;
83
- if (reasoning) {
84
- response_data.reasoning = reasoning;
85
- }
86
- continue;
87
- }
88
-
89
- if (event_type === "response.in_progress") {
90
- const response = chunk.response || {};
91
- response_data.status = response.status ?? response_data.status;
92
- continue;
93
- }
94
-
95
- if (event_type === "response.output_item.added") {
96
- const item = chunk.item || {};
97
- const item_id = item.id;
98
- const item_type = item.type;
99
- const output_index = chunk.output_index;
100
- if (output_index != null && item_id) {
101
- output_index_to_item_id[output_index] = item_id;
102
- }
103
-
104
- if (item_type === "reasoning") {
105
- current_items[item_id] = {
106
- type: "reasoning",
107
- id: item_id,
108
- summary: [],
109
- status: item.status ?? "in_progress",
110
- };
111
- } else if (item_type === "function_call") {
112
- current_items[item_id] = {
113
- type: "function_call",
114
- id: item_id,
115
- call_id: item.call_id,
116
- name: item.name,
117
- arguments: "",
118
- status: item.status ?? "in_progress",
119
- };
120
- } else if (item_type === "message") {
121
- current_items[item_id] = {
122
- type: "message",
123
- id: item_id,
124
- role: item.role ?? "assistant",
125
- content: [],
126
- status: item.status ?? "in_progress",
127
- };
128
- } else if (item_type === "code_interpreter_call") {
129
- current_items[item_id] = {
130
- type: "code_interpreter_call",
131
- id: item_id,
132
- container_id: item.container_id,
133
- code: item.code ?? "",
134
- status: item.status ?? "in_progress",
135
- };
136
- } else if (item_type === "image_generation_call") {
137
- current_items[item_id] = {
138
- type: "image_generation_call",
139
- id: item_id,
140
- status: item.status ?? "in_progress",
141
- revised_prompt: item.revised_prompt ?? "",
142
- result: item.result ?? "",
143
- background: item.background,
144
- size: item.size,
145
- quality: item.quality,
146
- output_format: item.output_format,
147
- };
148
- } else if (item_type === "web_search_call") {
149
- current_items[item_id] = {
150
- type: "web_search_call",
151
- id: item_id,
152
- status: item.status ?? "in_progress",
153
- };
154
- } else if (item_type === "file_search_call") {
155
- current_items[item_id] = {
156
- type: "file_search_call",
157
- id: item_id,
158
- status: item.status ?? "in_progress",
159
- };
160
- } else if (item_type === "mcp_list_tools") {
161
- current_items[item_id] = {
162
- type: "mcp_list_tools",
163
- id: item_id,
164
- server_label: item.server_label ?? "",
165
- tools: item.tools ?? [],
166
- error: item.error ?? null,
167
- };
168
- } else if (item_type === "mcp_call") {
169
- current_items[item_id] = {
170
- type: "mcp_call",
171
- id: item_id,
172
- name: item.name ?? "",
173
- server_label: item.server_label ?? "",
174
- arguments: item.arguments ?? "",
175
- output: item.output ?? null,
176
- error: item.error ?? null,
177
- approval_request_id: item.approval_request_id ?? null,
178
- status: item.status ?? "in_progress",
179
- };
180
- } else if (item_type === "mcp_approval_request") {
181
- current_items[item_id] = {
182
- type: "mcp_approval_request",
183
- id: item_id,
184
- name: item.name ?? "",
185
- arguments: item.arguments ?? "",
186
- server_label: item.server_label ?? "",
187
- };
188
- } else if (item_type === "shell_call") {
189
- current_items[item_id] = {
190
- type: "shell_call",
191
- id: item_id,
192
- call_id: item.call_id ?? "",
193
- action: item.action ?? {},
194
- status: item.status ?? "in_progress",
195
- };
196
- } else if (item_type === "shell_call_output") {
197
- current_items[item_id] = {
198
- type: "shell_call_output",
199
- id: item_id,
200
- call_id: item.call_id ?? "",
201
- output: item.output ?? [],
202
- status: item.status ?? "in_progress",
203
- };
204
- } else if (item_type === "apply_patch_call") {
205
- current_items[item_id] = {
206
- type: "apply_patch_call",
207
- id: item_id,
208
- call_id: item.call_id ?? "",
209
- operation: item.operation ?? {},
210
- status: item.status ?? "in_progress",
211
- };
212
- } else if (item_type === "apply_patch_call_output") {
213
- current_items[item_id] = {
214
- type: "apply_patch_call_output",
215
- id: item_id,
216
- call_id: item.call_id ?? "",
217
- output: item.output ?? "",
218
- status: item.status ?? "in_progress",
219
- };
220
- }
221
- continue;
222
- }
223
-
224
- if (event_type === "response.reasoning_summary_part.added") {
225
- const item_id = chunk.item_id;
226
- const part = chunk.part || {};
227
- if (item_id in current_items && current_items[item_id].type === "reasoning") {
228
- const summary_part = {
229
- type: part.type ?? "summary_text",
230
- text: part.text ?? "",
231
- };
232
- current_items[item_id].summary.push(summary_part);
233
- }
234
- continue;
235
- }
236
-
237
- if (event_type === "response.reasoning_summary_text.delta") {
238
- const item_id = chunk.item_id;
239
- const delta = chunk.delta ?? "";
240
- const summary_index = chunk.summary_index ?? 0;
241
-
242
- if (item_id in current_items && current_items[item_id].type === "reasoning") {
243
- while ((current_items[item_id].summary as any[]).length <= summary_index) {
244
- current_items[item_id].summary.push({ type: "summary_text", text: "" });
245
- }
246
- current_items[item_id].summary[summary_index].text += delta;
247
- }
248
- continue;
249
- }
250
-
251
- if (event_type === "response.reasoning_summary_text.done") {
252
- const item_id = chunk.item_id;
253
- const final_text = chunk.text ?? "";
254
- const summary_index = chunk.summary_index ?? 0;
255
-
256
- if (item_id in current_items && current_items[item_id].type === "reasoning") {
257
- while ((current_items[item_id].summary as any[]).length <= summary_index) {
258
- current_items[item_id].summary.push({ type: "summary_text", text: "" });
259
- }
260
- current_items[item_id].summary[summary_index].text = final_text;
261
- }
262
- continue;
263
- }
264
-
265
- if (event_type === "response.reasoning_summary_part.done") {
266
- const item_id = chunk.item_id;
267
- const part = chunk.part || {};
268
- if (item_id in current_items && current_items[item_id].type === "reasoning") {
269
- const summary_index = chunk.summary_index ?? 0;
270
- if (summary_index < current_items[item_id].summary.length) {
271
- current_items[item_id].summary[summary_index] = {
272
- type: part.type ?? "summary_text",
273
- text: part.text ?? "",
274
- };
275
- }
276
- }
277
- continue;
278
- }
279
-
280
- if (event_type === "response.function_call_arguments.delta") {
281
- const item_id = chunk.item_id;
282
- const delta = chunk.delta ?? "";
283
- if (item_id in current_items) {
284
- current_items[item_id].arguments = `${
285
- current_items[item_id].arguments || ""
286
- }${delta}`;
287
- }
288
- continue;
289
- }
290
-
291
- if (event_type === "response.function_call_arguments.done") {
292
- const item_id = chunk.item_id;
293
- const final_arguments = chunk.arguments ?? "";
294
- if (item_id in current_items) {
295
- current_items[item_id].arguments = final_arguments;
296
- }
297
- continue;
298
- }
299
-
300
- if (event_type === "response.content_part.added") {
301
- const part = chunk.part || {};
302
- const messageItem = Object.values(current_items).find(
303
- (it: any) => it.type === "message"
304
- ) as any;
305
- if (messageItem) {
306
- const content_part = {
307
- type: part.type ?? "output_text",
308
- text: part.text ?? "",
309
- annotations: part.annotations ?? [],
310
- };
311
- messageItem.content.push(content_part);
312
- }
313
- continue;
314
- }
315
-
316
- if (event_type === "response.output_text.delta") {
317
- const delta_text = chunk.delta ?? "";
318
- for (const item of Object.values(current_items) as any[]) {
319
- if (item.type === "message" && item.content && item.content.length) {
320
- const last = item.content[item.content.length - 1];
321
- if (last && last.type === "output_text") {
322
- last.text = `${last.text || ""}${delta_text}`;
323
- }
324
- break;
325
- }
326
- }
327
- continue;
328
- }
329
-
330
- if (event_type === "response.output_text.done") {
331
- const final_text = chunk.text ?? "";
332
- for (const item of Object.values(current_items) as any[]) {
333
- if (item.type === "message" && item.content && item.content.length) {
334
- const last = item.content[item.content.length - 1];
335
- if (last && last.type === "output_text") {
336
- last.text = final_text;
337
- }
338
- break;
339
- }
340
- }
341
- continue;
342
- }
343
-
344
- if (event_type === "response.output_item.done") {
345
- const item = chunk.item || {};
346
- const item_id = item.id;
347
- if (item_id in current_items) {
348
- current_items[item_id].status = item.status ?? "completed";
349
- if (item.type === "reasoning") {
350
- current_items[item_id].summary =
351
- item.summary ?? current_items[item_id].summary;
352
- } else if (item.type === "function_call") {
353
- current_items[item_id].arguments =
354
- item.arguments ?? current_items[item_id].arguments;
355
- current_items[item_id].call_id =
356
- item.call_id ?? current_items[item_id].call_id;
357
- current_items[item_id].name = item.name ?? current_items[item_id].name;
358
- } else if (item.type === "message") {
359
- current_items[item_id].content =
360
- item.content ?? current_items[item_id].content;
361
- current_items[item_id].role = item.role ?? current_items[item_id].role;
362
- } else if (item.type === "image_generation_call") {
363
- current_items[item_id].result =
364
- item.result ?? current_items[item_id].result;
365
- current_items[item_id].revised_prompt =
366
- item.revised_prompt ?? current_items[item_id].revised_prompt;
367
- current_items[item_id].background =
368
- item.background ?? current_items[item_id].background;
369
- current_items[item_id].size =
370
- item.size ?? current_items[item_id].size;
371
- current_items[item_id].quality =
372
- item.quality ?? current_items[item_id].quality;
373
- current_items[item_id].output_format =
374
- item.output_format ?? current_items[item_id].output_format;
375
- } else if (item.type === "code_interpreter_call") {
376
- current_items[item_id].code =
377
- item.code ?? current_items[item_id].code;
378
- current_items[item_id].container_id =
379
- item.container_id ?? current_items[item_id].container_id;
380
- } else if (item.type === "mcp_list_tools") {
381
- current_items[item_id].tools =
382
- item.tools ?? current_items[item_id].tools;
383
- current_items[item_id].error =
384
- item.error ?? current_items[item_id].error;
385
- } else if (item.type === "mcp_call") {
386
- current_items[item_id].name =
387
- item.name ?? current_items[item_id].name;
388
- current_items[item_id].arguments =
389
- item.arguments ?? current_items[item_id].arguments;
390
- current_items[item_id].output =
391
- item.output ?? current_items[item_id].output;
392
- current_items[item_id].error =
393
- item.error ?? current_items[item_id].error;
394
- current_items[item_id].server_label =
395
- item.server_label ?? current_items[item_id].server_label;
396
- } else if (item.type === "mcp_approval_request") {
397
- current_items[item_id].name =
398
- item.name ?? current_items[item_id].name;
399
- current_items[item_id].arguments =
400
- item.arguments ?? current_items[item_id].arguments;
401
- current_items[item_id].server_label =
402
- item.server_label ?? current_items[item_id].server_label;
403
- } else if (item.type === "shell_call") {
404
- current_items[item_id].action =
405
- item.action ?? current_items[item_id].action;
406
- current_items[item_id].call_id =
407
- item.call_id ?? current_items[item_id].call_id;
408
- } else if (item.type === "shell_call_output") {
409
- current_items[item_id].output =
410
- item.output ?? current_items[item_id].output;
411
- current_items[item_id].call_id =
412
- item.call_id ?? current_items[item_id].call_id;
413
- } else if (item.type === "apply_patch_call") {
414
- current_items[item_id].operation =
415
- item.operation ?? current_items[item_id].operation;
416
- current_items[item_id].call_id =
417
- item.call_id ?? current_items[item_id].call_id;
418
- } else if (item.type === "apply_patch_call_output") {
419
- current_items[item_id].output =
420
- item.output ?? current_items[item_id].output;
421
- current_items[item_id].call_id =
422
- item.call_id ?? current_items[item_id].call_id;
423
- } else if (item.type === "web_search_call") {
424
- current_items[item_id].action = item.action;
425
- } else if (item.type === "file_search_call") {
426
- current_items[item_id].action = item.action;
427
- }
428
- response_data.output.push(current_items[item_id]);
429
- }
430
- continue;
431
- }
432
-
433
- if (event_type === "response.image_generation_call.in_progress") {
434
- const item_id = chunk.item_id;
435
- if (item_id in current_items) {
436
- current_items[item_id].status = "in_progress";
437
- }
438
- continue;
439
- }
440
-
441
- if (event_type === "response.image_generation_call.generating") {
442
- const item_id = chunk.item_id;
443
- if (item_id in current_items) {
444
- current_items[item_id].status = "generating";
445
- }
446
- continue;
447
- }
448
-
449
- if (event_type === "response.image_generation_call.partial_image") {
450
- const item_id = chunk.item_id;
451
- if (item_id in current_items) {
452
- current_items[item_id].result = chunk.partial_image_b64 ?? current_items[item_id].result;
453
- current_items[item_id].background = chunk.background ?? current_items[item_id].background;
454
- current_items[item_id].size = chunk.size ?? current_items[item_id].size;
455
- current_items[item_id].quality = chunk.quality ?? current_items[item_id].quality;
456
- current_items[item_id].output_format = chunk.output_format ?? current_items[item_id].output_format;
457
- }
458
- continue;
459
- }
460
-
461
- if (event_type === "response.shell_call_command.added") {
462
- const item_id = output_index_to_item_id[chunk.output_index];
463
- if (item_id && item_id in current_items) {
464
- const action = current_items[item_id].action || { commands: [] };
465
- if (!action.commands) action.commands = [];
466
- action.commands[chunk.command_index] = chunk.command ?? "";
467
- current_items[item_id].action = action;
468
- }
469
- continue;
470
- }
471
-
472
- if (event_type === "response.shell_call_command.delta") {
473
- const item_id = output_index_to_item_id[chunk.output_index];
474
- if (item_id && item_id in current_items) {
475
- const action = current_items[item_id].action || { commands: [] };
476
- if (!action.commands) action.commands = [];
477
- const idx = chunk.command_index ?? 0;
478
- action.commands[idx] = (action.commands[idx] ?? "") + (chunk.delta ?? "");
479
- current_items[item_id].action = action;
480
- }
481
- continue;
482
- }
483
-
484
- if (event_type === "response.shell_call_command.done") {
485
- const item_id = output_index_to_item_id[chunk.output_index];
486
- if (item_id && item_id in current_items) {
487
- const action = current_items[item_id].action || { commands: [] };
488
- if (!action.commands) action.commands = [];
489
- action.commands[chunk.command_index] = chunk.command ?? "";
490
- current_items[item_id].action = action;
491
- }
492
- continue;
493
- }
494
-
495
- if (event_type === "response.shell_call_output_content.delta") {
496
- const item_id = chunk.item_id;
497
- if (item_id && item_id in current_items) {
498
- if (!current_items[item_id].output) current_items[item_id].output = [];
499
- const idx = chunk.command_index ?? 0;
500
- const existing = current_items[item_id].output[idx] ?? { stdout: "", stderr: "" };
501
- const delta = chunk.delta ?? {};
502
- if (delta.stdout) existing.stdout = (existing.stdout ?? "") + delta.stdout;
503
- if (delta.stderr) existing.stderr = (existing.stderr ?? "") + delta.stderr;
504
- current_items[item_id].output[idx] = existing;
505
- }
506
- continue;
507
- }
508
-
509
- if (event_type === "response.shell_call_output_content.done") {
510
- const item_id = chunk.item_id;
511
- if (item_id && item_id in current_items) {
512
- current_items[item_id].output = chunk.output ?? current_items[item_id].output;
513
- }
514
- continue;
515
- }
516
-
517
- if (event_type === "response.apply_patch_call_operation_diff.delta") {
518
- const item_id = chunk.item_id;
519
- if (item_id && item_id in current_items) {
520
- const operation = current_items[item_id].operation || {};
521
- operation.diff = (operation.diff ?? "") + (chunk.delta ?? "");
522
- current_items[item_id].operation = operation;
523
- }
524
- continue;
525
- }
526
-
527
- if (event_type === "response.apply_patch_call_operation_diff.done") {
528
- const item_id = chunk.item_id;
529
- if (item_id && item_id in current_items) {
530
- const operation = current_items[item_id].operation || {};
531
- operation.diff = chunk.diff ?? operation.diff;
532
- current_items[item_id].operation = operation;
533
- }
534
- continue;
535
- }
536
-
537
- if (event_type === "response.completed") {
538
- const response = chunk.response || {};
539
- response_data.status = response.status ?? response_data.status ?? "completed";
540
- response_data.usage = response.usage ?? response_data.usage;
541
- response_data.output = response.output ?? response_data.output;
542
- if (response.reasoning) {
543
- response_data.reasoning = response.reasoning;
544
- }
545
- continue;
546
- }
547
- }
548
-
549
- return response_data;
550
- }
551
-
552
- export const openaiStreamChat = (results: ChatCompletionChunk[]): ChatCompletion => {
553
- let content: ChatCompletion.Choice["message"]["content"] = null;
554
- let functionCall: ChatCompletion.Choice["message"]["function_call"] =
555
- undefined;
556
- const response: ChatCompletion = {
557
- id: "",
558
- choices: [],
559
- created: Date.now(),
560
- model: "",
561
- object: "chat.completion",
562
- };
563
- const lastResult = results.at(-1);
564
- if (!lastResult) return response;
565
- let toolCalls: any[] | undefined = undefined;
566
- for (const result of results) {
567
- if (result.choices.length === 0) continue;
568
- const delta = result.choices[0].delta;
569
-
570
- if (delta.content) {
571
- content = `${content || ""}${delta.content || ""}`;
572
- }
573
- if (delta.function_call) {
574
- functionCall = {
575
- name: `${functionCall ? functionCall.name : ""}${
576
- delta.function_call.name || ""
577
- }`,
578
- arguments: `${functionCall ? functionCall.arguments : ""}${
579
- delta.function_call.arguments || ""
580
- }`,
581
- };
582
- }
583
- const toolCall = delta.tool_calls?.[0];
584
- if (toolCall) {
585
- toolCalls = toolCalls || [];
586
- const lastToolCall = toolCalls.at(-1);
587
- if (!lastToolCall || toolCall.id) {
588
- toolCalls.push({
589
- id: toolCall.id || "",
590
- type: toolCall.type || "function",
591
- function: {
592
- name: toolCall.function?.name || "",
593
- arguments: toolCall.function?.arguments || "",
594
- },
595
- });
596
- continue;
597
- }
598
- (lastToolCall as any).function.name = `${(lastToolCall as any).function.name}${
599
- toolCall.function?.name || ""
600
- }`;
601
- (lastToolCall as any).function.arguments = `${(lastToolCall as any).function.arguments}${
602
- toolCall.function?.arguments || ""
603
- }`;
604
- }
605
- }
606
- const firstChoice = results[0].choices.at(0);
607
- response.choices.push({
608
- finish_reason: firstChoice?.finish_reason ?? "stop",
609
- index: firstChoice?.index ?? 0,
610
- logprobs: firstChoice?.logprobs ?? null,
611
- message: {
612
- role: "assistant",
613
- content,
614
- function_call: functionCall ? functionCall : undefined,
615
- tool_calls: toolCalls ? toolCalls : undefined,
616
- refusal: firstChoice?.delta.refusal ?? null,
617
- },
618
- });
619
- response.id = lastResult.id;
620
- response.model = lastResult.model;
621
- response.created = lastResult.created;
622
- response.system_fingerprint = lastResult.system_fingerprint;
623
- response.usage = lastResult.usage ?? undefined;
624
- return response;
625
- };
626
-
627
- export const openaiStreamCompletion = (results: Completion[]) => {
628
- const response: Completion = {
629
- id: "",
630
- choices: [
631
- {
632
- finish_reason: "stop",
633
- index: 0,
634
- text: "",
635
- logprobs: null,
636
- },
637
- ],
638
- created: Date.now(),
639
- model: "",
640
- object: "text_completion",
641
- };
642
- const lastResult = results.at(-1);
643
- if (!lastResult) return response;
644
- let text = "";
645
- for (const result of results) {
646
- if (result.choices.length > 0 && result.choices[0].text) {
647
- text = `${text}${result.choices[0].text}`;
648
- }
649
- }
650
- response.choices[0].text = text;
651
- response.id = lastResult.id;
652
- response.created = lastResult.created;
653
- response.model = lastResult.model;
654
- response.system_fingerprint = lastResult.system_fingerprint;
655
- response.usage = lastResult.usage;
656
- return response;
657
- };
658
-
659
- export const anthropicStreamMessage = (results: MessageStreamEvent[]): Message => {
660
- let response: Message = {
661
- id: "",
662
- model: "",
663
- content: [],
664
- role: "assistant",
665
- type: "message",
666
- stop_reason: "stop_sequence",
667
- stop_sequence: null,
668
- usage: {
669
- input_tokens: 0,
670
- output_tokens: 0,
671
- cache_creation_input_tokens: 0,
672
- cache_read_input_tokens: 0,
673
- server_tool_use: null,
674
- service_tier: null,
675
- },
676
- };
677
- const lastResult = results.at(-1);
678
- if (!lastResult) return response;
679
- let currentBlock: any = null;
680
- let currentBlockIndex: number | null = null;
681
- let currentSignature = "";
682
- let currentThinking = "";
683
- let currentText = "";
684
- let currentToolInputJson = "";
685
- const citationsByBlockIndex: Record<number, any[]> = {};
686
-
687
- for (const event of results) {
688
- if (event.type === "message_start") {
689
- response = { ...event.message };
690
- } else if (event.type === "content_block_start") {
691
- currentBlock = { ...event.content_block };
692
- currentBlockIndex = "index" in event && typeof event.index === "number" ? event.index : null;
693
- if (currentBlock.type === "thinking") {
694
- currentSignature = "";
695
- currentThinking = "";
696
- } else if (currentBlock.type === "text") {
697
- currentText = "";
698
- } else if (
699
- currentBlock.type === "tool_use" ||
700
- currentBlock.type === "server_tool_use"
701
- ) {
702
- currentToolInputJson = "";
703
- }
704
- } else if (event.type === "content_block_delta") {
705
- const delta = event.delta as unknown as Record<string, unknown> | undefined;
706
- const eventIndex = "index" in event && typeof event.index === "number" ? event.index : null;
707
-
708
- if (delta?.type === "citations_delta") {
709
- const citation = delta.citation as Record<string, unknown> | undefined;
710
- if (
711
- citation &&
712
- typeof citation === "object" &&
713
- citation.type === "web_search_result_location" &&
714
- eventIndex !== null
715
- ) {
716
- const annotation = {
717
- type: "url_citation",
718
- url: citation.url ?? "",
719
- title: citation.title ?? "",
720
- start_index: citation.start_index ?? 0,
721
- end_index: citation.end_index ?? 0,
722
- ...(citation.cited_text != null ? { cited_text: citation.cited_text } : {}),
723
- ...(citation.encrypted_index != null ? { encrypted_index: citation.encrypted_index } : {}),
724
- };
725
- if (!citationsByBlockIndex[eventIndex]) citationsByBlockIndex[eventIndex] = [];
726
- citationsByBlockIndex[eventIndex].push(annotation);
727
- }
728
- } else if (currentBlock !== null) {
729
- if (currentBlock.type === "thinking") {
730
- if (delta && "signature" in delta) {
731
- currentSignature = (delta.signature as string) || "";
732
- }
733
- if (delta && "thinking" in delta) {
734
- currentThinking += (delta.thinking as string) || "";
735
- }
736
- } else if (currentBlock.type === "text") {
737
- if (delta && "text" in delta) {
738
- currentText += (delta.text as string) || "";
739
- }
740
- } else if (
741
- currentBlock.type === "tool_use" ||
742
- currentBlock.type === "server_tool_use"
743
- ) {
744
- if (delta?.type === "input_json_delta") {
745
- const inputJsonDelta = delta as { partial_json?: string };
746
- currentToolInputJson += inputJsonDelta.partial_json || "";
747
- }
748
- }
749
- }
750
- } else if (event.type === "content_block_stop" && currentBlock !== null) {
751
- if (currentBlock.type === "thinking") {
752
- currentBlock.signature = currentSignature;
753
- currentBlock.thinking = currentThinking;
754
- } else if (currentBlock.type === "text") {
755
- currentBlock.text = currentText;
756
- currentBlock.citations = null;
757
- if (currentBlockIndex !== null && citationsByBlockIndex[currentBlockIndex]?.length) {
758
- currentBlock.annotations = citationsByBlockIndex[currentBlockIndex];
759
- }
760
- } else if (
761
- currentBlock.type === "tool_use" ||
762
- currentBlock.type === "server_tool_use"
763
- ) {
764
- try {
765
- currentBlock.input = currentToolInputJson
766
- ? JSON.parse(currentToolInputJson)
767
- : {};
768
- } catch (e) {
769
- currentBlock.input = {};
770
- }
771
- }
772
- response.content!.push(currentBlock);
773
- currentBlock = null;
774
- currentBlockIndex = null;
775
- currentSignature = "";
776
- currentThinking = "";
777
- currentText = "";
778
- currentToolInputJson = "";
779
- } else if (event.type === "message_delta") {
780
- if ("usage" in event && event.usage) {
781
- response.usage = {
782
- ...response.usage,
783
- output_tokens: event.usage.output_tokens ?? 0,
784
- };
785
- }
786
- if ("delta" in event && event.delta) {
787
- if (
788
- "stop_reason" in event.delta &&
789
- event.delta.stop_reason !== undefined
790
- ) {
791
- response.stop_reason = event.delta.stop_reason;
792
- }
793
- if (
794
- "stop_sequence" in event.delta &&
795
- event.delta.stop_sequence !== undefined
796
- ) {
797
- response.stop_sequence = event.delta.stop_sequence;
798
- }
799
- }
800
- }
801
- }
802
-
803
- return response;
804
- };
805
-
806
- export const anthropicStreamCompletion = (results: AnthropicCompletion[]) => {
807
- const response: AnthropicCompletion = {
808
- completion: "",
809
- id: "",
810
- model: "",
811
- stop_reason: "",
812
- type: "completion",
813
- };
814
- const lastResult = results.at(-1);
815
- if (!lastResult) return response;
816
- let completion = "";
817
- for (const result of results) {
818
- completion = `${completion}${result.completion}`;
819
- }
820
- response.completion = completion;
821
- response.id = lastResult.id;
822
- response.model = lastResult.model;
823
- response.stop_reason = lastResult.stop_reason;
824
- return response;
825
- };
826
-
827
- export const mistralStreamChat = (results: any[]) => {
828
- let content: ChatCompletion.Choice["message"]["content"] = null;
829
- const response: ChatCompletion = {
830
- id: "",
831
- choices: [],
832
- created: Date.now(),
833
- model: "",
834
- object: "chat.completion",
835
- };
836
- const lastResult = results.at(-1).data;
837
- if (!lastResult) return response;
838
- let toolCalls: any[] | undefined = undefined;
839
-
840
- for (const result of results) {
841
- if (result.data.choices.length === 0) continue;
842
- const delta = result.data.choices[0].delta;
843
-
844
- if (delta.content) {
845
- content = `${content || ""}${delta.content || ""}`;
846
- }
847
-
848
- const toolCall = delta.toolCalls?.[0];
849
- if (toolCall) {
850
- toolCalls = toolCalls || [];
851
- const lastToolCall = toolCalls.at(-1);
852
- if (!lastToolCall || toolCall.id) {
853
- toolCalls.push({
854
- id: toolCall.id || "",
855
- type: toolCall.type || "function",
856
- function: {
857
- name: toolCall.function?.name || "",
858
- arguments: toolCall.function?.arguments || "",
859
- },
860
- });
861
- continue;
862
- }
863
- (lastToolCall as any).function.name = `${(lastToolCall as any).function.name}${
864
- toolCall.function?.name || ""
865
- }`;
866
- (lastToolCall as any).function.arguments = `${(lastToolCall as any).function.arguments}${
867
- toolCall.function?.arguments || ""
868
- }`;
869
- }
870
- }
871
- const firstChoice = results[0].data.choices.at(0);
872
- response.choices.push({
873
- finish_reason: firstChoice?.finish_reason ?? "stop",
874
- index: firstChoice?.index ?? 0,
875
- logprobs: firstChoice?.logprobs ?? null,
876
- message: {
877
- role: "assistant",
878
- content,
879
- tool_calls: toolCalls ? toolCalls : undefined,
880
- refusal: firstChoice?.delta.refusal ?? null,
881
- },
882
- });
883
- response.id = lastResult.id;
884
- response.model = lastResult.model;
885
- response.created = lastResult.created;
886
- response.usage = lastResult.usage ?? undefined;
887
- return response;
888
- };
889
-
890
- export const bedrockStreamMessage = (results: any[]) => {
891
- const response: any = {
892
- ResponseMetadata: {},
893
- output: { message: {} },
894
- stopReason: "end_turn",
895
- metrics: {},
896
- usage: {},
897
- };
898
-
899
- const content_blocks: any[] = [];
900
- let current_tool_call: any = null;
901
- let current_tool_input = "";
902
- let current_text = "";
903
- let current_signature = "";
904
- let current_thinking = "";
905
-
906
- for (const event of results) {
907
- if ("contentBlockStart" in event) {
908
- const content_block = event["contentBlockStart"];
909
- if ("start" in content_block && "toolUse" in content_block["start"]) {
910
- const tool_use = content_block["start"]["toolUse"];
911
- current_tool_call = {
912
- toolUse: {
913
- toolUseId: tool_use["toolUseId"],
914
- name: tool_use["name"],
915
- },
916
- };
917
- current_tool_input = "";
918
- }
919
- } else if ("contentBlockDelta" in event) {
920
- const delta = event["contentBlockDelta"]["delta"];
921
- if ("text" in delta) {
922
- current_text += delta["text"];
923
- } else if ("reasoningContent" in delta) {
924
- const reasoning_content = delta["reasoningContent"];
925
- if ("text" in reasoning_content) {
926
- current_thinking += reasoning_content["text"];
927
- } else if ("signature" in reasoning_content) {
928
- current_signature += reasoning_content["signature"];
929
- }
930
- } else if ("toolUse" in delta) {
931
- if ("input" in delta["toolUse"]) {
932
- const input_chunk = delta["toolUse"]["input"];
933
- current_tool_input += input_chunk;
934
- if (!input_chunk.trim()) {
935
- continue;
936
- }
937
- }
938
- }
939
- } else if ("contentBlockStop" in event) {
940
- if (current_tool_call && current_tool_input) {
941
- try {
942
- current_tool_call.toolUse.input = JSON.parse(current_tool_input);
943
- } catch {
944
- current_tool_call.toolUse.input = {};
945
- }
946
- content_blocks.push(current_tool_call);
947
- current_tool_call = null;
948
- current_tool_input = "";
949
- } else if (current_text) {
950
- content_blocks.push({ text: current_text });
951
- current_text = "";
952
- } else if (current_thinking && current_signature) {
953
- content_blocks.push({
954
- reasoningContent: {
955
- reasoningText: {
956
- text: current_thinking,
957
- signature: current_signature,
958
- },
959
- },
960
- });
961
- current_thinking = "";
962
- current_signature = "";
963
- }
964
- } else if ("messageStop" in event) {
965
- response.stopReason = event["messageStop"]["stopReason"];
966
- } else if ("metadata" in event) {
967
- const metadata = event["metadata"];
968
- response.usage = metadata?.usage || {};
969
- response.metrics = metadata?.metrics || {};
970
- }
971
- }
972
-
973
- response.output.message = { role: "assistant", content: content_blocks };
974
- return response;
975
- };
976
-
977
- const buildGoogleResponseFromParts = (
978
- thoughtContent: string,
979
- regularContent: string,
980
- functionCalls: any[],
981
- inlineDataParts: any[],
982
- executableCodeParts: any[],
983
- codeExecutionResultParts: any[],
984
- lastResult: any,
985
- lastThoughtSignature: string | null,
986
- lastRegularThoughtSignature: string | null
987
- ) => {
988
- const response = { ...lastResult };
989
- const finalParts: any[] = [];
990
-
991
- if (thoughtContent) {
992
- const part: any = { text: thoughtContent, thought: true };
993
- if (lastThoughtSignature) part.thoughtSignature = lastThoughtSignature;
994
- finalParts.push(part);
995
- }
996
-
997
- if (regularContent) {
998
- const part: any = { text: regularContent, thought: null };
999
- if (lastRegularThoughtSignature) part.thoughtSignature = lastRegularThoughtSignature;
1000
- finalParts.push(part);
1001
- }
1002
-
1003
- for (const executableCode of executableCodeParts) {
1004
- finalParts.push({ executableCode });
1005
- }
1006
-
1007
- for (const codeExecutionResult of codeExecutionResultParts) {
1008
- finalParts.push({ codeExecutionResult });
1009
- }
1010
-
1011
- for (const inlineData of inlineDataParts) {
1012
- finalParts.push({ inlineData });
1013
- }
1014
-
1015
- for (const functionCall of functionCalls) {
1016
- finalParts.push({ functionCall });
1017
- }
1018
-
1019
- if (finalParts.length > 0 && response.candidates?.[0]?.content) {
1020
- response.candidates[0].content.parts = finalParts;
1021
- }
1022
-
1023
- const lastCandidate = lastResult?.candidates?.[0];
1024
- if (lastCandidate) {
1025
- if (!response.candidates) response.candidates = [];
1026
- if (!response.candidates[0]) response.candidates[0] = { content: { parts: [] } };
1027
- if (lastCandidate.groundingMetadata != null) {
1028
- response.candidates[0].groundingMetadata = lastCandidate.groundingMetadata;
1029
- }
1030
- if (lastCandidate.urlContextMetadata != null) {
1031
- response.candidates[0].urlContextMetadata = lastCandidate.urlContextMetadata;
1032
- }
1033
- if (lastCandidate.citationMetadata != null) {
1034
- response.candidates[0].citationMetadata = lastCandidate.citationMetadata;
1035
- }
1036
- }
1037
-
1038
- return response;
1039
- };
1040
-
1041
- const googleStreamResponse = (results: any[]) => {
1042
- const { GenerateContentResponse } = require("@google/genai");
1043
-
1044
- if (!results.length) {
1045
- return new GenerateContentResponse();
1046
- }
1047
-
1048
- let thoughtContent = "";
1049
- let regularContent = "";
1050
- const functionCalls: any[] = [];
1051
- const inlineDataParts: any[] = [];
1052
- const executableCodeParts: any[] = [];
1053
- const codeExecutionResultParts: any[] = [];
1054
- let lastThoughtSignature: string | null = null;
1055
- let lastRegularThoughtSignature: string | null = null;
1056
-
1057
- for (const result of results) {
1058
- if (result.candidates && result.candidates[0]?.content?.parts) {
1059
- for (const part of result.candidates[0].content.parts) {
1060
- if (part.text != null) {
1061
- if (part.thought === true) {
1062
- thoughtContent += part.text;
1063
- if (part.thoughtSignature) lastThoughtSignature = part.thoughtSignature;
1064
- } else {
1065
- regularContent += part.text;
1066
- if (part.thoughtSignature) lastRegularThoughtSignature = part.thoughtSignature;
1067
- }
1068
- } else if (part.functionCall) {
1069
- functionCalls.push(part.functionCall);
1070
- } else if (part.inlineData) {
1071
- const raw = part.inlineData;
1072
- inlineDataParts.push({
1073
- data: raw.data ?? "",
1074
- mimeType: raw.mimeType ?? "image/png",
1075
- });
1076
- } else if (part.executableCode) {
1077
- executableCodeParts.push({
1078
- code: part.executableCode.code ?? "",
1079
- language: part.executableCode.language,
1080
- });
1081
- } else if (part.codeExecutionResult) {
1082
- codeExecutionResultParts.push({
1083
- output: part.codeExecutionResult.output ?? "",
1084
- outcome: part.codeExecutionResult.outcome ?? "OUTCOME_OK",
1085
- });
1086
- }
1087
- }
1088
- }
1089
- }
1090
-
1091
- return buildGoogleResponseFromParts(
1092
- thoughtContent,
1093
- regularContent,
1094
- functionCalls,
1095
- inlineDataParts,
1096
- executableCodeParts,
1097
- codeExecutionResultParts,
1098
- results[results.length - 1],
1099
- lastThoughtSignature,
1100
- lastRegularThoughtSignature
1101
- );
1102
- };
1103
-
1104
- export const googleStreamChat = (results: any[]) => {
1105
- return googleStreamResponse(results);
1106
- };
1107
-
1108
- export const googleStreamCompletion = (results: any[]) => {
1109
- return googleStreamResponse(results);
1110
- };
1111
-
1112
- const _RESPONSE_METADATA_KEYS = ["size", "quality", "background", "output_format"] as const;
1113
-
1114
- export const openaiImagesStream = (results: any[]) => {
1115
- const response_data: any = {
1116
- created: null,
1117
- data: [],
1118
- usage: null,
1119
- };
1120
-
1121
- const partial_images_by_index: Record<number, string> = {};
1122
- let output_format = "png";
1123
-
1124
- for (const chunk of results as any[]) {
1125
- const event_type = chunk?.type ?? "";
1126
-
1127
- if (event_type === "image_generation.partial_image") {
1128
- const b64 = chunk.b64_json;
1129
- if (b64 != null) {
1130
- const idx = chunk.partial_image_index ?? 0;
1131
- partial_images_by_index[idx] = b64;
1132
- }
1133
- } else if (event_type === "image_generation.completed") {
1134
- const b64 = chunk.b64_json;
1135
- if (b64 != null) {
1136
- const idx = chunk.partial_image_index ?? 0;
1137
- partial_images_by_index[idx] = b64;
1138
- }
1139
- response_data.created = chunk.created_at ?? response_data.created;
1140
- response_data.usage = chunk.usage ?? response_data.usage;
1141
- for (const key of _RESPONSE_METADATA_KEYS) {
1142
- if (chunk[key] != null) response_data[key] = chunk[key];
1143
- }
1144
- if (chunk.output_format) output_format = chunk.output_format;
1145
- }
1146
- }
1147
-
1148
- if (!response_data.output_format) {
1149
- response_data.output_format = output_format;
1150
- }
1151
-
1152
- const indices = Object.keys(partial_images_by_index)
1153
- .map(Number)
1154
- .sort((a, b) => a - b);
1155
- if (indices.length > 0) {
1156
- response_data.data = indices.map((idx) => ({
1157
- b64_json: partial_images_by_index[idx],
1158
- }));
1159
- }
1160
-
1161
- return response_data;
1162
- };
1163
-
1164
- export const cleaned_result = (
1165
- results: any[],
1166
- function_name = "openai.chat.completions.create"
1167
- ) => {
1168
- if (
1169
- function_name === "openai.responses.create" ||
1170
- function_name === "openai.AzureOpenAI.responses.create"
1171
- ) {
1172
- return openaiResponsesStreamChat(results);
1173
- }
1174
-
1175
- if (
1176
- function_name === "openai.images.generate" ||
1177
- function_name === "openai.AzureOpenAI.images.generate"
1178
- ) {
1179
- return openaiImagesStream(results);
1180
- }
1181
-
1182
- if ("completion" in results[0]) {
1183
- return results.reduce(
1184
- (prev, current) => ({
1185
- ...current,
1186
- completion: `${prev.completion}${current.completion}`,
1187
- }),
1188
- {}
1189
- );
1190
- }
1191
-
1192
- if (function_name === "anthropic.messages.create")
1193
- return anthropicStreamMessage(results);
1194
-
1195
- if ("text" in results[0].choices[0]) {
1196
- let response = "";
1197
- for (const result of results) {
1198
- response = `${response}${result.choices[0].text}`;
1199
- }
1200
- const final_result = structuredClone(results.at(-1));
1201
- final_result.choices[0].text = response;
1202
- return final_result;
1203
- }
1204
-
1205
- if ("delta" in results[0].choices[0]) {
1206
- const response = openaiStreamChat(results);
1207
- response.choices[0] = {
1208
- ...response.choices[0],
1209
- ...response.choices[0].message,
1210
- };
1211
- return response;
1212
- }
1213
-
1214
- return "";
1215
- };
1216
-
1217
-
1218
- const buildStreamBlueprint = (
1219
- result: any,
1220
- metadata: any,
1221
- streamContext?: { anthropicBlockTypeByIndex?: Record<number, string> }
1222
- ) => {
1223
- const provider = metadata.model.provider;
1224
- const model = metadata.model.name;
1225
-
1226
- if (provider === "anthropic" || provider === "anthropic.bedrock" || (provider === "vertexai" && model.startsWith("claude"))) {
1227
- return buildPromptBlueprintFromAnthropicEvent(result, metadata, streamContext?.anthropicBlockTypeByIndex);
1228
- }
1229
-
1230
- if (provider === "google" || (provider === "vertexai" && model.startsWith("gemini"))) {
1231
- return buildPromptBlueprintFromGoogleEvent(result, metadata);
1232
- }
1233
-
1234
- if (provider === "amazon.bedrock") {
1235
- return buildPromptBlueprintFromBedrockEvent(result, metadata);
1236
- }
1237
-
1238
- if (provider === "mistral") {
1239
- return buildPromptBlueprintFromOpenAIEvent(result.data, metadata);
1240
- }
1241
-
1242
- if (provider === "openai" || provider === "openai.azure") {
1243
- const api_type = metadata.model.api_type || "chat-completions";
1244
- if (api_type === "responses") {
1245
- return buildPromptBlueprintFromOpenAIResponsesEvent(result, metadata);
1246
- }
1247
- if (api_type === "images") {
1248
- return buildPromptBlueprintFromOpenAIImagesEvent(result, metadata);
1249
- }
1250
- return buildPromptBlueprintFromOpenAIEvent(result, metadata);
1251
- }
1252
-
1253
- return null;
1254
- }
1255
-
1256
- export async function* streamResponse<Item>(
1257
- generator: AsyncIterable<Item> | any,
1258
- afterStream: (body: object) => any,
1259
- mapResults: any,
1260
- metadata: any
1261
- ) {
1262
- const data: {
1263
- request_id: number | null;
1264
- raw_response: any;
1265
- prompt_blueprint: any;
1266
- } = {
1267
- request_id: null,
1268
- raw_response: null,
1269
- prompt_blueprint: null,
1270
- };
1271
- let response_metadata: any = {};
1272
- const provider = metadata.model.provider;
1273
- if (provider == "amazon.bedrock") {
1274
- response_metadata = generator?.$metadata;
1275
- generator = generator?.stream;
1276
- }
1277
- const results = [];
1278
- const isAnthropic = provider === "anthropic" || provider === "anthropic.bedrock" || (provider === "vertexai" && metadata.model?.name?.startsWith?.("claude"));
1279
- const anthropicBlockTypeByIndex: Record<number, string> = {};
1280
- for await (const result of generator) {
1281
- results.push(result);
1282
- if (isAnthropic && result?.type === "content_block_start" && result.content_block) {
1283
- anthropicBlockTypeByIndex[result.index] = result.content_block.type;
1284
- }
1285
- data.raw_response = result;
1286
- data.prompt_blueprint = buildStreamBlueprint(result, metadata, { anthropicBlockTypeByIndex });
1287
-
1288
- yield data;
1289
- }
1290
- const request_response = mapResults(results);
1291
- if (provider === "amazon.bedrock") {
1292
- request_response.ResponseMetadata = response_metadata;
1293
- }
1294
- data.raw_response = request_response;
1295
- data.prompt_blueprint = buildStreamBlueprint(request_response, metadata);
1296
- yield data;
1297
- const response = await afterStream({ request_response });
1298
- data.request_id = response.request_id;
1299
- data.prompt_blueprint = response.prompt_blueprint;
1300
- yield data;
1301
- }
1302
-
1303
/**
 * Maps each provider (and, for OpenAI, each API flavor after the colon) to
 * the SDK function name recorded for request logging and the aggregator
 * that folds that provider's stream chunks into a single response object.
 * Entries are split by `chat` vs `completion` call style.
 */
export const MAP_PROVIDER_TO_FUNCTION_NAME = {
  "openai:chat-completions": {
    chat: {
      function_name: "openai.chat.completions.create",
      stream_function: openaiStreamChat,
    },
    completion: {
      function_name: "openai.completions.create",
      stream_function: openaiStreamCompletion,
    },
  },
  "openai:responses": {
    chat: {
      function_name: "openai.responses.create",
      stream_function: openaiResponsesStreamChat,
    },
  },
  "openai:images": {
    completion: {
      function_name: "openai.images.generate",
      stream_function: openaiImagesStream,
    },
  },
  anthropic: {
    chat: {
      function_name: "anthropic.messages.create",
      stream_function: anthropicStreamMessage,
    },
    completion: {
      function_name: "anthropic.completions.create",
      stream_function: anthropicStreamCompletion,
    },
  },
  "openai.azure:chat-completions": {
    chat: {
      function_name: "openai.AzureOpenAI.chat.completions.create",
      stream_function: openaiStreamChat,
    },
    completion: {
      function_name: "openai.AzureOpenAI.completions.create",
      stream_function: openaiStreamCompletion,
    },
  },
  // Azure's responses API uses the same entry for both call styles.
  "openai.azure:responses": {
    chat: {
      function_name: "openai.AzureOpenAI.responses.create",
      stream_function: openaiResponsesStreamChat,
    },
    completion: {
      function_name: "openai.AzureOpenAI.responses.create",
      stream_function: openaiResponsesStreamChat,
    },
  },
  "openai.azure:images": {
    completion: {
      function_name: "openai.AzureOpenAI.images.generate",
      stream_function: openaiImagesStream,
    },
  },
  google: {
    chat: {
      function_name: "google.convo.send_message",
      stream_function: googleStreamChat,
    },
    completion: {
      function_name: "google.model.generate_content",
      stream_function: googleStreamCompletion,
    },
  },
  // Bedrock's Converse API serves both chat and completion styles.
  "amazon.bedrock": {
    chat: {
      function_name: "boto3.bedrock-runtime.converse",
      stream_function: bedrockStreamMessage,
    },
    completion: {
      function_name: "boto3.bedrock-runtime.converse",
      stream_function: bedrockStreamMessage,
    },
  },
  "anthropic.bedrock": {
    chat: {
      function_name: "anthropic.messages.create",
      stream_function: anthropicStreamMessage,
    },
    completion: {
      function_name: "anthropic.completions.create",
      stream_function: anthropicStreamCompletion,
    },
  },
  mistral: {
    chat: {
      function_name: "mistral.client.chat",
      stream_function: mistralStreamChat,
    },
    // No legacy completion endpoint is mapped for Mistral.
    completion: {
      function_name: "",
      stream_function: null,
    },
  },
};