@zenning/openai 1.4.5 → 1.4.6

This diff shows the content changes between package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the package versions as they appear in their respective public registries.
@@ -2704,37 +2704,58 @@ var OpenAIResponsesLanguageModel = class {
2704
2704
  }
2705
2705
  }
2706
2706
  const reasoningSummary = (_b = (_a15 = response.output.find((item) => item.type === "reasoning")) == null ? void 0 : _a15.summary) != null ? _b : null;
2707
- console.log(JSON.stringify({
2707
+ const allAnnotations = outputTextElements.flatMap((content) => content.annotations);
2708
+ console.log("\u{1F4CB} Processing annotations in doGenerate:", JSON.stringify({
2708
2709
  msg: "ai-sdk: content annotations",
2709
- annotations: outputTextElements.flatMap((content) => content.annotations)
2710
- }));
2710
+ count: allAnnotations.length,
2711
+ annotations: allAnnotations
2712
+ }, null, 2));
2711
2713
  return {
2712
2714
  text: outputTextElements.map((content) => content.text).join("\n"),
2713
2715
  sources: outputTextElements.flatMap(
2714
2716
  (content) => content.annotations.map((annotation) => {
2715
- var _a16, _b2, _c2, _d2, _e2, _f2, _g2, _h, _i;
2716
- if (annotation.type === "url_citation") {
2717
- return {
2718
- sourceType: "url",
2719
- id: (_c2 = (_b2 = (_a16 = this.config).generateId) == null ? void 0 : _b2.call(_a16)) != null ? _c2 : generateId2(),
2720
- url: annotation.url,
2721
- title: annotation.title
2722
- };
2723
- } else if (annotation.type === "file_citation") {
2724
- return {
2725
- sourceType: "document",
2726
- id: (_f2 = (_e2 = (_d2 = this.config).generateId) == null ? void 0 : _e2.call(_d2)) != null ? _f2 : generateId2(),
2727
- mediaType: "text/plain",
2728
- title: annotation.quote || annotation.filename || "Document",
2729
- filename: annotation.filename,
2730
- quote: annotation.quote
2731
- };
2732
- } else {
2717
+ var _a16, _b2, _c2, _d2, _e2, _f2, _g2, _h, _i, _j, _k, _l;
2718
+ console.log("\u{1F517} Processing annotation for source:", JSON.stringify(annotation, null, 2));
2719
+ try {
2720
+ if (annotation.type === "url_citation") {
2721
+ const urlSource = {
2722
+ sourceType: "url",
2723
+ id: (_c2 = (_b2 = (_a16 = this.config).generateId) == null ? void 0 : _b2.call(_a16)) != null ? _c2 : generateId2(),
2724
+ url: annotation.url,
2725
+ title: annotation.title
2726
+ };
2727
+ console.log("\u2705 Created URL source:", JSON.stringify(urlSource, null, 2));
2728
+ return urlSource;
2729
+ } else if (annotation.type === "file_citation") {
2730
+ const documentSource = {
2731
+ sourceType: "document",
2732
+ id: (_f2 = (_e2 = (_d2 = this.config).generateId) == null ? void 0 : _e2.call(_d2)) != null ? _f2 : generateId2(),
2733
+ mediaType: "text/plain",
2734
+ title: annotation.quote || annotation.filename || "Document",
2735
+ filename: annotation.filename,
2736
+ quote: annotation.quote
2737
+ };
2738
+ console.log("\u{1F4C4} Created document source:", JSON.stringify(documentSource, null, 2));
2739
+ return documentSource;
2740
+ } else {
2741
+ console.log("\u26A0\uFE0F Unknown annotation type in doGenerate:", annotation.type);
2742
+ return {
2743
+ sourceType: "url",
2744
+ id: (_i = (_h = (_g2 = this.config).generateId) == null ? void 0 : _h.call(_g2)) != null ? _i : generateId2(),
2745
+ url: "",
2746
+ title: "Unknown Source"
2747
+ };
2748
+ }
2749
+ } catch (error) {
2750
+ console.error("\u274C Error creating source in doGenerate:", {
2751
+ annotation,
2752
+ error: error instanceof Error ? error.message : String(error)
2753
+ });
2733
2754
  return {
2734
2755
  sourceType: "url",
2735
- id: (_i = (_h = (_g2 = this.config).generateId) == null ? void 0 : _h.call(_g2)) != null ? _i : generateId2(),
2756
+ id: (_l = (_k = (_j = this.config).generateId) == null ? void 0 : _k.call(_j)) != null ? _l : generateId2(),
2736
2757
  url: "",
2737
- title: "Unknown Source"
2758
+ title: "Error Source"
2738
2759
  };
2739
2760
  }
2740
2761
  })
@@ -2779,24 +2800,40 @@ var OpenAIResponsesLanguageModel = class {
2779
2800
  };
2780
2801
  }
2781
2802
  async doStream(options) {
2803
+ console.log("\u{1F680} Starting doStream with options:", JSON.stringify({
2804
+ modelId: this.modelId,
2805
+ hasAbortSignal: !!options.abortSignal
2806
+ }, null, 2));
2782
2807
  const { args: body, warnings } = this.getArgs(options);
2783
- const { responseHeaders, value: response } = await postJsonToApi6({
2784
- url: this.config.url({
2785
- path: "/responses",
2786
- modelId: this.modelId
2787
- }),
2788
- headers: combineHeaders7(this.config.headers(), options.headers),
2789
- body: {
2790
- ...body,
2791
- stream: true
2792
- },
2793
- failedResponseHandler: openaiFailedResponseHandler,
2794
- successfulResponseHandler: createEventSourceResponseHandler3(
2795
- openaiResponsesChunkSchema
2796
- ),
2797
- abortSignal: options.abortSignal,
2798
- fetch: this.config.fetch
2799
- });
2808
+ console.log("\u{1F4E4} Request body:", JSON.stringify(body, null, 2));
2809
+ let response;
2810
+ let responseHeaders;
2811
+ try {
2812
+ console.log("\u{1F4E1} Making API request...");
2813
+ const result = await postJsonToApi6({
2814
+ url: this.config.url({
2815
+ path: "/responses",
2816
+ modelId: this.modelId
2817
+ }),
2818
+ headers: combineHeaders7(this.config.headers(), options.headers),
2819
+ body: {
2820
+ ...body,
2821
+ stream: true
2822
+ },
2823
+ failedResponseHandler: openaiFailedResponseHandler,
2824
+ successfulResponseHandler: createEventSourceResponseHandler3(
2825
+ openaiResponsesChunkSchema
2826
+ ),
2827
+ abortSignal: options.abortSignal,
2828
+ fetch: this.config.fetch
2829
+ });
2830
+ response = result.value;
2831
+ responseHeaders = result.responseHeaders;
2832
+ console.log("\u2705 API request successful, starting stream processing");
2833
+ } catch (error) {
2834
+ console.error("\u274C API request failed:", error);
2835
+ throw error;
2836
+ }
2800
2837
  const self = this;
2801
2838
  let finishReason = "unknown";
2802
2839
  let promptTokens = NaN;
@@ -2811,173 +2848,213 @@ var OpenAIResponsesLanguageModel = class {
2811
2848
  new TransformStream({
2812
2849
  transform(chunk, controller) {
2813
2850
  var _a15, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k;
2814
- if (!chunk.success) {
2815
- finishReason = "error";
2816
- controller.enqueue({ type: "error", error: chunk.error });
2817
- return;
2818
- }
2819
- const value = chunk.value;
2820
- if (isResponseOutputItemAddedChunk(value)) {
2821
- if (value.item.type === "function_call") {
2822
- ongoingToolCalls[value.output_index] = {
2823
- toolName: value.item.name,
2824
- toolCallId: value.item.call_id
2825
- };
2826
- controller.enqueue({
2827
- type: "tool-call-delta",
2828
- toolCallType: "function",
2829
- toolCallId: value.item.call_id,
2830
- toolName: value.item.name,
2831
- argsTextDelta: value.item.arguments
2832
- });
2833
- } else if (value.item.type === "web_search_call") {
2834
- ongoingToolCalls[value.output_index] = {
2835
- toolName: "web_search_preview",
2836
- toolCallId: value.item.id
2837
- };
2851
+ try {
2852
+ console.log("\u{1F4E6} Processing chunk:", JSON.stringify(chunk, null, 2));
2853
+ if (!chunk.success) {
2854
+ console.error("\u274C Chunk parsing failed:", chunk.error);
2855
+ finishReason = "error";
2856
+ controller.enqueue({ type: "error", error: chunk.error });
2857
+ return;
2858
+ }
2859
+ const value = chunk.value;
2860
+ console.log("\u{1F4E5} Chunk value type:", value.type);
2861
+ if (isResponseOutputItemAddedChunk(value)) {
2862
+ console.log("\u{1F4DD} Output item added:", JSON.stringify(value, null, 2));
2863
+ if (value.item.type === "function_call") {
2864
+ ongoingToolCalls[value.output_index] = {
2865
+ toolName: value.item.name,
2866
+ toolCallId: value.item.call_id
2867
+ };
2868
+ controller.enqueue({
2869
+ type: "tool-call-delta",
2870
+ toolCallType: "function",
2871
+ toolCallId: value.item.call_id,
2872
+ toolName: value.item.name,
2873
+ argsTextDelta: value.item.arguments
2874
+ });
2875
+ } else if (value.item.type === "web_search_call") {
2876
+ ongoingToolCalls[value.output_index] = {
2877
+ toolName: "web_search_preview",
2878
+ toolCallId: value.item.id
2879
+ };
2880
+ controller.enqueue({
2881
+ type: "tool-call-delta",
2882
+ toolCallType: "function",
2883
+ toolCallId: value.item.id,
2884
+ toolName: "web_search_preview",
2885
+ argsTextDelta: JSON.stringify({ action: value.item.action })
2886
+ });
2887
+ } else if (value.item.type === "computer_call") {
2888
+ ongoingToolCalls[value.output_index] = {
2889
+ toolName: "computer_use",
2890
+ toolCallId: value.item.id
2891
+ };
2892
+ controller.enqueue({
2893
+ type: "tool-call-delta",
2894
+ toolCallType: "function",
2895
+ toolCallId: value.item.id,
2896
+ toolName: "computer_use",
2897
+ argsTextDelta: ""
2898
+ });
2899
+ } else if (value.item.type === "file_search_call") {
2900
+ ongoingToolCalls[value.output_index] = {
2901
+ toolName: "file_search",
2902
+ toolCallId: value.item.id
2903
+ };
2904
+ controller.enqueue({
2905
+ type: "tool-call-delta",
2906
+ toolCallType: "function",
2907
+ toolCallId: value.item.id,
2908
+ toolName: "file_search",
2909
+ argsTextDelta: ""
2910
+ });
2911
+ }
2912
+ } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
2913
+ console.log("\u{1F527} Function call arguments delta:", JSON.stringify(value, null, 2));
2914
+ const toolCall = ongoingToolCalls[value.output_index];
2915
+ if (toolCall != null) {
2916
+ controller.enqueue({
2917
+ type: "tool-call-delta",
2918
+ toolCallType: "function",
2919
+ toolCallId: toolCall.toolCallId,
2920
+ toolName: toolCall.toolName,
2921
+ argsTextDelta: value.delta
2922
+ });
2923
+ }
2924
+ } else if (isResponseCreatedChunk(value)) {
2925
+ console.log("\u{1F680} Response created:", JSON.stringify(value, null, 2));
2926
+ responseId = value.response.id;
2838
2927
  controller.enqueue({
2839
- type: "tool-call-delta",
2840
- toolCallType: "function",
2841
- toolCallId: value.item.id,
2842
- toolName: "web_search_preview",
2843
- argsTextDelta: JSON.stringify({ action: value.item.action })
2928
+ type: "response-metadata",
2929
+ id: value.response.id,
2930
+ timestamp: new Date(value.response.created_at * 1e3),
2931
+ modelId: value.response.model
2844
2932
  });
2845
- } else if (value.item.type === "computer_call") {
2846
- ongoingToolCalls[value.output_index] = {
2847
- toolName: "computer_use",
2848
- toolCallId: value.item.id
2849
- };
2933
+ } else if (isTextDeltaChunk(value)) {
2934
+ console.log("\u{1F4DD} Text delta chunk:", JSON.stringify(value, null, 2));
2850
2935
  controller.enqueue({
2851
- type: "tool-call-delta",
2852
- toolCallType: "function",
2853
- toolCallId: value.item.id,
2854
- toolName: "computer_use",
2855
- argsTextDelta: ""
2936
+ type: "text-delta",
2937
+ textDelta: value.delta
2856
2938
  });
2857
- } else if (value.item.type === "file_search_call") {
2858
- ongoingToolCalls[value.output_index] = {
2859
- toolName: "file_search",
2860
- toolCallId: value.item.id
2861
- };
2939
+ } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
2940
+ console.log("\u{1F9E0} Reasoning summary delta:", JSON.stringify(value, null, 2));
2862
2941
  controller.enqueue({
2863
- type: "tool-call-delta",
2864
- toolCallType: "function",
2865
- toolCallId: value.item.id,
2866
- toolName: "file_search",
2867
- argsTextDelta: ""
2942
+ type: "reasoning",
2943
+ textDelta: value.delta
2868
2944
  });
2869
- }
2870
- } else if (isResponseFunctionCallArgumentsDeltaChunk(value)) {
2871
- const toolCall = ongoingToolCalls[value.output_index];
2872
- if (toolCall != null) {
2873
- controller.enqueue({
2874
- type: "tool-call-delta",
2875
- toolCallType: "function",
2876
- toolCallId: toolCall.toolCallId,
2877
- toolName: toolCall.toolName,
2878
- argsTextDelta: value.delta
2945
+ } else if (isResponseOutputItemDoneChunk(value)) {
2946
+ console.log("\u2705 Output item done:", JSON.stringify(value, null, 2));
2947
+ if (value.item.type === "function_call") {
2948
+ ongoingToolCalls[value.output_index] = void 0;
2949
+ hasToolCalls = true;
2950
+ controller.enqueue({
2951
+ type: "tool-call",
2952
+ toolCallType: "function",
2953
+ toolCallId: value.item.call_id,
2954
+ toolName: value.item.name,
2955
+ args: value.item.arguments
2956
+ });
2957
+ } else if (value.item.type === "web_search_call") {
2958
+ ongoingToolCalls[value.output_index] = void 0;
2959
+ hasToolCalls = true;
2960
+ controller.enqueue({
2961
+ type: "tool-call",
2962
+ toolCallType: "function",
2963
+ toolCallId: value.item.id,
2964
+ toolName: "web_search_preview",
2965
+ args: JSON.stringify({ action: value.item.action })
2966
+ });
2967
+ } else if (value.item.type === "computer_call") {
2968
+ ongoingToolCalls[value.output_index] = void 0;
2969
+ hasToolCalls = true;
2970
+ controller.enqueue({
2971
+ type: "tool-call",
2972
+ toolCallType: "function",
2973
+ toolCallId: value.item.id,
2974
+ toolName: "computer_use",
2975
+ args: ""
2976
+ });
2977
+ } else if (value.item.type === "file_search_call") {
2978
+ ongoingToolCalls[value.output_index] = void 0;
2979
+ hasToolCalls = true;
2980
+ controller.enqueue({
2981
+ type: "tool-call",
2982
+ toolCallType: "function",
2983
+ toolCallId: value.item.id,
2984
+ toolName: "file_search",
2985
+ args: JSON.stringify({
2986
+ queries: value.item.queries,
2987
+ results: value.item.results
2988
+ })
2989
+ });
2990
+ }
2991
+ } else if (isResponseFinishedChunk(value)) {
2992
+ console.log("\u{1F3C1} Response finished:", JSON.stringify(value, null, 2));
2993
+ finishReason = mapOpenAIResponseFinishReason({
2994
+ finishReason: (_a15 = value.response.incomplete_details) == null ? void 0 : _a15.reason,
2995
+ hasToolCalls
2879
2996
  });
2997
+ promptTokens = value.response.usage.input_tokens;
2998
+ completionTokens = value.response.usage.output_tokens;
2999
+ cachedPromptTokens = (_c = (_b = value.response.usage.input_tokens_details) == null ? void 0 : _b.cached_tokens) != null ? _c : cachedPromptTokens;
3000
+ reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : reasoningTokens;
3001
+ } else if (isResponseAnnotationAddedChunk(value)) {
3002
+ console.log("\u{1F50D} Processing annotation chunk:", JSON.stringify({
3003
+ type: value.type,
3004
+ annotation: value.annotation
3005
+ }, null, 2));
3006
+ try {
3007
+ if (value.annotation.type === "url_citation") {
3008
+ const urlSource = {
3009
+ sourceType: "url",
3010
+ id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : generateId2(),
3011
+ url: value.annotation.url,
3012
+ title: value.annotation.title
3013
+ };
3014
+ console.log("\u2705 Creating URL source:", JSON.stringify(urlSource, null, 2));
3015
+ controller.enqueue({
3016
+ type: "source",
3017
+ source: urlSource
3018
+ });
3019
+ console.log("\u2705 URL source enqueued successfully");
3020
+ } else if (value.annotation.type === "file_citation") {
3021
+ const documentSource = {
3022
+ sourceType: "document",
3023
+ id: (_k = (_j = (_i = self.config).generateId) == null ? void 0 : _j.call(_i)) != null ? _k : generateId2(),
3024
+ mediaType: "text/plain",
3025
+ title: value.annotation.quote || value.annotation.filename || "Document",
3026
+ filename: value.annotation.filename,
3027
+ quote: value.annotation.quote
3028
+ };
3029
+ console.log("\u{1F4C4} Creating document source:", JSON.stringify(documentSource, null, 2));
3030
+ controller.enqueue({
3031
+ type: "source",
3032
+ source: documentSource
3033
+ });
3034
+ console.log("\u2705 Document source enqueued successfully");
3035
+ } else {
3036
+ console.log("\u26A0\uFE0F Unknown annotation type:", value.annotation.type);
3037
+ }
3038
+ } catch (error) {
3039
+ console.error("\u274C Error processing annotation:", {
3040
+ annotation: value.annotation,
3041
+ error: error instanceof Error ? error.message : String(error),
3042
+ stack: error instanceof Error ? error.stack : void 0
3043
+ });
3044
+ }
3045
+ } else {
3046
+ console.log("\u2753 Unhandled chunk type:", value.type, JSON.stringify(value, null, 2));
2880
3047
  }
2881
- } else if (isResponseCreatedChunk(value)) {
2882
- responseId = value.response.id;
2883
- controller.enqueue({
2884
- type: "response-metadata",
2885
- id: value.response.id,
2886
- timestamp: new Date(value.response.created_at * 1e3),
2887
- modelId: value.response.model
2888
- });
2889
- } else if (isTextDeltaChunk(value)) {
2890
- controller.enqueue({
2891
- type: "text-delta",
2892
- textDelta: value.delta
3048
+ } catch (error) {
3049
+ console.error("\u{1F4A5} FATAL ERROR in chunk processing:", {
3050
+ error: error instanceof Error ? error.message : String(error),
3051
+ stack: error instanceof Error ? error.stack : void 0
2893
3052
  });
2894
- } else if (isResponseReasoningSummaryTextDeltaChunk(value)) {
3053
+ finishReason = "error";
2895
3054
  controller.enqueue({
2896
- type: "reasoning",
2897
- textDelta: value.delta
3055
+ type: "error",
3056
+ error: error instanceof Error ? error : new Error(String(error))
2898
3057
  });
2899
- } else if (isResponseOutputItemDoneChunk(value)) {
2900
- if (value.item.type === "function_call") {
2901
- ongoingToolCalls[value.output_index] = void 0;
2902
- hasToolCalls = true;
2903
- controller.enqueue({
2904
- type: "tool-call",
2905
- toolCallType: "function",
2906
- toolCallId: value.item.call_id,
2907
- toolName: value.item.name,
2908
- args: value.item.arguments
2909
- });
2910
- } else if (value.item.type === "web_search_call") {
2911
- ongoingToolCalls[value.output_index] = void 0;
2912
- hasToolCalls = true;
2913
- controller.enqueue({
2914
- type: "tool-call",
2915
- toolCallType: "function",
2916
- toolCallId: value.item.id,
2917
- toolName: "web_search_preview",
2918
- args: JSON.stringify({ action: value.item.action })
2919
- });
2920
- } else if (value.item.type === "computer_call") {
2921
- ongoingToolCalls[value.output_index] = void 0;
2922
- hasToolCalls = true;
2923
- controller.enqueue({
2924
- type: "tool-call",
2925
- toolCallType: "function",
2926
- toolCallId: value.item.id,
2927
- toolName: "computer_use",
2928
- args: ""
2929
- });
2930
- } else if (value.item.type === "file_search_call") {
2931
- ongoingToolCalls[value.output_index] = void 0;
2932
- hasToolCalls = true;
2933
- controller.enqueue({
2934
- type: "tool-call",
2935
- toolCallType: "function",
2936
- toolCallId: value.item.id,
2937
- toolName: "file_search",
2938
- args: JSON.stringify({
2939
- queries: value.item.queries,
2940
- results: value.item.results
2941
- })
2942
- });
2943
- }
2944
- } else if (isResponseFinishedChunk(value)) {
2945
- finishReason = mapOpenAIResponseFinishReason({
2946
- finishReason: (_a15 = value.response.incomplete_details) == null ? void 0 : _a15.reason,
2947
- hasToolCalls
2948
- });
2949
- promptTokens = value.response.usage.input_tokens;
2950
- completionTokens = value.response.usage.output_tokens;
2951
- cachedPromptTokens = (_c = (_b = value.response.usage.input_tokens_details) == null ? void 0 : _b.cached_tokens) != null ? _c : cachedPromptTokens;
2952
- reasoningTokens = (_e = (_d = value.response.usage.output_tokens_details) == null ? void 0 : _d.reasoning_tokens) != null ? _e : reasoningTokens;
2953
- } else if (isResponseAnnotationAddedChunk(value)) {
2954
- console.log(JSON.stringify({
2955
- msg: "ai-sdk: source (stream)",
2956
- source: value.annotation
2957
- }));
2958
- if (value.annotation.type === "url_citation") {
2959
- controller.enqueue({
2960
- type: "source",
2961
- source: {
2962
- sourceType: "url",
2963
- id: (_h = (_g = (_f = self.config).generateId) == null ? void 0 : _g.call(_f)) != null ? _h : generateId2(),
2964
- url: value.annotation.url,
2965
- title: value.annotation.title
2966
- }
2967
- });
2968
- } else if (value.annotation.type === "file_citation") {
2969
- controller.enqueue({
2970
- type: "source",
2971
- source: {
2972
- sourceType: "document",
2973
- id: (_k = (_j = (_i = self.config).generateId) == null ? void 0 : _j.call(_i)) != null ? _k : generateId2(),
2974
- mediaType: "text/plain",
2975
- title: value.annotation.quote || value.annotation.filename || "Document",
2976
- filename: value.annotation.filename,
2977
- quote: value.annotation.quote
2978
- }
2979
- });
2980
- }
2981
3058
  }
2982
3059
  },
2983
3060
  flush(controller) {