ai 3.3.41 → 3.3.43

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -2915,33 +2915,8 @@ function prepareToolsAndToolChoice({
   };
 }
 
-// core/generate-text/to-response-messages.ts
-function toResponseMessages({
-  text = "",
-  toolCalls,
-  toolResults
-}) {
-  const responseMessages = [];
-  responseMessages.push({
-    role: "assistant",
-    content: [{ type: "text", text }, ...toolCalls]
-  });
-  if (toolResults.length > 0) {
-    responseMessages.push({
-      role: "tool",
-      content: toolResults.map((result) => ({
-        type: "tool-result",
-        toolCallId: result.toolCallId,
-        toolName: result.toolName,
-        result: result.result
-      }))
-    });
-  }
-  return responseMessages;
-}
-
-// core/generate-text/tool-call.ts
-import { safeParseJSON as safeParseJSON2 } from "@ai-sdk/provider-utils";
+// core/generate-text/parse-tool-call.ts
+import { safeParseJSON as safeParseJSON2, safeValidateTypes as safeValidateTypes3 } from "@ai-sdk/provider-utils";
 import { asSchema as asSchema3 } from "@ai-sdk/ui-utils";
 function parseToolCall({
   toolCall,
@@ -2958,10 +2933,8 @@ function parseToolCall({
       availableTools: Object.keys(tools)
     });
   }
-  const parseResult = safeParseJSON2({
-    text: toolCall.args,
-    schema: asSchema3(tool2.parameters)
-  });
+  const schema = asSchema3(tool2.parameters);
+  const parseResult = toolCall.args.trim() === "" ? safeValidateTypes3({ value: {}, schema }) : safeParseJSON2({ text: toolCall.args, schema });
   if (parseResult.success === false) {
     throw new InvalidToolArgumentsError({
       toolName,
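
The parseToolCall change above is a behavioral fix: when a provider returns a tool call with an empty args string (common for tools that take no parameters), the arguments are no longer fed to the JSON parser, which rejects ""; instead an empty object is validated against the tool's parameter schema. A minimal sketch of that branch, reusing the same helpers the bundle imports; the zod schema and the empty args value are illustrative assumptions:

import { z } from "zod";
import { safeParseJSON, safeValidateTypes } from "@ai-sdk/provider-utils";
import { asSchema } from "@ai-sdk/ui-utils";

// Hypothetical no-argument tool schema (assumption for illustration).
const schema = asSchema(z.object({}));
const args = ""; // what some providers send for a parameterless tool call

// Mirrors the new branch: empty args validate {} against the schema
// instead of JSON-parsing an empty string (which previously failed).
const parseResult = args.trim() === ""
  ? safeValidateTypes({ value: {}, schema })
  : safeParseJSON({ text: args, schema });
// parseResult.success === true here; parseResult.value is {}.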
@@ -2977,6 +2950,31 @@ function parseToolCall({
   };
 }
 
+// core/generate-text/to-response-messages.ts
+function toResponseMessages({
+  text = "",
+  toolCalls,
+  toolResults
+}) {
+  const responseMessages = [];
+  responseMessages.push({
+    role: "assistant",
+    content: [{ type: "text", text }, ...toolCalls]
+  });
+  if (toolResults.length > 0) {
+    responseMessages.push({
+      role: "tool",
+      content: toolResults.map((result) => ({
+        type: "tool-result",
+        toolCallId: result.toolCallId,
+        toolName: result.toolName,
+        result: result.result
+      }))
+    });
+  }
+  return responseMessages;
+}
+
 // core/generate-text/generate-text.ts
 var originalGenerateId3 = createIdGenerator3({ prefix: "aitxt-", size: 24 });
 async function generateText({
@@ -5335,6 +5333,48 @@ function forwardAIMessageChunk(chunk, controller) {
   }
 }
 
+// streams/llamaindex-adapter.ts
+var llamaindex_adapter_exports = {};
+__export(llamaindex_adapter_exports, {
+  toDataStream: () => toDataStream2,
+  toDataStreamResponse: () => toDataStreamResponse2
+});
+function toDataStream2(stream, callbacks) {
+  return toReadableStream(stream).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(createStreamDataTransformer());
+}
+function toDataStreamResponse2(stream, options = {}) {
+  var _a11;
+  const { init, data, callbacks } = options;
+  const dataStream = toDataStream2(stream, callbacks);
+  const responseStream = data ? mergeStreams(data.stream, dataStream) : dataStream;
+  return new Response(responseStream, {
+    status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
+    statusText: init == null ? void 0 : init.statusText,
+    headers: prepareResponseHeaders(init, {
+      contentType: "text/plain; charset=utf-8",
+      dataStreamVersion: "v1"
+    })
+  });
+}
+function toReadableStream(res) {
+  const it = res[Symbol.asyncIterator]();
+  const trimStartOfStream = trimStartOfStreamHelper();
+  return new ReadableStream({
+    async pull(controller) {
+      var _a11;
+      const { value, done } = await it.next();
+      if (done) {
+        controller.close();
+        return;
+      }
+      const text = trimStartOfStream((_a11 = value.delta) != null ? _a11 : "");
+      if (text) {
+        controller.enqueue(text);
+      }
+    }
+  });
+}
+
 // streams/langchain-stream.ts
 function LangChainStream(callbacks) {
   const stream = new TransformStream();
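
The new LlamaIndexAdapter converts an async iterable whose chunks expose a delta string (the shape toReadableStream reads above) into a data-stream Response. A hedged usage sketch for a web-standard route handler; the SimpleChatEngine and OpenAI imports from the llamaindex package are assumptions for illustration and are not part of this diff:

// Hypothetical route handler; only LlamaIndexAdapter is introduced by this diff.
import { LlamaIndexAdapter } from "ai";
import { OpenAI, SimpleChatEngine } from "llamaindex"; // assumed llamaindex exports

export async function POST(req) {
  const { prompt } = await req.json();
  const engine = new SimpleChatEngine({ llm: new OpenAI() });
  // Assumed to yield chunks carrying a `delta` field, which is what
  // the adapter's toReadableStream pulls from.
  const stream = await engine.chat({ message: prompt, stream: true });
  return LlamaIndexAdapter.toDataStreamResponse(stream);
}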
@@ -5825,6 +5865,7 @@ export {
   JSONParseError,
   langchain_adapter_exports as LangChainAdapter,
   LangChainStream,
+  llamaindex_adapter_exports as LlamaIndexAdapter,
   LoadAPIKeyError,
   MessageConversionError,
   MistralStream,