ai 4.0.0-canary.7 → 4.0.0-canary.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,23 @@
  # ai
 
+ ## 4.0.0-canary.9
+
+ ### Patch Changes
+
+ - f0ec721: chore (ai): remove openai peer dependency
+
+ ## 4.0.0-canary.8
+
+ ### Major Changes
+
+ - 007cb81: chore (ai): change `streamText` warnings result to Promise
+
+ ### Patch Changes
+
+ - Updated dependencies [70f28f6]
+   - @ai-sdk/ui-utils@1.0.0-canary.6
+   - @ai-sdk/react@1.0.0-canary.6
+
  ## 4.0.0-canary.7
 
  ### Major Changes
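
For consumers, the `007cb81` major change means `StreamTextResult.warnings` must now be awaited. A minimal migration sketch, assuming an `@ai-sdk/openai` provider and the 4.0 call style in which `streamText` returns its result object directly; neither assumption is part of this diff:

```ts
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai';

async function main() {
  const result = streamText({
    model: openai('gpt-4o'), // illustrative provider/model
    prompt: 'Write a haiku about package diffs.',
  });

  // Consume the stream as before.
  for await (const delta of result.textStream) {
    process.stdout.write(delta);
  }

  // `warnings` is now a Promise<CallWarning[] | undefined>; it is resolved
  // in the stream's flush handler (see the index.js hunks below), so await it.
  const warnings = await result.warnings;
  if (warnings?.length) {
    console.warn('provider warnings:', warnings);
  }
}

main().catch(console.error);
```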
package/dist/index.d.mts CHANGED
@@ -7,8 +7,6 @@ export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, I
  import { z } from 'zod';
  import { ServerResponse } from 'http';
  import { ServerResponse as ServerResponse$1 } from 'node:http';
- import { AssistantStream } from 'openai/lib/AssistantStream';
- import { Run } from 'openai/resources/beta/threads/runs/runs';
 
  /**
  * Telemetry configuration.
@@ -1589,7 +1587,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
  /**
  Warnings from the model provider (e.g. unsupported settings) for the first step.
  */
- readonly warnings: CallWarning[] | undefined;
+ readonly warnings: Promise<CallWarning[] | undefined>;
  /**
  The total token usage of the generated response.
  When there are multiple steps, the usage is the sum of all step usages.
@@ -2162,7 +2160,7 @@ type AssistantResponseCallback = (options: {
  /**
  Forwards the assistant response stream to the client. Returns the `Run` object after it completes, or when it requires an action.
  */
- forwardStream: (stream: AssistantStream) => Promise<Run | undefined>;
+ forwardStream: (stream: any) => Promise<any | undefined>;
  }) => Promise<void>;
  /**
  The `AssistantResponse` allows you to send a stream of assistant update to `useAssistant`.
@@ -2213,7 +2211,7 @@ type LangChainStreamEvent = {
  data: any;
  };
  /**
- Converts LangChain output streams to AIStream.
+ Converts LangChain output streams to an AI SDK Data Stream.
 
  The following streams are supported:
  - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
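
With the `openai` peer dependency removed, `forwardStream` is typed as `(stream: any) => Promise<any | undefined>` rather than in terms of `AssistantStream` and `Run`. At runtime an OpenAI run stream can still be forwarded; the sketch below shows a typical assistant route handler, where the OpenAI client calls and the `ASSISTANT_ID` environment variable are illustrative assumptions, not part of this diff:

```ts
import OpenAI from 'openai';
import { AssistantResponse } from 'ai';

const client = new OpenAI();

export async function POST(req: Request) {
  const { threadId, message } = await req.json();

  // Add the user message to the thread.
  const createdMessage = await client.beta.threads.messages.create(threadId, {
    role: 'user',
    content: message,
  });

  return AssistantResponse(
    { threadId, messageId: createdMessage.id },
    async ({ forwardStream }) => {
      // `forwardStream` now accepts `any`; the OpenAI AssistantStream still
      // works, and the `Run` type can be re-applied on the caller side.
      const runStream = client.beta.threads.runs.stream(threadId, {
        assistant_id: process.env.ASSISTANT_ID!, // hypothetical env var
      });
      const run = await forwardStream(runStream);
      console.log('run status:', run?.status);
    },
  );
}
```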
package/dist/index.d.ts CHANGED
@@ -7,8 +7,6 @@ export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, I
  import { z } from 'zod';
  import { ServerResponse } from 'http';
  import { ServerResponse as ServerResponse$1 } from 'node:http';
- import { AssistantStream } from 'openai/lib/AssistantStream';
- import { Run } from 'openai/resources/beta/threads/runs/runs';
 
  /**
  * Telemetry configuration.
@@ -1589,7 +1587,7 @@ interface StreamTextResult<TOOLS extends Record<string, CoreTool>> {
  /**
  Warnings from the model provider (e.g. unsupported settings) for the first step.
  */
- readonly warnings: CallWarning[] | undefined;
+ readonly warnings: Promise<CallWarning[] | undefined>;
  /**
  The total token usage of the generated response.
  When there are multiple steps, the usage is the sum of all step usages.
@@ -2162,7 +2160,7 @@ type AssistantResponseCallback = (options: {
  /**
  Forwards the assistant response stream to the client. Returns the `Run` object after it completes, or when it requires an action.
  */
- forwardStream: (stream: AssistantStream) => Promise<Run | undefined>;
+ forwardStream: (stream: any) => Promise<any | undefined>;
  }) => Promise<void>;
  /**
  The `AssistantResponse` allows you to send a stream of assistant update to `useAssistant`.
@@ -2213,7 +2211,7 @@ type LangChainStreamEvent = {
  data: any;
  };
  /**
- Converts LangChain output streams to AIStream.
+ Converts LangChain output streams to an AI SDK Data Stream.
 
  The following streams are supported:
  - `LangChainAIMessageChunk` streams (LangChain `model.stream` output)
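
The reworded doc comment belongs to the LangChain adapter, which turns LangChain output streams into an AI SDK Data Stream. A short usage sketch, assuming the `LangChainAdapter.toDataStreamResponse` helper and an illustrative `@langchain/openai` chat model:

```ts
import { LangChainAdapter } from 'ai';
import { ChatOpenAI } from '@langchain/openai';

export async function POST(req: Request) {
  const { prompt } = await req.json();

  // `model.stream` yields LangChainAIMessageChunk values, one of the
  // supported stream shapes listed in the doc comment above.
  const model = new ChatOpenAI({ model: 'gpt-4o' });
  const stream = await model.stream(prompt);

  // Convert the LangChain stream into an AI SDK Data Stream response.
  return LangChainAdapter.toDataStreamResponse(stream);
}
```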
package/dist/index.js CHANGED
@@ -4063,7 +4063,7 @@ var DefaultStreamTextResult = class {
  generateId: generateId3,
  tools
  }) {
- this.warnings = warnings;
+ this.rawWarnings = warnings;
  this.rawResponse = rawResponse;
  const { resolve: resolveUsage, promise: usagePromise } = createResolvablePromise();
  this.usage = usagePromise;
@@ -4086,6 +4086,8 @@ var DefaultStreamTextResult = class {
  this.request = requestPromise;
  const { resolve: resolveResponse, promise: responsePromise } = createResolvablePromise();
  this.response = responsePromise;
+ const { resolve: resolveWarnings, promise: warningsPromise } = createResolvablePromise();
+ this.warnings = warningsPromise;
  const {
  stream: stitchableStream,
  addStream,
@@ -4239,7 +4241,7 @@ var DefaultStreamTextResult = class {
  },
  // invoke onFinish callback and resolve toolResults promise when the stream is about to close:
  async flush(controller) {
- var _a11;
+ var _a11, _b;
  const stepToolCallsJson = stepToolCalls.length > 0 ? JSON.stringify(stepToolCalls) : void 0;
  let nextStepType = "done";
  if (currentStep + 1 < maxSteps) {
@@ -4331,7 +4333,7 @@ var DefaultStreamTextResult = class {
  toolResults: stepToolResults,
  finishReason: stepFinishReason,
  usage: stepUsage,
- warnings: self.warnings,
+ warnings: self.rawWarnings,
  logprobs: stepLogProbs,
  request: stepRequest,
  response: {
@@ -4356,7 +4358,7 @@ var DefaultStreamTextResult = class {
  doStreamSpan: doStreamSpan3,
  startTimestampMs: startTimestamp2
  } = await startStep({ responseMessages });
- self.warnings = result.warnings;
+ self.rawWarnings = result.warnings;
  self.rawResponse = result.rawResponse;
  addStepStream({
  stream: result.stream,
@@ -4411,6 +4413,7 @@ var DefaultStreamTextResult = class {
  messages: responseMessages
  });
  resolveSteps(stepResults);
+ resolveWarnings((_b = self.rawWarnings) != null ? _b : []);
  await (onFinish == null ? void 0 : onFinish({
  finishReason: stepFinishReason,
  logprobs: stepLogProbs,
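
Internally the change reuses the resolvable ("deferred") promise pattern already applied to `usage`, `request`, and `response`: the promise is created in the constructor and resolved in the stream's `flush` handler. A standalone sketch of that pattern; `createResolvablePromise` below is a hypothetical stand-in for the package-internal helper of the same name:

```ts
// Hypothetical stand-in for the package-internal createResolvablePromise.
function createResolvablePromise<T>(): {
  promise: Promise<T>;
  resolve: (value: T) => void;
} {
  let resolve!: (value: T) => void;
  const promise = new Promise<T>((res) => {
    resolve = res;
  });
  return { promise, resolve };
}

// Expose `warnings` as a promise up front, as the constructor hunk does
// (the real code uses CallWarning[] instead of string[]).
const { promise: warnings, resolve: resolveWarnings } =
  createResolvablePromise<string[] | undefined>();

// Resolve it when the stream's flush handler runs, falling back to []
// when no warnings were recorded, mirroring the flush hunk above.
function onFlush(rawWarnings: string[] | undefined) {
  resolveWarnings(rawWarnings ?? []);
}

// Callers can await `warnings` at any time; it settles once flush has run.
void warnings.then((w) => console.log('warnings:', w));
onFlush(undefined);
```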