ai 4.0.0-canary.4 → 4.0.0-canary.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1499,19 +1499,18 @@ function calculateLanguageModelUsage(usage) {
 }
 
 // core/util/prepare-response-headers.ts
-function prepareResponseHeaders(init, {
+function prepareResponseHeaders(headers, {
   contentType,
   dataStreamVersion
 }) {
-  var _a11;
-  const headers = new Headers((_a11 = init == null ? void 0 : init.headers) != null ? _a11 : {});
-  if (!headers.has("Content-Type")) {
-    headers.set("Content-Type", contentType);
+  const responseHeaders = new Headers(headers != null ? headers : {});
+  if (!responseHeaders.has("Content-Type")) {
+    responseHeaders.set("Content-Type", contentType);
   }
   if (dataStreamVersion !== void 0) {
-    headers.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
+    responseHeaders.set("X-Vercel-AI-Data-Stream", dataStreamVersion);
   }
-  return headers;
+  return responseHeaders;
 }
 
 // core/generate-object/inject-json-instruction.ts
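
This hunk narrows `prepareResponseHeaders` to take the headers value itself rather than a whole `ResponseInit`, which also removes the `_a11` temporary; every call site updated below now passes `init == null ? void 0 : init.headers` instead of `init`. The Node-flavored sibling `prepareOutgoingHttpHeaders` receives the same treatment in a later hunk. A minimal before/after sketch of the call-site pattern (the surrounding caller is hypothetical):

    // before (canary.4): the utility unwrapped init.headers itself
    const h1 = prepareResponseHeaders(init, { contentType: "application/json; charset=utf-8" });

    // after (canary.6): the caller unwraps, the utility only merges defaults
    const h2 = prepareResponseHeaders(init == null ? void 0 : init.headers, {
      contentType: "application/json; charset=utf-8"
    });
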
@@ -2085,9 +2084,6 @@ async function generateObject({
   "ai.response.timestamp": responseData.timestamp.toISOString(),
   "ai.usage.promptTokens": result2.usage.promptTokens,
   "ai.usage.completionTokens": result2.usage.completionTokens,
-  // deprecated:
-  "ai.finishReason": result2.finishReason,
-  "ai.result.object": { output: () => result2.text },
   // standardized gen-ai llm span attributes:
   "gen_ai.response.finish_reasons": [result2.finishReason],
   "gen_ai.response.id": responseData.id,
@@ -2192,9 +2188,6 @@ async function generateObject({
   "ai.response.timestamp": responseData.timestamp.toISOString(),
   "ai.usage.promptTokens": result2.usage.promptTokens,
   "ai.usage.completionTokens": result2.usage.completionTokens,
-  // deprecated:
-  "ai.finishReason": result2.finishReason,
-  "ai.result.object": { output: () => objectText },
   // standardized gen-ai llm span attributes:
   "gen_ai.response.finish_reasons": [result2.finishReason],
   "gen_ai.response.id": responseData.id,
@@ -2248,12 +2241,7 @@ async function generateObject({
       output: () => JSON.stringify(validationResult.value)
     },
     "ai.usage.promptTokens": usage.promptTokens,
-    "ai.usage.completionTokens": usage.completionTokens,
-    // deprecated:
-    "ai.finishReason": finishReason,
-    "ai.result.object": {
-      output: () => JSON.stringify(validationResult.value)
-    }
+    "ai.usage.completionTokens": usage.completionTokens
   }
 })
);
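
The three generateObject hunks above drop span attributes that were already flagged `// deprecated` (`ai.finishReason`, `ai.result.object`); matching hunks below do the same for streamObject, generateText, and streamText (`ai.result.text`, `ai.result.toolCalls`, and `ai.stream.msToFirstChunk`, which survives only as `ai.response.msToFirstChunk`). Telemetry that still reads the old names should switch to the standardized attributes that remain. A sketch, assuming `span` is an OpenTelemetry `ReadableSpan` handed to a span processor's `onEnd`:

    // hypothetical helper: reads only attributes that canary.6 still emits
    function logGenerateObjectSpan(span) {
      // removed in canary.6: span.attributes["ai.finishReason"], span.attributes["ai.result.object"]
      const finishReasons = span.attributes["gen_ai.response.finish_reasons"]; // standardized replacement
      const objectJson = span.attributes["ai.response.object"];                // still emitted
      console.log(span.name, finishReasons, objectJson);
    }
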
@@ -2282,16 +2270,13 @@ var DefaultGenerateObjectResult = class {
     this.experimental_providerMetadata = options.providerMetadata;
     this.response = options.response;
     this.request = options.request;
-    this.rawResponse = {
-      headers: options.response.headers
-    };
     this.logprobs = options.logprobs;
   }
   toJsonResponse(init) {
     var _a11;
     return new Response(JSON.stringify(this.object), {
       status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      headers: prepareResponseHeaders(init, {
+      headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
         contentType: "application/json; charset=utf-8"
       })
     });
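
generateObject results no longer expose the deprecated `rawResponse` field; the headers it carried were already duplicated on `response`. `DefaultGenerateTextResult` and the streaming result classes lose their copies the same way in later hunks. A migration sketch, assuming `model`, `schema`, and `prompt` are in scope:

    const result = await generateObject({ model, schema, prompt });

    // before (canary.4):
    // console.log(result.rawResponse.headers);

    // after (canary.6): read the same headers from the response object
    console.log(result.response.headers);
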
@@ -2365,23 +2350,23 @@ function now() {
 }
 
 // core/util/prepare-outgoing-http-headers.ts
-function prepareOutgoingHttpHeaders(init, {
+function prepareOutgoingHttpHeaders(headers, {
   contentType,
   dataStreamVersion
 }) {
-  const headers = {};
-  if ((init == null ? void 0 : init.headers) != null) {
-    for (const [key, value] of Object.entries(init.headers)) {
-      headers[key] = value;
+  const outgoingHeaders = {};
+  if (headers != null) {
+    for (const [key, value] of Object.entries(headers)) {
+      outgoingHeaders[key] = value;
     }
   }
-  if (headers["Content-Type"] == null) {
-    headers["Content-Type"] = contentType;
+  if (outgoingHeaders["Content-Type"] == null) {
+    outgoingHeaders["Content-Type"] = contentType;
   }
   if (dataStreamVersion !== void 0) {
-    headers["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
+    outgoingHeaders["X-Vercel-AI-Data-Stream"] = dataStreamVersion;
   }
-  return headers;
+  return outgoingHeaders;
 }
 
 // core/util/write-to-server-response.ts
@@ -2662,7 +2647,6 @@ var DefaultStreamObjectResult = class {
     generateId: generateId3
   }) {
     this.warnings = warnings;
-    this.rawResponse = rawResponse;
     this.outputStrategy = outputStrategy;
     this.request = Promise.resolve(request);
     this.objectPromise = new DelayedPromise();
@@ -2795,9 +2779,6 @@ var DefaultStreamObjectResult = class {
   "ai.response.timestamp": response.timestamp.toISOString(),
   "ai.usage.promptTokens": finalUsage.promptTokens,
   "ai.usage.completionTokens": finalUsage.completionTokens,
-  // deprecated
-  "ai.finishReason": finishReason,
-  "ai.result.object": { output: () => JSON.stringify(object) },
   // standardized gen-ai llm span attributes:
   "gen_ai.response.finish_reasons": [finishReason],
   "gen_ai.response.id": response.id,
@@ -2816,9 +2797,7 @@ var DefaultStreamObjectResult = class {
     "ai.usage.completionTokens": finalUsage.completionTokens,
     "ai.response.object": {
       output: () => JSON.stringify(object)
-    },
-    // deprecated
-    "ai.result.object": { output: () => JSON.stringify(object) }
+    }
   }
 })
);
@@ -2826,7 +2805,6 @@ var DefaultStreamObjectResult = class {
   usage: finalUsage,
   object,
   error,
-  rawResponse,
   response: {
     ...response,
     headers: rawResponse == null ? void 0 : rawResponse.headers
@@ -2903,7 +2881,7 @@ var DefaultStreamObjectResult = class {
   response,
   status: init == null ? void 0 : init.status,
   statusText: init == null ? void 0 : init.statusText,
-  headers: prepareOutgoingHttpHeaders(init, {
+  headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {
     contentType: "text/plain; charset=utf-8"
   }),
   stream: this.textStream.pipeThrough(new TextEncoderStream())
@@ -2913,7 +2891,7 @@ var DefaultStreamObjectResult = class {
   var _a11;
   return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8"
     })
   });
@@ -3290,14 +3268,6 @@ async function generateText({
   "ai.response.timestamp": responseData.timestamp.toISOString(),
   "ai.usage.promptTokens": result.usage.promptTokens,
   "ai.usage.completionTokens": result.usage.completionTokens,
-  // deprecated:
-  "ai.finishReason": result.finishReason,
-  "ai.result.text": {
-    output: () => result.text
-  },
-  "ai.result.toolCalls": {
-    output: () => JSON.stringify(result.toolCalls)
-  },
   // standardized gen-ai llm span attributes:
   "gen_ai.response.finish_reasons": [result.finishReason],
   "gen_ai.response.id": responseData.id,
@@ -3400,15 +3370,7 @@ async function generateText({
       output: () => JSON.stringify(currentModelResponse.toolCalls)
     },
     "ai.usage.promptTokens": currentModelResponse.usage.promptTokens,
-    "ai.usage.completionTokens": currentModelResponse.usage.completionTokens,
-    // deprecated:
-    "ai.finishReason": currentModelResponse.finishReason,
-    "ai.result.text": {
-      output: () => currentModelResponse.text
-    },
-    "ai.result.toolCalls": {
-      output: () => JSON.stringify(currentModelResponse.toolCalls)
-    }
+    "ai.usage.completionTokens": currentModelResponse.usage.completionTokens
   }
 })
);
@@ -3506,9 +3468,6 @@ var DefaultGenerateTextResult = class {
     this.responseMessages = options.responseMessages;
     this.steps = options.steps;
     this.experimental_providerMetadata = options.providerMetadata;
-    this.rawResponse = {
-      headers: options.response.headers
-    };
     this.logprobs = options.logprobs;
   }
 };
@@ -4165,14 +4124,10 @@ var DefaultStreamTextResult = class {
   const msToFirstChunk = now2() - startTimestamp;
   stepFirstChunk = false;
   doStreamSpan2.addEvent("ai.stream.firstChunk", {
-    "ai.response.msToFirstChunk": msToFirstChunk,
-    // deprecated:
-    "ai.stream.msToFirstChunk": msToFirstChunk
+    "ai.response.msToFirstChunk": msToFirstChunk
   });
   doStreamSpan2.setAttributes({
-    "ai.response.msToFirstChunk": msToFirstChunk,
-    // deprecated:
-    "ai.stream.msToFirstChunk": msToFirstChunk
+    "ai.response.msToFirstChunk": msToFirstChunk
   });
 }
 if (chunk.type === "text-delta" && chunk.textDelta.length === 0) {
@@ -4297,12 +4252,6 @@ var DefaultStreamTextResult = class {
   "ai.response.timestamp": stepResponse.timestamp.toISOString(),
   "ai.usage.promptTokens": stepUsage.promptTokens,
   "ai.usage.completionTokens": stepUsage.completionTokens,
-  // deprecated
-  "ai.finishReason": stepFinishReason,
-  "ai.result.text": { output: () => stepText },
-  "ai.result.toolCalls": {
-    output: () => stepToolCallsJson
-  },
   // standardized gen-ai llm span attributes:
   "gen_ai.response.finish_reasons": [stepFinishReason],
   "gen_ai.response.id": stepResponse.id,
@@ -4357,7 +4306,6 @@ var DefaultStreamTextResult = class {
   warnings: self.warnings,
   logprobs: stepLogProbs,
   request: stepRequest,
-  rawResponse: self.rawResponse,
   response: {
     ...stepResponse,
     headers: (_a11 = self.rawResponse) == null ? void 0 : _a11.headers,
@@ -4418,13 +4366,7 @@ var DefaultStreamTextResult = class {
       output: () => stepToolCallsJson
     },
     "ai.usage.promptTokens": combinedUsage.promptTokens,
-    "ai.usage.completionTokens": combinedUsage.completionTokens,
-    // deprecated
-    "ai.finishReason": stepFinishReason,
-    "ai.result.text": { output: () => fullStepText },
-    "ai.result.toolCalls": {
-      output: () => stepToolCallsJson
-    }
+    "ai.usage.completionTokens": combinedUsage.completionTokens
   }
 })
);
@@ -4454,7 +4396,6 @@ var DefaultStreamTextResult = class {
   // The type exposed to the users will be correctly inferred.
   toolResults: stepToolResults,
   request: stepRequest,
-  rawResponse,
   response: {
     ...stepResponse,
     headers: rawResponse == null ? void 0 : rawResponse.headers,
@@ -4617,20 +4558,19 @@ var DefaultStreamTextResult = class {
   });
   return this.fullStream.pipeThrough(callbackTransformer).pipeThrough(streamPartsTransformer).pipeThrough(new TextEncoderStream());
 }
-pipeDataStreamToResponse(response, options) {
-  const init = options == null ? void 0 : "init" in options ? options.init : {
-    headers: "headers" in options ? options.headers : void 0,
-    status: "status" in options ? options.status : void 0,
-    statusText: "statusText" in options ? options.statusText : void 0
-  };
-  const data = options == null ? void 0 : "data" in options ? options.data : void 0;
-  const getErrorMessage3 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
-  const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
+pipeDataStreamToResponse(response, {
+  status,
+  statusText,
+  headers,
+  data,
+  getErrorMessage: getErrorMessage3,
+  sendUsage
+} = {}) {
   writeToServerResponse({
     response,
-    status: init == null ? void 0 : init.status,
-    statusText: init == null ? void 0 : init.statusText,
-    headers: prepareOutgoingHttpHeaders(init, {
+    status,
+    statusText,
+    headers: prepareOutgoingHttpHeaders(headers, {
      contentType: "text/plain; charset=utf-8",
      dataStreamVersion: "v1"
    }),
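
`pipeDataStreamToResponse` previously accepted either a nested `{ init }` object or flat fields and reconciled them with the `"init" in options` checks; it now destructures one flat options object (defaulting to `{}`) and passes `status` and `statusText` straight to `writeToServerResponse`. Callers on the nested form must flatten it. A sketch, assuming a Node.js `ServerResponse` named `res` and a streamText result named `result`:

    // before (canary.4): nested init was still accepted
    // result.pipeDataStreamToResponse(res, { init: { status: 200, statusText: "OK" } });

    // after (canary.6): flat options only
    result.pipeDataStreamToResponse(res, {
      status: 200,
      statusText: "OK",
      headers: { "Cache-Control": "no-store" }, // merged by prepareOutgoingHttpHeaders
      sendUsage: true
    });
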
@@ -4642,7 +4582,7 @@ var DefaultStreamTextResult = class {
   response,
   status: init == null ? void 0 : init.status,
   statusText: init == null ? void 0 : init.statusText,
-  headers: prepareOutgoingHttpHeaders(init, {
+  headers: prepareOutgoingHttpHeaders(init == null ? void 0 : init.headers, {
     contentType: "text/plain; charset=utf-8"
   }),
   stream: this.textStream.pipeThrough(new TextEncoderStream())
@@ -4655,22 +4595,20 @@ var DefaultStreamTextResult = class {
   });
   return (options == null ? void 0 : options.data) ? mergeStreams(options == null ? void 0 : options.data.stream, stream) : stream;
 }
-toDataStreamResponse(options) {
-  var _a11;
-  const init = options == null ? void 0 : "init" in options ? options.init : {
-    headers: "headers" in options ? options.headers : void 0,
-    status: "status" in options ? options.status : void 0,
-    statusText: "statusText" in options ? options.statusText : void 0
-  };
-  const data = options == null ? void 0 : "data" in options ? options.data : void 0;
-  const getErrorMessage3 = options == null ? void 0 : "getErrorMessage" in options ? options.getErrorMessage : void 0;
-  const sendUsage = options == null ? void 0 : "sendUsage" in options ? options.sendUsage : void 0;
+toDataStreamResponse({
+  headers,
+  status,
+  statusText,
+  data,
+  getErrorMessage: getErrorMessage3,
+  sendUsage
+} = {}) {
   return new Response(
     this.toDataStream({ data, getErrorMessage: getErrorMessage3, sendUsage }),
     {
-      status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-      statusText: init == null ? void 0 : init.statusText,
-      headers: prepareResponseHeaders(init, {
+      status,
+      statusText,
+      headers: prepareResponseHeaders(headers, {
        contentType: "text/plain; charset=utf-8",
        dataStreamVersion: "v1"
      })
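
`toDataStreamResponse` is flattened the same way. The explicit `?? 200` fallback on `status` also disappears; that is behavior-preserving, since the `Response` constructor itself defaults an undefined status to 200. A sketch, assuming a route handler with a streamText result named `result` in scope (the `requestId` header value is hypothetical):

    // before (canary.4):
    // return result.toDataStreamResponse({ init: { status: 200 } });

    // after (canary.6):
    return result.toDataStreamResponse({
      status: 200,
      headers: { "X-Request-Id": requestId },
      getErrorMessage: (error) => String(error)
    });
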
@@ -4681,7 +4619,7 @@ var DefaultStreamTextResult = class {
   var _a11;
   return new Response(this.textStream.pipeThrough(new TextEncoderStream()), {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8"
     })
   });
@@ -5121,7 +5059,7 @@ function toDataStreamResponse(stream, options) {
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
     statusText: init == null ? void 0 : init.statusText,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
       contentType: "text/plain; charset=utf-8",
       dataStreamVersion: "v1"
     })
@@ -5165,7 +5103,7 @@ function toDataStreamResponse2(stream, options = {}) {
   return new Response(responseStream, {
     status: (_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200,
     statusText: init == null ? void 0 : init.statusText,
-    headers: prepareResponseHeaders(init, {
+    headers: prepareResponseHeaders(init == null ? void 0 : init.headers, {
      contentType: "text/plain; charset=utf-8",
      dataStreamVersion: "v1"
    })
@@ -5182,48 +5120,6 @@ function trimStartOfStream() {
     return text;
   };
 }
-
-// streams/stream-to-response.ts
-function streamToResponse(res, response, init, data) {
-  var _a11;
-  response.writeHead((_a11 = init == null ? void 0 : init.status) != null ? _a11 : 200, {
-    "Content-Type": "text/plain; charset=utf-8",
-    ...init == null ? void 0 : init.headers
-  });
-  let processedStream = res;
-  if (data) {
-    processedStream = mergeStreams(data.stream, res);
-  }
-  const reader = processedStream.getReader();
-  function read() {
-    reader.read().then(({ done, value }) => {
-      if (done) {
-        response.end();
-        return;
-      }
-      response.write(value);
-      read();
-    });
-  }
-  read();
-}
-
-// streams/streaming-text-response.ts
-var StreamingTextResponse = class extends Response {
-  constructor(res, init, data) {
-    let processedStream = res;
-    if (data) {
-      processedStream = mergeStreams(data.stream, res);
-    }
-    super(processedStream, {
-      ...init,
-      status: 200,
-      headers: prepareResponseHeaders(init, {
-        contentType: "text/plain; charset=utf-8"
-      })
-    });
-  }
-};
 export {
   AISDKError10 as AISDKError,
   APICallError2 as APICallError,
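
The legacy `streams/stream-to-response.ts` helper and the `StreamingTextResponse` class are removed outright, and the export hunks below delete them from the public surface. Note that `StreamingTextResponse` hard-coded `status: 200` after spreading `init`, so any status passed to it was silently ignored. Code built on either API should move to the methods on the stream result itself, along the lines of this sketch (assuming a streamText result named `result` and a Node.js response named `nodeRes`):

    // before (canary.4):
    // return new StreamingTextResponse(result.toDataStream());
    // or: streamToResponse(result.toDataStream(), nodeRes, { status: 200 });

    // after (canary.6): use the result's own response helpers
    return result.toDataStreamResponse();         // Web Response (route handlers)
    // result.pipeDataStreamToResponse(nodeRes);  // Node.js ServerResponse path
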
@@ -5248,7 +5144,6 @@ export {
   NoSuchToolError,
   RetryError,
   StreamData,
-  StreamingTextResponse,
   TypeValidationError2 as TypeValidationError,
   UnsupportedFunctionalityError2 as UnsupportedFunctionalityError,
   convertToCoreMessages,
@@ -5272,7 +5167,6 @@ export {
   readDataStream,
   streamObject,
   streamText,
-  streamToResponse,
   tool
 };
 //# sourceMappingURL=index.mjs.map