ai 3.2.23 → 3.2.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -201,7 +201,7 @@ function convertDataContentToUint8Array(content) {
       return convertBase64ToUint8Array(content);
     } catch (error) {
       throw new InvalidDataContentError({
-        message: "Invalid data content. Content string is not a base64-encoded image.",
+        message: "Invalid data content. Content string is not a base64-encoded media.",
         content,
         cause: error
       });
@@ -212,6 +212,13 @@ function convertDataContentToUint8Array(content) {
   }
   throw new InvalidDataContentError({ content });
 }
+function convertUint8ArrayToText(uint8Array) {
+  try {
+    return new TextDecoder().decode(uint8Array);
+  } catch (error) {
+    throw new Error("Error decoding Uint8Array to text");
+  }
+}
 
 // core/prompt/invalid-message-role-error.ts
 var InvalidMessageRoleError = class extends Error {
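
The new convertUint8ArrayToText helper is a thin wrapper around TextDecoder and is added to the package exports at the bottom of this diff. A minimal usage sketch, assuming ai@3.2.25 is installed; the sample bytes are illustrative only:

  import { convertUint8ArrayToText } from "ai";

  // Decode UTF-8 bytes (for example, the payload of a text/* data URL attachment)
  // back into a string.
  const bytes = new TextEncoder().encode("hello attachment");
  console.log(convertUint8ArrayToText(bytes)); // "hello attachment"
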
@@ -1339,7 +1346,7 @@ async function generateText({
     },
     tracer,
     fn: async (span) => {
-      var _a2, _b, _c;
+      var _a2, _b, _c, _d;
       const retry = retryWithExponentialBackoff({ maxRetries });
       const validatedPrompt = getValidatedPrompt({
         system,
@@ -1355,10 +1362,11 @@ async function generateText({
       let currentModelResponse;
       let currentToolCalls = [];
       let currentToolResults = [];
-      let roundtrips = 0;
+      let roundtripCount = 0;
       const responseMessages = [];
+      const roundtrips = [];
       do {
-        const currentInputFormat = roundtrips === 0 ? validatedPrompt.type : "messages";
+        const currentInputFormat = roundtripCount === 0 ? validatedPrompt.type : "messages";
         currentModelResponse = await retry(
           () => recordSpan({
             name: "ai.generateText.doGenerate",
@@ -1396,9 +1404,18 @@ async function generateText({
           tools,
           tracer
         });
-        const newResponseMessages = toResponseMessages({
+        roundtrips.push({
           text: (_b = currentModelResponse.text) != null ? _b : "",
           toolCalls: currentToolCalls,
+          toolResults: currentToolResults,
+          finishReason: currentModelResponse.finishReason,
+          usage: calculateCompletionTokenUsage(currentModelResponse.usage),
+          warnings: currentModelResponse.warnings,
+          logprobs: currentModelResponse.logprobs
+        });
+        const newResponseMessages = toResponseMessages({
+          text: (_c = currentModelResponse.text) != null ? _c : "",
+          toolCalls: currentToolCalls,
           toolResults: currentToolResults
         });
         responseMessages.push(...newResponseMessages);
@@ -1409,7 +1426,7 @@ async function generateText({
         // there are tool calls:
         currentToolCalls.length > 0 && // all current tool calls have results:
         currentToolResults.length === currentToolCalls.length && // the number of roundtrips is less than the maximum:
-        roundtrips++ < maxToolRoundtrips
+        roundtripCount++ < maxToolRoundtrips
       );
       span.setAttributes({
         "ai.finishReason": currentModelResponse.finishReason,
@@ -1422,7 +1439,7 @@ async function generateText({
         // Always return a string so that the caller doesn't have to check for undefined.
         // If they need to check if the model did not return any text,
         // they can check the length of the string:
-        text: (_c = currentModelResponse.text) != null ? _c : "",
+        text: (_d = currentModelResponse.text) != null ? _d : "",
         toolCalls: currentToolCalls,
         toolResults: currentToolResults,
         finishReason: currentModelResponse.finishReason,
@@ -1430,7 +1447,8 @@ async function generateText({
         warnings: currentModelResponse.warnings,
         rawResponse: currentModelResponse.rawResponse,
         logprobs: currentModelResponse.logprobs,
-        responseMessages
+        responseMessages,
+        roundtrips
       });
     }
   });
@@ -1488,6 +1506,7 @@ var GenerateTextResult = class {
     this.rawResponse = options.rawResponse;
     this.logprobs = options.logprobs;
     this.responseMessages = options.responseMessages;
+    this.roundtrips = options.roundtrips;
   }
 };
 function toResponseMessages({
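
The roundtrip records collected in the loop above are now exposed as result.roundtrips on GenerateTextResult. A minimal sketch of reading them, assuming @ai-sdk/openai and zod are installed; the model choice and the weather tool are illustrative only:

  import { generateText, tool } from "ai";
  import { openai } from "@ai-sdk/openai";
  import { z } from "zod";

  const result = await generateText({
    model: openai("gpt-4o"),
    maxToolRoundtrips: 2,
    tools: {
      weather: tool({
        description: "Get the weather for a city",
        parameters: z.object({ city: z.string() }),
        execute: async ({ city }) => ({ city, temperature: 20 })
      })
    },
    prompt: "What is the weather in Berlin?"
  });

  // One entry per model call: text, toolCalls, toolResults,
  // finishReason, usage, warnings, logprobs (see roundtrips.push above).
  for (const roundtrip of result.roundtrips) {
    console.log(roundtrip.finishReason, roundtrip.usage.totalTokens);
  }
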
@@ -2085,13 +2104,79 @@ var StreamTextResult = class {
 };
 var experimental_streamText = streamText;
 
+// core/prompt/attachments-to-parts.ts
+function attachmentsToParts(attachments) {
+  var _a, _b, _c;
+  const parts = [];
+  for (const attachment of attachments) {
+    let url;
+    try {
+      url = new URL(attachment.url);
+    } catch (error) {
+      throw new Error(`Invalid URL: ${attachment.url}`);
+    }
+    switch (url.protocol) {
+      case "http:":
+      case "https:": {
+        if ((_a = attachment.contentType) == null ? void 0 : _a.startsWith("image/")) {
+          parts.push({ type: "image", image: url });
+        }
+        break;
+      }
+      case "data:": {
+        let header;
+        let base64Content;
+        let mimeType;
+        try {
+          [header, base64Content] = attachment.url.split(",");
+          mimeType = header.split(";")[0].split(":")[1];
+        } catch (error) {
+          throw new Error(`Error processing data URL: ${attachment.url}`);
+        }
+        if (mimeType == null || base64Content == null) {
+          throw new Error(`Invalid data URL format: ${attachment.url}`);
+        }
+        if ((_b = attachment.contentType) == null ? void 0 : _b.startsWith("image/")) {
+          parts.push({
+            type: "image",
+            image: convertDataContentToUint8Array(base64Content)
+          });
+        } else if ((_c = attachment.contentType) == null ? void 0 : _c.startsWith("text/")) {
+          parts.push({
+            type: "text",
+            text: convertUint8ArrayToText(
+              convertDataContentToUint8Array(base64Content)
+            )
+          });
+        }
+        break;
+      }
+      default: {
+        throw new Error(`Unsupported URL protocol: ${url.protocol}`);
+      }
+    }
+  }
+  return parts;
+}
+
 // core/prompt/convert-to-core-messages.ts
 function convertToCoreMessages(messages) {
   const coreMessages = [];
-  for (const { role, content, toolInvocations } of messages) {
+  for (const {
+    role,
+    content,
+    toolInvocations,
+    experimental_attachments
+  } of messages) {
     switch (role) {
       case "user": {
-        coreMessages.push({ role: "user", content });
+        coreMessages.push({
+          role: "user",
+          content: experimental_attachments ? [
+            { type: "text", text: content },
+            ...attachmentsToParts(experimental_attachments)
+          ] : content
+        });
         break;
       }
       case "assistant": {
@@ -3458,6 +3543,7 @@ export {
   convertDataContentToBase64String,
   convertDataContentToUint8Array,
   convertToCoreMessages,
+  convertUint8ArrayToText,
   cosineSimilarity,
   createCallbacksTransformer,
   createEventStreamTransformer,