ai 4.3.15 → 4.3.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -2619,6 +2619,26 @@ function validateObjectGenerationInput({
   }
 }
 
+// core/prompt/stringify-for-telemetry.ts
+function stringifyForTelemetry(prompt) {
+  const processedPrompt = prompt.map((message) => {
+    return {
+      ...message,
+      content: typeof message.content === "string" ? message.content : message.content.map(processPart)
+    };
+  });
+  return JSON.stringify(processedPrompt);
+}
+function processPart(part) {
+  if (part.type === "image") {
+    return {
+      ...part,
+      image: part.image instanceof Uint8Array ? convertDataContentToBase64String(part.image) : part.image
+    };
+  }
+  return part;
+}
+
 // core/generate-object/generate-object.ts
 var originalGenerateId = createIdGenerator({ prefix: "aiobj", size: 24 });
 async function generateObject({
@@ -2846,7 +2866,7 @@ async function generateObject({
           input: () => inputFormat
         },
         "ai.prompt.messages": {
-          input: () => JSON.stringify(promptMessages)
+          input: () => stringifyForTelemetry(promptMessages)
         },
         "ai.settings.mode": mode,
         // standardized gen-ai llm span attributes:
@@ -3448,7 +3468,7 @@ var DefaultStreamObjectResult = class {
           input: () => callOptions.inputFormat
         },
         "ai.prompt.messages": {
-          input: () => JSON.stringify(callOptions.prompt)
+          input: () => stringifyForTelemetry(callOptions.prompt)
         },
         "ai.settings.mode": mode,
         // standardized gen-ai llm span attributes:
@@ -4239,7 +4259,7 @@ async function generateText({
         // prompt:
         "ai.prompt.format": { input: () => promptFormat },
         "ai.prompt.messages": {
-          input: () => JSON.stringify(promptMessages)
+          input: () => stringifyForTelemetry(promptMessages)
         },
         "ai.prompt.tools": {
           // convert the language model level tools:
@@ -5600,7 +5620,7 @@ var DefaultStreamTextResult = class {
           input: () => promptFormat
         },
         "ai.prompt.messages": {
-          input: () => JSON.stringify(promptMessages)
+          input: () => stringifyForTelemetry(promptMessages)
         },
         "ai.prompt.tools": {
           // convert the language model level tools:
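
The change in this diff is limited to how prompt messages are serialized into the "ai.prompt.messages" telemetry attribute in generateObject, streamObject, generateText, and streamText: image parts held as a Uint8Array are base64-encoded (via convertDataContentToBase64String) before JSON.stringify, instead of being expanded into one numeric key per byte. Below is a minimal, self-contained sketch of that behavior; the Message/part types and the toBase64 helper are illustrative assumptions, not the SDK's internal definitions.

// Sketch of the serialization change, assuming simplified message shapes.
// toBase64 stands in for the package's convertDataContentToBase64String.
type TextPart = { type: "text"; text: string };
type ImagePart = { type: "image"; image: Uint8Array | string };
type Message = {
  role: "system" | "user" | "assistant";
  content: string | Array<TextPart | ImagePart>;
};

const toBase64 = (data: Uint8Array): string =>
  Buffer.from(data).toString("base64");

function stringifyForTelemetry(prompt: Message[]): string {
  return JSON.stringify(
    prompt.map((message) => ({
      ...message,
      content:
        typeof message.content === "string"
          ? message.content
          : message.content.map((part) =>
              part.type === "image" && part.image instanceof Uint8Array
                ? { ...part, image: toBase64(part.image) }
                : part
            ),
    }))
  );
}

// Example: the binary image is recorded as the base64 string "iVBORw=="
// rather than an object with one numeric key per byte ({"0":137,"1":80,...}),
// which is what plain JSON.stringify produces for a Uint8Array.
const examplePrompt: Message[] = [
  {
    role: "user",
    content: [
      { type: "text", text: "Describe this image" },
      { type: "image", image: new Uint8Array([0x89, 0x50, 0x4e, 0x47]) },
    ],
  },
];
console.log(stringifyForTelemetry(examplePrompt));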