ai 5.0.0-beta.32 → 5.0.0-beta.33
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.d.mts +8 -0
- package/dist/index.d.ts +8 -0
- package/dist/index.js +81 -50
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +81 -50
- package/dist/index.mjs.map +1 -1
- package/dist/test/index.js.map +1 -1
- package/dist/test/index.mjs.map +1 -1
- package/package.json +18 -9
- package/dist/bin/ai.min.js +0 -126
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,17 @@
 # ai
 
+## 5.0.0-beta.33
+
+### Patch Changes
+
+- 48378b9: fix (ai): send null as tool output when tools return undefined
+- 93d53a1: chore (ai): remove cli
+- 27deb4d: feat (provider/gateway): Add providerMetadata to embeddings response
+- Updated dependencies [27deb4d]
+  - @ai-sdk/gateway@1.0.0-beta.18
+  - @ai-sdk/provider@2.0.0-beta.2
+  - @ai-sdk/provider-utils@3.0.0-beta.9
+
 ## 5.0.0-beta.32
 
 ### Patch Changes
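The headline change in this release is the optional providerMetadata field on embedding results. Below is a minimal sketch of how a caller might read it, assuming a provider that actually populates embeddings metadata; the model id and the logged shape are illustrative, not taken from this diff.

import { embed } from "ai";
import { openai } from "@ai-sdk/openai";

const result = await embed({
  model: openai.textEmbeddingModel("text-embedding-3-small"),
  value: "sunny day at the beach",
});

// providerMetadata is optional and only present when the provider returns it.
console.log(result.embedding.length, result.usage.tokens);
console.log(result.providerMetadata ?? "no provider metadata returned");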
package/dist/index.d.mts
CHANGED
@@ -2060,6 +2060,10 @@ interface EmbedResult<VALUE> {
      */
     readonly usage: EmbeddingModelUsage;
     /**
+    Optional provider-specific metadata.
+     */
+    readonly providerMetadata?: ProviderMetadata;
+    /**
     Optional response data.
      */
     readonly response?: {
@@ -2140,6 +2144,10 @@ interface EmbedManyResult<VALUE> {
      */
     readonly usage: EmbeddingModelUsage;
     /**
+    Optional provider-specific metadata.
+     */
+    readonly providerMetadata?: ProviderMetadata;
+    /**
     Optional raw response data.
      */
     readonly responses?: Array<{
package/dist/index.d.ts
CHANGED
@@ -2060,6 +2060,10 @@ interface EmbedResult<VALUE> {
      */
     readonly usage: EmbeddingModelUsage;
     /**
+    Optional provider-specific metadata.
+     */
+    readonly providerMetadata?: ProviderMetadata;
+    /**
     Optional response data.
      */
     readonly response?: {
@@ -2140,6 +2144,10 @@ interface EmbedManyResult<VALUE> {
      */
     readonly usage: EmbeddingModelUsage;
     /**
+    Optional provider-specific metadata.
+     */
+    readonly providerMetadata?: ProviderMetadata;
+    /**
     Optional raw response data.
      */
     readonly responses?: Array<{
package/dist/index.js
CHANGED
@@ -1895,12 +1895,15 @@ function createToolModelOutput({
   if (errorMode === "text") {
     return { type: "error-text", value: (0, import_provider21.getErrorMessage)(output) };
   } else if (errorMode === "json") {
-    return { type: "error-json", value: output };
+    return { type: "error-json", value: toJSONValue(output) };
   }
   if (tool3 == null ? void 0 : tool3.toModelOutput) {
     return tool3.toModelOutput(output);
   }
-  return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: output };
+  return typeof output === "string" ? { type: "text", value: output } : { type: "json", value: toJSONValue(output) };
+}
+function toJSONValue(value) {
+  return value === void 0 ? null : value;
 }
 
 // src/generate-text/to-response-messages.ts
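The hunk above is the fix behind the changelog entry "send null as tool output when tools return undefined": tool results are now routed through toJSONValue before being emitted as JSON content. A standalone sketch of that coercion, with a simplified JSONValue alias (the alias itself is illustrative, not the package's type):

type JSONValue =
  | null
  | string
  | number
  | boolean
  | JSONValue[]
  | { [key: string]: JSONValue };

function toJSONValue(value: unknown): JSONValue {
  // undefined is not representable in JSON, so it is sent as null instead.
  return value === undefined ? null : (value as JSONValue);
}

// A tool whose execute() returns nothing now yields { type: "json", value: null }
// rather than a payload carrying an invalid { value: undefined }.
console.log(toJSONValue(undefined)); // null
console.log(toJSONValue({ ok: true })); // { ok: true }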
@@ -5406,7 +5409,7 @@ async function embed({
     }),
     tracer,
     fn: async (span) => {
-      const { embedding, usage, response } = await retry(
+      const { embedding, usage, response, providerMetadata } = await retry(
        () => (
          // nested spans to align with the embedMany telemetry data:
          recordSpan({
@@ -5450,6 +5453,7 @@
             return {
               embedding: embedding2,
               usage: usage2,
+              providerMetadata: modelResponse.providerMetadata,
               response: modelResponse.response
             };
           }
@@ -5469,6 +5473,7 @@ async function embed({
         value,
         embedding,
         usage,
+        providerMetadata,
         response
       });
     }
@@ -5479,6 +5484,7 @@ var DefaultEmbedResult = class {
     this.value = options.value;
     this.embedding = options.embedding;
     this.usage = options.usage;
+    this.providerMetadata = options.providerMetadata;
     this.response = options.response;
   }
 };
@@ -5536,58 +5542,64 @@ async function embedMany({
     }),
     tracer,
     fn: async (span) => {
+      var _a16;
       const [maxEmbeddingsPerCall, supportsParallelCalls] = await Promise.all([
         model.maxEmbeddingsPerCall,
         model.supportsParallelCalls
       ]);
       if (maxEmbeddingsPerCall == null || maxEmbeddingsPerCall === Infinity) {
-        const { embeddings: embeddings2, usage, response } = await retry(
-
-
-
-
-
-
-
-
-
-
-
-
+        const { embeddings: embeddings2, usage, response, providerMetadata: providerMetadata2 } = await retry(
+          () => {
+            return recordSpan({
+              name: "ai.embedMany.doEmbed",
+              attributes: selectTelemetryAttributes({
+                telemetry,
+                attributes: {
+                  ...assembleOperationName({
+                    operationId: "ai.embedMany.doEmbed",
+                    telemetry
+                  }),
+                  ...baseTelemetryAttributes,
+                  // specific settings that only make sense on the outer level:
+                  "ai.values": {
+                    input: () => values.map((value) => JSON.stringify(value))
+                  }
                 }
+              }),
+              tracer,
+              fn: async (doEmbedSpan) => {
+                var _a17;
+                const modelResponse = await model.doEmbed({
+                  values,
+                  abortSignal,
+                  headers,
+                  providerOptions
+                });
+                const embeddings3 = modelResponse.embeddings;
+                const usage2 = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
+                doEmbedSpan.setAttributes(
+                  selectTelemetryAttributes({
+                    telemetry,
+                    attributes: {
+                      "ai.embeddings": {
+                        output: () => embeddings3.map(
+                          (embedding) => JSON.stringify(embedding)
+                        )
+                      },
+                      "ai.usage.tokens": usage2.tokens
+                    }
+                  })
+                );
+                return {
+                  embeddings: embeddings3,
+                  usage: usage2,
+                  providerMetadata: modelResponse.providerMetadata,
+                  response: modelResponse.response
+                };
              }
-          })
-
-
-          var _a16;
-          const modelResponse = await model.doEmbed({
-            values,
-            abortSignal,
-            headers,
-            providerOptions
-          });
-          const embeddings3 = modelResponse.embeddings;
-          const usage2 = (_a16 = modelResponse.usage) != null ? _a16 : { tokens: NaN };
-          doEmbedSpan.setAttributes(
-            selectTelemetryAttributes({
-              telemetry,
-              attributes: {
-                "ai.embeddings": {
-                  output: () => embeddings3.map((embedding) => JSON.stringify(embedding))
-                },
-                "ai.usage.tokens": usage2.tokens
-              }
-            })
-          );
-          return {
-            embeddings: embeddings3,
-            usage: usage2,
-            response: modelResponse.response
-          };
-        }
-      });
-    });
+            });
+          }
+        );
       span.setAttributes(
         selectTelemetryAttributes({
           telemetry,
@@ -5603,6 +5615,7 @@ async function embedMany({
         values,
         embeddings: embeddings2,
         usage,
+        providerMetadata: providerMetadata2,
         responses: [response]
       });
     }
@@ -5610,6 +5623,7 @@
       const embeddings = [];
       const responses = [];
       let tokens = 0;
+      let providerMetadata;
       const parallelChunks = splitArray(
         valueChunks,
         supportsParallelCalls ? maxParallelCalls : 1
@@ -5636,7 +5650,7 @@ async function embedMany({
           }),
           tracer,
           fn: async (doEmbedSpan) => {
-            var _a16;
+            var _a17;
             const modelResponse = await model.doEmbed({
               values: chunk,
               abortSignal,
@@ -5644,7 +5658,7 @@ async function embedMany({
               providerOptions
             });
             const embeddings2 = modelResponse.embeddings;
-            const usage = (_a16 = modelResponse.usage) != null ? _a16 : { tokens: NaN };
+            const usage = (_a17 = modelResponse.usage) != null ? _a17 : { tokens: NaN };
             doEmbedSpan.setAttributes(
               selectTelemetryAttributes({
                 telemetry,
@@ -5661,6 +5675,7 @@
             return {
               embeddings: embeddings2,
               usage,
+              providerMetadata: modelResponse.providerMetadata,
               response: modelResponse.response
             };
           }
@@ -5672,6 +5687,20 @@ async function embedMany({
         embeddings.push(...result.embeddings);
         responses.push(result.response);
         tokens += result.usage.tokens;
+        if (result.providerMetadata) {
+          if (!providerMetadata) {
+            providerMetadata = { ...result.providerMetadata };
+          } else {
+            for (const [providerName, metadata] of Object.entries(
+              result.providerMetadata
+            )) {
+              providerMetadata[providerName] = {
+                ...(_a16 = providerMetadata[providerName]) != null ? _a16 : {},
+                ...metadata
+              };
+            }
+          }
+        }
       }
     }
     span.setAttributes(
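When embedMany splits the input into chunks, each doEmbed call can return its own providerMetadata; the loop above shallow-merges those objects per provider key, with later chunks overwriting earlier values at the second level. A standalone sketch of that merge with simplified types (the ProviderMetadata alias and function name here are illustrative, not the package's exports):

type ProviderMetadata = Record<string, Record<string, unknown>>;

function mergeProviderMetadata(
  chunks: Array<ProviderMetadata | undefined>,
): ProviderMetadata | undefined {
  let merged: ProviderMetadata | undefined;
  for (const chunk of chunks) {
    if (chunk == null) continue;
    if (merged == null) {
      // first chunk with metadata: copy it as the starting point
      merged = { ...chunk };
      continue;
    }
    for (const [providerName, metadata] of Object.entries(chunk)) {
      // merge per provider key; later chunks win on conflicting fields
      merged[providerName] = { ...(merged[providerName] ?? {}), ...metadata };
    }
  }
  return merged;
}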
@@ -5689,6 +5718,7 @@ async function embedMany({
         values,
         embeddings,
         usage: { tokens },
+        providerMetadata,
         responses
       });
     }
@@ -5699,6 +5729,7 @@ var DefaultEmbedManyResult = class {
     this.values = options.values;
     this.embeddings = options.embeddings;
     this.usage = options.usage;
+    this.providerMetadata = options.providerMetadata;
     this.responses = options.responses;
   }
 };