@ai-sdk/anthropic 3.0.0-beta.61 → 3.0.0-beta.63

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,36 @@
  # @ai-sdk/anthropic

+ ## 3.0.0-beta.63
+
+ ### Patch Changes
+
+ - 457318b: chore(provider,ai): switch to SharedV3Warning and unified warnings
+ - Updated dependencies [457318b]
+   - @ai-sdk/provider@3.0.0-beta.20
+   - @ai-sdk/provider-utils@4.0.0-beta.37
+
+ ## 3.0.0-beta.62
+
+ ### Patch Changes
+
+ - 8d9e8ad: chore(provider): remove generics from EmbeddingModelV3
+
+   Before
+
+   ```ts
+   model.textEmbeddingModel('my-model-id');
+   ```
+
+   After
+
+   ```ts
+   model.embeddingModel('my-model-id');
+   ```
+
+ - Updated dependencies [8d9e8ad]
+   - @ai-sdk/provider@3.0.0-beta.19
+   - @ai-sdk/provider-utils@4.0.0-beta.36
+
  ## 3.0.0-beta.61

  ### Patch Changes
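The 457318b changeset replaces the setting-specific warning variants with a single unified shape. The sketch below is inferred only from the dist/index.js hunks further down in this diff, not from the actual SharedV3Warning definition in @ai-sdk/provider; any field not visible in the hunks is an assumption.

```ts
// Approximate before/after warning shapes, inferred from this diff only.

// 3.0.0-beta.61 and earlier: variant-specific warning types.
type LegacySettingWarning = {
  type: 'unsupported-setting';
  setting: string; // e.g. 'temperature', 'topK', 'cacheControl'
  details?: string;
};
type LegacyToolWarning = {
  type: 'unsupported-tool';
  tool: unknown; // the rejected tool object itself
};

// 3.0.0-beta.63: one unified 'unsupported' variant.
type UnifiedUnsupportedWarning = {
  type: 'unsupported';
  feature: string; // e.g. 'temperature' or 'provider-defined tool <id>'
  details?: string;
};

const example: UnifiedUnsupportedWarning = {
  type: 'unsupported',
  feature: 'seed',
};
```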
package/dist/index.js CHANGED
@@ -31,7 +31,7 @@ var import_provider4 = require("@ai-sdk/provider");
  var import_provider_utils20 = require("@ai-sdk/provider-utils");

  // src/version.ts
- var VERSION = true ? "3.0.0-beta.61" : "0.0.0-test";
+ var VERSION = true ? "3.0.0-beta.63" : "0.0.0-test";

  // src/anthropic-messages-language-model.ts
  var import_provider3 = require("@ai-sdk/provider");
@@ -692,8 +692,8 @@ var CacheControlValidator = class {
  }
  if (!context.canCache) {
  this.warnings.push({
- type: "unsupported-setting",
- setting: "cacheControl",
+ type: "unsupported",
+ feature: "cache_control on non-cacheable context",
  details: `cache_control cannot be set on ${context.type}. It will be ignored.`
  });
  return void 0;
@@ -701,8 +701,8 @@ var CacheControlValidator = class {
  this.breakpointCount++;
  if (this.breakpointCount > MAX_CACHE_BREAKPOINTS) {
  this.warnings.push({
- type: "unsupported-setting",
- setting: "cacheControl",
+ type: "unsupported",
+ feature: "cacheControl breakpoint limit",
  details: `Maximum ${MAX_CACHE_BREAKPOINTS} cache breakpoints exceeded (found ${this.breakpointCount}). This breakpoint will be ignored.`
  });
  return void 0;
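For context, cache breakpoints are requested through Anthropic-specific provider options; the message-level `providerOptions.anthropic.cacheControl` shape below is a sketch based on the AI SDK documentation rather than on this diff, the model id is a placeholder, and it assumes `generateText` surfaces provider warnings on its result. When a breakpoint lands on a non-cacheable block or exceeds the limit, the validator now emits the `unsupported` warnings shown above instead of failing the request.

```ts
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

const { warnings } = await generateText({
  model: anthropic('claude-sonnet-4-5'),
  messages: [
    {
      role: 'system',
      content: 'Long, reusable system prompt that is worth caching…',
      // Request a cache breakpoint for this message (assumed option shape).
      providerOptions: {
        anthropic: { cacheControl: { type: 'ephemeral' } },
      },
    },
    { role: 'user', content: 'Summarize the instructions above.' },
  ],
});

// Ignored breakpoints show up here as { type: 'unsupported', feature: '…' }.
console.log(warnings);
```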
@@ -1028,14 +1028,20 @@ async function prepareTools({
  break;
  }
  default: {
- toolWarnings.push({ type: "unsupported-tool", tool });
+ toolWarnings.push({
+ type: "unsupported",
+ feature: `provider-defined tool ${tool.id}`
+ });
  break;
  }
  }
  break;
  }
  default: {
- toolWarnings.push({ type: "unsupported-tool", tool });
+ toolWarnings.push({
+ type: "unsupported",
+ feature: `tool ${tool}`
+ });
  break;
  }
  }
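One consumer-visible consequence of the hunk above: the old `unsupported-tool` warning carried the rejected tool object, while the unified warning only carries a descriptive `feature` string. A hypothetical helper (names mine) for recovering provider-defined tool ids from that string:

```ts
type UnsupportedWarning = {
  type: 'unsupported';
  feature: string;
  details?: string;
};

// Hypothetical helper: extract the ids of provider-defined tools that the
// Anthropic provider rejected, by parsing the feature strings emitted above.
function unsupportedToolIds(warnings: UnsupportedWarning[]): string[] {
  const prefix = 'provider-defined tool ';
  return warnings
    .filter(w => w.feature.startsWith(prefix))
    .map(w => w.feature.slice(prefix.length));
}
```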
@@ -1922,34 +1928,25 @@ var AnthropicMessagesLanguageModel = class {
  var _a, _b, _c, _d, _e, _f;
  const warnings = [];
  if (frequencyPenalty != null) {
- warnings.push({
- type: "unsupported-setting",
- setting: "frequencyPenalty"
- });
+ warnings.push({ type: "unsupported", feature: "frequencyPenalty" });
  }
  if (presencePenalty != null) {
- warnings.push({
- type: "unsupported-setting",
- setting: "presencePenalty"
- });
+ warnings.push({ type: "unsupported", feature: "presencePenalty" });
  }
  if (seed != null) {
- warnings.push({
- type: "unsupported-setting",
- setting: "seed"
- });
+ warnings.push({ type: "unsupported", feature: "seed" });
  }
  if (temperature != null && temperature > 1) {
  warnings.push({
- type: "unsupported-setting",
- setting: "temperature",
+ type: "unsupported",
+ feature: "temperature",
  details: `${temperature} exceeds anthropic maximum of 1.0. clamped to 1.0`
  });
  temperature = 1;
  } else if (temperature != null && temperature < 0) {
  warnings.push({
- type: "unsupported-setting",
- setting: "temperature",
+ type: "unsupported",
+ feature: "temperature",
  details: `${temperature} is below anthropic minimum of 0. clamped to 0`
  });
  temperature = 0;
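The settings handling above surfaces as warnings rather than errors at the call site. A minimal usage sketch, assuming `generateText` from the `ai` package exposes the provider warnings on its result; the model id is illustrative:

```ts
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

const result = await generateText({
  model: anthropic('claude-sonnet-4-5'),
  prompt: 'Say hello.',
  frequencyPenalty: 0.5, // not supported by the Messages API -> warning
  seed: 42,              // not supported -> warning
  temperature: 1.5,      // clamped to 1.0 with a 'temperature' warning
});

// Expected entries look like { type: 'unsupported', feature: 'seed' }.
console.log(result.warnings);
```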
@@ -1957,8 +1954,8 @@ var AnthropicMessagesLanguageModel = class {
  if ((responseFormat == null ? void 0 : responseFormat.type) === "json") {
  if (responseFormat.schema == null) {
  warnings.push({
- type: "unsupported-setting",
- setting: "responseFormat",
+ type: "unsupported",
+ feature: "responseFormat",
  details: "JSON response format requires a schema. The response format is ignored."
  });
  }
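The `responseFormat` branch above means JSON output is only honored when a schema is supplied; without one, the format is ignored with a warning. A sketch of the schema-driven path using `generateObject`, which is assumed here to be the typical way a schema reaches the provider; the model id is illustrative:

```ts
import { generateObject } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';
import { z } from 'zod';

// Supplying a schema avoids the 'responseFormat' warning emitted above.
const { object } = await generateObject({
  model: anthropic('claude-sonnet-4-5'),
  schema: z.object({
    title: z.string(),
    tags: z.array(z.string()),
  }),
  prompt: 'Suggest a title and tags for a post about prompt caching.',
});

console.log(object.title, object.tags);
```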
@@ -2051,24 +2048,24 @@ var AnthropicMessagesLanguageModel = class {
  if (baseArgs.temperature != null) {
  baseArgs.temperature = void 0;
  warnings.push({
- type: "unsupported-setting",
- setting: "temperature",
+ type: "unsupported",
+ feature: "temperature",
  details: "temperature is not supported when thinking is enabled"
  });
  }
  if (topK != null) {
  baseArgs.top_k = void 0;
  warnings.push({
- type: "unsupported-setting",
- setting: "topK",
+ type: "unsupported",
+ feature: "topK",
  details: "topK is not supported when thinking is enabled"
  });
  }
  if (topP != null) {
  baseArgs.top_p = void 0;
  warnings.push({
- type: "unsupported-setting",
- setting: "topP",
+ type: "unsupported",
+ feature: "topP",
  details: "topP is not supported when thinking is enabled"
  });
  }
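These branches run when extended thinking is enabled. A sketch of triggering them, assuming the `providerOptions.anthropic.thinking` option shape documented for this provider; the model id and budget are placeholders:

```ts
import { generateText } from 'ai';
import { anthropic } from '@ai-sdk/anthropic';

const { warnings } = await generateText({
  model: anthropic('claude-sonnet-4-5'),
  prompt: 'Outline a migration plan for the warning rename.',
  temperature: 0.7, // dropped -> { type: 'unsupported', feature: 'temperature' }
  topP: 0.9,        // dropped -> { type: 'unsupported', feature: 'topP' }
  providerOptions: {
    anthropic: {
      thinking: { type: 'enabled', budgetTokens: 12000 },
    },
  },
});

console.log(warnings);
```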
@@ -2077,8 +2074,8 @@ var AnthropicMessagesLanguageModel = class {
  if (isKnownModel && baseArgs.max_tokens > maxOutputTokensForModel) {
  if (maxOutputTokens != null) {
  warnings.push({
- type: "unsupported-setting",
- setting: "maxOutputTokens",
+ type: "unsupported",
+ feature: "maxOutputTokens",
  details: `${baseArgs.max_tokens} (maxOutputTokens + thinkingBudget) is greater than ${this.modelId} ${maxOutputTokensForModel} max output tokens. The max output tokens have been limited to ${maxOutputTokensForModel}.`
  });
  }
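The check above caps `max_tokens` (the caller's `maxOutputTokens` plus the thinking budget) at the model's known output limit. A hypothetical restatement of that arithmetic, with function and parameter names of my own choosing:

```ts
// Sketch of the clamping rule above; the names and the example limit are
// assumptions, only the requested-vs-limit comparison mirrors the diff.
function clampMaxTokens(
  maxOutputTokens: number,
  thinkingBudget: number,
  modelMaxOutputTokens: number,
): { maxTokens: number; clamped: boolean } {
  const requested = maxOutputTokens + thinkingBudget;
  return requested > modelMaxOutputTokens
    ? { maxTokens: modelMaxOutputTokens, clamped: true }
    : { maxTokens: requested, clamped: false };
}

// e.g. 60_000 requested output + 16_000 thinking budget against a 64_000
// limit would be clamped, and a 'maxOutputTokens' warning emitted.
clampMaxTokens(60_000, 16_000, 64_000); // { maxTokens: 64000, clamped: true }
```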
@@ -3458,8 +3455,8 @@ function createAnthropic(options = {}) {
  provider.languageModel = createChatModel;
  provider.chat = createChatModel;
  provider.messages = createChatModel;
- provider.textEmbeddingModel = (modelId) => {
- throw new import_provider4.NoSuchModelError({ modelId, modelType: "textEmbeddingModel" });
+ provider.embeddingModel = (modelId) => {
+ throw new import_provider4.NoSuchModelError({ modelId, modelType: "embeddingModel" });
  };
  provider.imageModel = (modelId) => {
  throw new import_provider4.NoSuchModelError({ modelId, modelType: "imageModel" });
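Following the rename from the 8d9e8ad changeset, the provider still has no embedding models; the factory simply moved from `textEmbeddingModel` to `embeddingModel`. A small sketch of what a caller now sees:

```ts
import { anthropic } from '@ai-sdk/anthropic';

try {
  // Anthropic offers no embedding models, so this always throws.
  anthropic.embeddingModel('my-model-id');
} catch (error) {
  // Expected: NoSuchModelError with modelType 'embeddingModel'
  // (previously 'textEmbeddingModel').
  console.error(error);
}
```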