ai 6.0.0-beta.74 → 6.0.0-beta.76
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/dist/index.d.mts +7 -3
- package/dist/index.d.ts +7 -3
- package/dist/index.js +72 -23
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +73 -23
- package/dist/index.mjs.map +1 -1
- package/dist/internal/index.js +1 -1
- package/dist/internal/index.mjs +1 -1
- package/package.json +4 -4
package/CHANGELOG.md
CHANGED

@@ -1,5 +1,25 @@
 # ai

+## 6.0.0-beta.76
+
+### Patch Changes
+
+- a755db5: feat(ai): improve warnings with provider and model id
+- Updated dependencies [a755db5]
+  - @ai-sdk/provider@3.0.0-beta.9
+  - @ai-sdk/gateway@2.0.0-beta.40
+  - @ai-sdk/provider-utils@4.0.0-beta.23
+
+## 6.0.0-beta.75
+
+### Patch Changes
+
+- 58920e0: fix(ai): do not drop custom headers in HttpChatTransport
+- 58920e0: refactor: consolidate header normalization across packages, remove duplicates, preserve custom headers
+- Updated dependencies [58920e0]
+  - @ai-sdk/provider-utils@4.0.0-beta.22
+  - @ai-sdk/gateway@2.0.0-beta.39
+
 ## 6.0.0-beta.74

 ### Patch Changes
package/dist/index.d.mts
CHANGED

@@ -3,7 +3,7 @@ import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
import { Tool, InferToolInput, InferToolOutput, AssistantModelMessage, ToolModelMessage, ReasoningPart, FlexibleSchema, InferSchema, ModelMessage, SystemModelMessage, UserModelMessage, ProviderOptions, IdGenerator, ToolCall, MaybePromiseLike, TextPart, FilePart, Resolvable, FetchFunction, DataContent } from '@ai-sdk/provider-utils';
export { AssistantContent, AssistantModelMessage, DataContent, FilePart, FlexibleSchema, IdGenerator, ImagePart, InferSchema, InferToolInput, InferToolOutput, ModelMessage, Schema, SystemModelMessage, TextPart, Tool, ToolApprovalRequest, ToolApprovalResponse, ToolCallOptions, ToolCallPart, ToolContent, ToolExecuteFunction, ToolModelMessage, ToolResultPart, UserContent, UserModelMessage, asSchema, createIdGenerator, dynamicTool, generateId, jsonSchema, parseJsonEventStream, tool, zodSchema } from '@ai-sdk/provider-utils';
import * as _ai_sdk_provider from '@ai-sdk/provider';
-import { EmbeddingModelV3, EmbeddingModelV2, EmbeddingModelV3Embedding, ImageModelV3, ImageModelV3CallWarning, ImageModelV3ProviderMetadata, JSONValue as JSONValue$1, LanguageModelV3, LanguageModelV2, LanguageModelV3FinishReason, LanguageModelV3CallWarning, LanguageModelV3Source, LanguageModelV3Middleware, SharedV3ProviderMetadata, SpeechModelV3, SpeechModelV2, SpeechModelV3CallWarning, TranscriptionModelV3, TranscriptionModelV2, TranscriptionModelV3CallWarning, LanguageModelV3Usage, LanguageModelV3CallOptions, AISDKError, LanguageModelV3ToolCall, JSONSchema7, JSONParseError, TypeValidationError, ProviderV3, ProviderV2, NoSuchModelError } from '@ai-sdk/provider';
+import { EmbeddingModelV3, EmbeddingModelV2, EmbeddingModelV3Embedding, ImageModelV3, ImageModelV3CallWarning, ImageModelV3ProviderMetadata, JSONValue as JSONValue$1, LanguageModelV3, LanguageModelV2, LanguageModelV3FinishReason, LanguageModelV3CallWarning, LanguageModelV3Source, LanguageModelV3Middleware, SharedV3ProviderMetadata, SpeechModelV3, SpeechModelV2, SpeechModelV3CallWarning, TranscriptionModelV3, TranscriptionModelV2, TranscriptionModelV3CallWarning, LanguageModelV3Usage, LanguageModelV3CallOptions, AISDKError, LanguageModelV3ToolCall, JSONSchema7, JSONParseError, TypeValidationError, SharedV3Warning, ProviderV3, ProviderV2, NoSuchModelError } from '@ai-sdk/provider';
export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, InvalidResponseDataError, JSONParseError, JSONSchema7, LoadAPIKeyError, LoadSettingError, NoContentGeneratedError, NoSuchModelError, TooManyEmbeddingValuesForCallError, TypeValidationError, UnsupportedFunctionalityError } from '@ai-sdk/provider';
import { ServerResponse } from 'node:http';
import { AttributeValue, Tracer } from '@opentelemetry/api';
@@ -4603,8 +4603,12 @@ declare function generateSpeech({ model, text, voice, outputFormat, instructions
    headers?: Record<string, string>;
}): Promise<SpeechResult>;

-type Warning = LanguageModelV3CallWarning | ImageModelV3CallWarning | SpeechModelV3CallWarning | TranscriptionModelV3CallWarning;
-type LogWarningsFunction = (
+type Warning = LanguageModelV3CallWarning | ImageModelV3CallWarning | SpeechModelV3CallWarning | TranscriptionModelV3CallWarning | SharedV3Warning;
+type LogWarningsFunction = (options: {
+    warnings: Warning[];
+    provider: string;
+    model: string;
+}) => void;

/**
 * Applies default settings for a language model.
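The new LogWarningsFunction options object carries the provider and model ids alongside the warnings. A minimal sketch of a custom handler wired up through the AI_SDK_LOG_WARNINGS global that the dist code below reads; the structural type is written inline here, since this diff does not show whether the type itself is re-exported from 'ai':

// Sketch only: custom warning handler matching the new options shape.
// Warning entries are typed as unknown because only the union members are
// listed in the declaration above.
type WarningLogger = (options: {
  warnings: unknown[];
  provider: string;
  model: string;
}) => void;

const logWithContext: WarningLogger = ({ warnings, provider, model }) => {
  for (const warning of warnings) {
    console.warn(`[${provider} / ${model}]`, warning);
  }
};

// The dist code reads the handler from the AI_SDK_LOG_WARNINGS global.
(globalThis as any).AI_SDK_LOG_WARNINGS = logWithContext;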
package/dist/index.d.ts
CHANGED

@@ -3,7 +3,7 @@ import * as _ai_sdk_provider_utils from '@ai-sdk/provider-utils';
import { Tool, InferToolInput, InferToolOutput, AssistantModelMessage, ToolModelMessage, ReasoningPart, FlexibleSchema, InferSchema, ModelMessage, SystemModelMessage, UserModelMessage, ProviderOptions, IdGenerator, ToolCall, MaybePromiseLike, TextPart, FilePart, Resolvable, FetchFunction, DataContent } from '@ai-sdk/provider-utils';
export { AssistantContent, AssistantModelMessage, DataContent, FilePart, FlexibleSchema, IdGenerator, ImagePart, InferSchema, InferToolInput, InferToolOutput, ModelMessage, Schema, SystemModelMessage, TextPart, Tool, ToolApprovalRequest, ToolApprovalResponse, ToolCallOptions, ToolCallPart, ToolContent, ToolExecuteFunction, ToolModelMessage, ToolResultPart, UserContent, UserModelMessage, asSchema, createIdGenerator, dynamicTool, generateId, jsonSchema, parseJsonEventStream, tool, zodSchema } from '@ai-sdk/provider-utils';
import * as _ai_sdk_provider from '@ai-sdk/provider';
-import { EmbeddingModelV3, EmbeddingModelV2, EmbeddingModelV3Embedding, ImageModelV3, ImageModelV3CallWarning, ImageModelV3ProviderMetadata, JSONValue as JSONValue$1, LanguageModelV3, LanguageModelV2, LanguageModelV3FinishReason, LanguageModelV3CallWarning, LanguageModelV3Source, LanguageModelV3Middleware, SharedV3ProviderMetadata, SpeechModelV3, SpeechModelV2, SpeechModelV3CallWarning, TranscriptionModelV3, TranscriptionModelV2, TranscriptionModelV3CallWarning, LanguageModelV3Usage, LanguageModelV3CallOptions, AISDKError, LanguageModelV3ToolCall, JSONSchema7, JSONParseError, TypeValidationError, ProviderV3, ProviderV2, NoSuchModelError } from '@ai-sdk/provider';
+import { EmbeddingModelV3, EmbeddingModelV2, EmbeddingModelV3Embedding, ImageModelV3, ImageModelV3CallWarning, ImageModelV3ProviderMetadata, JSONValue as JSONValue$1, LanguageModelV3, LanguageModelV2, LanguageModelV3FinishReason, LanguageModelV3CallWarning, LanguageModelV3Source, LanguageModelV3Middleware, SharedV3ProviderMetadata, SpeechModelV3, SpeechModelV2, SpeechModelV3CallWarning, TranscriptionModelV3, TranscriptionModelV2, TranscriptionModelV3CallWarning, LanguageModelV3Usage, LanguageModelV3CallOptions, AISDKError, LanguageModelV3ToolCall, JSONSchema7, JSONParseError, TypeValidationError, SharedV3Warning, ProviderV3, ProviderV2, NoSuchModelError } from '@ai-sdk/provider';
export { AISDKError, APICallError, EmptyResponseBodyError, InvalidPromptError, InvalidResponseDataError, JSONParseError, JSONSchema7, LoadAPIKeyError, LoadSettingError, NoContentGeneratedError, NoSuchModelError, TooManyEmbeddingValuesForCallError, TypeValidationError, UnsupportedFunctionalityError } from '@ai-sdk/provider';
import { ServerResponse } from 'node:http';
import { AttributeValue, Tracer } from '@opentelemetry/api';
@@ -4603,8 +4603,12 @@ declare function generateSpeech({ model, text, voice, outputFormat, instructions
    headers?: Record<string, string>;
}): Promise<SpeechResult>;

-type Warning = LanguageModelV3CallWarning | ImageModelV3CallWarning | SpeechModelV3CallWarning | TranscriptionModelV3CallWarning;
-type LogWarningsFunction = (
+type Warning = LanguageModelV3CallWarning | ImageModelV3CallWarning | SpeechModelV3CallWarning | TranscriptionModelV3CallWarning | SharedV3Warning;
+type LogWarningsFunction = (options: {
+    warnings: Warning[];
+    provider: string;
+    model: string;
+}) => void;

/**
 * Applies default settings for a language model.
package/dist/index.js
CHANGED

@@ -161,21 +161,32 @@ var NoOutputSpecifiedError = class extends import_provider.AISDKError {
_a = symbol;

// src/logger/log-warnings.ts
-function formatWarning(warning) {
-
+function formatWarning({
+  warning,
+  provider,
+  model
+}) {
+  const prefix = `AI SDK Warning (${provider} / ${model}):`;
  switch (warning.type) {
    case "unsupported-setting": {
-      let message = `${prefix} The "${warning.setting}" setting is not supported
+      let message = `${prefix} The "${warning.setting}" setting is not supported.`;
+      if (warning.details) {
+        message += ` ${warning.details}`;
+      }
+      return message;
+    }
+    case "compatibility": {
+      let message = `${prefix} The "${warning.feature}" feature is not fully supported.`;
      if (warning.details) {
-        message += `
+        message += ` ${warning.details}`;
      }
      return message;
    }
    case "unsupported-tool": {
      const toolName = "name" in warning.tool ? warning.tool.name : "unknown tool";
-      let message = `${prefix} The tool "${toolName}" is not supported
+      let message = `${prefix} The tool "${toolName}" is not supported.`;
      if (warning.details) {
-        message += `
+        message += ` ${warning.details}`;
      }
      return message;
    }
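The warning prefix now identifies the provider and model that produced the warning, and a new "compatibility" warning case is handled. Purely illustrative sketch of the resulting message, using hypothetical provider, model, and setting names not taken from this diff:

// Illustrative only: how the new prefix reads with made-up ids.
const provider = "openai";
const model = "gpt-4o";
const prefix = `AI SDK Warning (${provider} / ${model}):`;
console.warn(`${prefix} The "someSetting" setting is not supported.`);
// -> AI SDK Warning (openai / gpt-4o): The "someSetting" setting is not supported.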
@@ -189,8 +200,8 @@ function formatWarning(warning) {
}
var FIRST_WARNING_INFO_MESSAGE = "AI SDK Warning System: To turn off warning logging, set the AI_SDK_LOG_WARNINGS global to false.";
var hasLoggedBefore = false;
-var logWarnings = (warnings) => {
-  if (warnings.length === 0) {
+var logWarnings = (options) => {
+  if (options.warnings.length === 0) {
    return;
  }
  const logger = globalThis.AI_SDK_LOG_WARNINGS;
@@ -198,15 +209,21 @@ var logWarnings = (warnings) => {
    return;
  }
  if (typeof logger === "function") {
-    logger(
+    logger(options);
    return;
  }
  if (!hasLoggedBefore) {
    hasLoggedBefore = true;
    console.info(FIRST_WARNING_INFO_MESSAGE);
  }
-  for (const warning of warnings) {
-    console.warn(
+  for (const warning of options.warnings) {
+    console.warn(
+      formatWarning({
+        warning,
+        provider: options.provider,
+        model: options.model
+      })
+    );
  }
};

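As the info message above states, warning logging can be switched off by setting the AI_SDK_LOG_WARNINGS global to false; a custom function assigned to the same global receives the full options object instead. Minimal sketch (the global is typed loosely here because no declaration for it appears in this diff):

// Sketch: silence AI SDK warning logging entirely.
(globalThis as any).AI_SDK_LOG_WARNINGS = false;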
@@ -849,7 +866,7 @@ function detectMediaType({
var import_provider_utils2 = require("@ai-sdk/provider-utils");

// src/version.ts
-var VERSION = true ? "6.0.0-beta.74" : "0.0.0-test";
+var VERSION = true ? "6.0.0-beta.76" : "0.0.0-test";

// src/util/download/download.ts
var download = async ({ url }) => {
@@ -2957,7 +2974,11 @@ async function generateText({
        messages: structuredClone(responseMessages)
      }
    });
-    logWarnings(
+    logWarnings({
+      warnings: (_g = currentModelResponse.warnings) != null ? _g : [],
+      provider: stepModel.provider,
+      model: stepModel.modelId
+    });
    steps.push(currentStepResult);
    await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
  } while (
@@ -5243,7 +5264,11 @@ var DefaultStreamTextResult = class {
          providerMetadata: part.providerMetadata
        });
        await (onStepFinish == null ? void 0 : onStepFinish(currentStepResult));
-        logWarnings(
+        logWarnings({
+          warnings: recordedWarnings,
+          provider: model.provider,
+          model: model.modelId
+        });
        recordedSteps.push(currentStepResult);
        recordedResponseMessages.push(...stepMessages);
        stepFinish.resolve();
@@ -7634,7 +7659,7 @@ async function generateImage({
    }
    responses.push(result.response);
  }
-  logWarnings(warnings);
+  logWarnings({ warnings, provider: model.provider, model: model.modelId });
  if (!images.length) {
    throw new NoImageGeneratedError({ responses });
  }
@@ -8311,7 +8336,11 @@ async function generateObject(options) {
  request = (_a16 = generateResult.request) != null ? _a16 : {};
  response = generateResult.responseData;
  reasoning = generateResult.reasoning;
-  logWarnings(
+  logWarnings({
+    warnings,
+    provider: model.provider,
+    model: model.modelId
+  });
  const object2 = await parseAndValidateObjectResultWithRepair(
    result,
    outputStrategy,
@@ -8826,7 +8855,11 @@ var DefaultStreamObjectResult = class {
          usage,
          response: fullResponse
        });
-        logWarnings(
+        logWarnings({
+          warnings: warnings != null ? warnings : [],
+          provider: model.provider,
+          model: model.modelId
+        });
        self._usage.resolve(usage);
        self._providerMetadata.resolve(providerMetadata);
        self._warnings.resolve(warnings);
@@ -9104,7 +9137,11 @@ async function generateSpeech({
  if (!result.audio || result.audio.length === 0) {
    throw new NoSpeechGeneratedError({ responses: [result.response] });
  }
-  logWarnings(
+  logWarnings({
+    warnings: result.warnings,
+    provider: resolvedModel.provider,
+    model: resolvedModel.modelId
+  });
  return new DefaultSpeechResult({
    audio: new DefaultGeneratedAudioFile({
      data: result.audio,
@@ -10124,7 +10161,11 @@ async function transcribe({
      });
    }
  );
-  logWarnings(
+  logWarnings({
+    warnings: result.warnings,
+    provider: resolvedModel.provider,
+    model: resolvedModel.modelId
+  });
  if (!result.text) {
    throw new NoTranscriptGeneratedError({ responses: [result.response] });
  }
@@ -10349,19 +10390,23 @@ var HttpChatTransport = class {
    const resolvedBody = await (0, import_provider_utils33.resolve)(this.body);
    const resolvedHeaders = await (0, import_provider_utils33.resolve)(this.headers);
    const resolvedCredentials = await (0, import_provider_utils33.resolve)(this.credentials);
+    const baseHeaders = {
+      ...(0, import_provider_utils33.normalizeHeaders)(resolvedHeaders),
+      ...(0, import_provider_utils33.normalizeHeaders)(options.headers)
+    };
    const preparedRequest = await ((_a16 = this.prepareSendMessagesRequest) == null ? void 0 : _a16.call(this, {
      api: this.api,
      id: options.chatId,
      messages: options.messages,
      body: { ...resolvedBody, ...options.body },
-      headers:
+      headers: baseHeaders,
      credentials: resolvedCredentials,
      requestMetadata: options.metadata,
      trigger: options.trigger,
      messageId: options.messageId
    }));
    const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : this.api;
-    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers :
+    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? (0, import_provider_utils33.normalizeHeaders)(preparedRequest.headers) : baseHeaders;
    const body = (preparedRequest == null ? void 0 : preparedRequest.body) !== void 0 ? preparedRequest.body : {
      ...resolvedBody,
      ...options.body,
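Both the transport-level headers and the per-request options.headers are now passed through normalizeHeaders from @ai-sdk/provider-utils and merged into baseHeaders, so custom headers are preserved instead of dropped; per-request entries win on key collisions because they are spread last, and a prepareSendMessagesRequest hook can still replace the result entirely. Rough sketch of the merge order with hypothetical header values (the exact behavior of normalizeHeaders is assumed, not shown in this diff):

// Sketch of the merge semantics with made-up header values. Normalization is
// assumed to turn whatever HeadersInit form was configured into a plain
// record so the spread merge below behaves consistently.
const transportHeaders = { Authorization: "Bearer <token>", "X-Trace-Id": "from-transport" };
const requestHeaders = { "X-Trace-Id": "from-this-request" };

const baseHeaders = { ...transportHeaders, ...requestHeaders };
// -> { Authorization: "Bearer <token>", "X-Trace-Id": "from-this-request" }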
@@ -10401,16 +10446,20 @@ var HttpChatTransport = class {
    const resolvedBody = await (0, import_provider_utils33.resolve)(this.body);
    const resolvedHeaders = await (0, import_provider_utils33.resolve)(this.headers);
    const resolvedCredentials = await (0, import_provider_utils33.resolve)(this.credentials);
+    const baseHeaders = {
+      ...(0, import_provider_utils33.normalizeHeaders)(resolvedHeaders),
+      ...(0, import_provider_utils33.normalizeHeaders)(options.headers)
+    };
    const preparedRequest = await ((_a16 = this.prepareReconnectToStreamRequest) == null ? void 0 : _a16.call(this, {
      api: this.api,
      id: options.chatId,
      body: { ...resolvedBody, ...options.body },
-      headers:
+      headers: baseHeaders,
      credentials: resolvedCredentials,
      requestMetadata: options.metadata
    }));
    const api = (_b = preparedRequest == null ? void 0 : preparedRequest.api) != null ? _b : `${this.api}/${options.chatId}/stream`;
-    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? preparedRequest.headers :
+    const headers = (preparedRequest == null ? void 0 : preparedRequest.headers) !== void 0 ? (0, import_provider_utils33.normalizeHeaders)(preparedRequest.headers) : baseHeaders;
    const credentials = (_c = preparedRequest == null ? void 0 : preparedRequest.credentials) != null ? _c : resolvedCredentials;
    const fetch2 = (_d = this.fetch) != null ? _d : globalThis.fetch;
    const response = await fetch2(api, {
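The same normalization and merge is applied when reconnecting to a stream, so headers configured on the transport are preserved for both request paths. Usage sketch, assuming DefaultChatTransport (the concrete HttpChatTransport-based transport exported by the package) accepts these constructor options:

import { DefaultChatTransport } from "ai";

// Sketch: transport-level custom headers are now kept on sendMessages and
// reconnectToStream requests instead of being dropped. Option names mirror
// the fields read in the dist code above (api, headers); adjust to your setup.
const transport = new DefaultChatTransport({
  api: "/api/chat",
  headers: { "X-Org-Id": "org_123" },
});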