@ai-sdk/amazon-bedrock 4.0.28 → 4.0.29
This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in those public registries.
- package/CHANGELOG.md +10 -0
- package/dist/anthropic/index.js +1 -1
- package/dist/anthropic/index.mjs +1 -1
- package/dist/index.js +1 -1
- package/dist/index.mjs +1 -1
- package/package.json +9 -5
- package/src/__fixtures__/bedrock-json-only-text-first.1.chunks.txt +0 -7
- package/src/__fixtures__/bedrock-json-other-tool.1.chunks.txt +0 -6
- package/src/__fixtures__/bedrock-json-other-tool.1.json +0 -24
- package/src/__fixtures__/bedrock-json-tool-text-then-weather-then-json.1.chunks.txt +0 -12
- package/src/__fixtures__/bedrock-json-tool-with-answer.1.json +0 -29
- package/src/__fixtures__/bedrock-json-tool.1.chunks.txt +0 -4
- package/src/__fixtures__/bedrock-json-tool.1.json +0 -35
- package/src/__fixtures__/bedrock-json-tool.2.chunks.txt +0 -6
- package/src/__fixtures__/bedrock-json-tool.2.json +0 -28
- package/src/__fixtures__/bedrock-json-tool.3.chunks.txt +0 -7
- package/src/__fixtures__/bedrock-json-tool.3.json +0 -36
- package/src/__fixtures__/bedrock-json-with-tool.1.chunks.txt +0 -9
- package/src/__fixtures__/bedrock-json-with-tool.1.json +0 -41
- package/src/__fixtures__/bedrock-json-with-tools.1.chunks.txt +0 -12
- package/src/__fixtures__/bedrock-json-with-tools.1.json +0 -50
- package/src/__fixtures__/bedrock-tool-call.1.chunks.txt +0 -6
- package/src/__fixtures__/bedrock-tool-call.1.json +0 -24
- package/src/__fixtures__/bedrock-tool-no-args.chunks.txt +0 -8
- package/src/__fixtures__/bedrock-tool-no-args.json +0 -25
- package/src/anthropic/bedrock-anthropic-fetch.test.ts +0 -344
- package/src/anthropic/bedrock-anthropic-provider.test.ts +0 -456
- package/src/bedrock-chat-language-model.test.ts +0 -4569
- package/src/bedrock-embedding-model.test.ts +0 -148
- package/src/bedrock-event-stream-response-handler.test.ts +0 -233
- package/src/bedrock-image-model.test.ts +0 -866
- package/src/bedrock-provider.test.ts +0 -457
- package/src/bedrock-sigv4-fetch.test.ts +0 -675
- package/src/convert-bedrock-usage.test.ts +0 -207
- package/src/convert-to-bedrock-chat-messages.test.ts +0 -1175
- package/src/inject-fetch-headers.test.ts +0 -135
- package/src/normalize-tool-call-id.test.ts +0 -72
- package/src/reranking/__fixtures__/bedrock-reranking.1.json +0 -12
- package/src/reranking/bedrock-reranking-model.test.ts +0 -299
package/src/anthropic/bedrock-anthropic-fetch.test.ts
@@ -1,344 +0,0 @@
-import { createBedrockAnthropicFetch } from './bedrock-anthropic-fetch';
-import { EventStreamCodec } from '@smithy/eventstream-codec';
-import { toUtf8, fromUtf8 } from '@smithy/util-utf8';
-import { describe, it, expect, vi } from 'vitest';
-
-describe('createBedrockAnthropicFetch', () => {
-  function createMockResponse(
-    body: ReadableStream<Uint8Array> | null,
-    contentType: string,
-  ): Response {
-    return new Response(body, {
-      headers: { 'content-type': contentType },
-    });
-  }
-
-  function createMockFetch(
-    response: Response,
-  ): (url: RequestInfo | URL, options?: RequestInit) => Promise<Response> {
-    return vi.fn().mockResolvedValue(response);
-  }
-
-  it('should pass through non-streaming responses unchanged', async () => {
-    const jsonBody = JSON.stringify({ id: 'msg_123', content: [] });
-    const mockResponse = new Response(jsonBody, {
-      headers: { 'content-type': 'application/json' },
-    });
-    const baseFetch = createMockFetch(mockResponse);
-    const wrappedFetch = createBedrockAnthropicFetch(baseFetch);
-
-    const response = await wrappedFetch('https://example.com', {});
-
-    expect(response.headers.get('content-type')).toBe('application/json');
-    expect(await response.text()).toBe(jsonBody);
-  });
-
-  it('should transform Bedrock event stream to SSE format', async () => {
-    const codec = new EventStreamCodec(toUtf8, fromUtf8);
-
-    // Create a mock Anthropic event
-    const anthropicEvent = JSON.stringify({
-      type: 'content_block_delta',
-      delta: { type: 'text_delta', text: 'Hello' },
-    });
-
-    // Encode as Bedrock chunk with base64 bytes
-    const chunkPayload = JSON.stringify({
-      bytes: btoa(anthropicEvent),
-    });
-
-    // Create Bedrock event stream message
-    const bedrockEvent = codec.encode({
-      headers: {
-        ':message-type': { type: 'string', value: 'event' },
-        ':event-type': { type: 'string', value: 'chunk' },
-      },
-      body: fromUtf8(chunkPayload),
-    });
-
-    // Create a readable stream from the event
-    const stream = new ReadableStream({
-      start(controller) {
-        controller.enqueue(bedrockEvent);
-        controller.close();
-      },
-    });
-
-    const mockResponse = createMockResponse(
-      stream,
-      'application/vnd.amazon.eventstream',
-    );
-    const baseFetch = createMockFetch(mockResponse);
-    const wrappedFetch = createBedrockAnthropicFetch(baseFetch);
-
-    const response = await wrappedFetch('https://example.com', {});
-
-    expect(response.headers.get('content-type')).toBe('text/event-stream');
-
-    const reader = response.body!.getReader();
-    const { value } = await reader.read();
-    const text = new TextDecoder().decode(value);
-
-    expect(text).toBe(`data: ${anthropicEvent}\n\n`);
-  });
-
-  it('should handle messageStop event and emit [DONE]', async () => {
-    const codec = new EventStreamCodec(toUtf8, fromUtf8);
-
-    const bedrockEvent = codec.encode({
-      headers: {
-        ':message-type': { type: 'string', value: 'event' },
-        ':event-type': { type: 'string', value: 'messageStop' },
-      },
-      body: fromUtf8('{}'),
-    });
-
-    const stream = new ReadableStream({
-      start(controller) {
-        controller.enqueue(bedrockEvent);
-        controller.close();
-      },
-    });
-
-    const mockResponse = createMockResponse(
-      stream,
-      'application/vnd.amazon.eventstream',
-    );
-    const baseFetch = createMockFetch(mockResponse);
-    const wrappedFetch = createBedrockAnthropicFetch(baseFetch);
-
-    const response = await wrappedFetch('https://example.com', {});
-    const reader = response.body!.getReader();
-    const { value } = await reader.read();
-    const text = new TextDecoder().decode(value);
-
-    expect(text).toBe('data: [DONE]\n\n');
-  });
-
-  it('should handle exception messages', async () => {
-    const codec = new EventStreamCodec(toUtf8, fromUtf8);
-
-    const errorData = JSON.stringify({ message: 'Rate limit exceeded' });
-    const bedrockEvent = codec.encode({
-      headers: {
-        ':message-type': { type: 'string', value: 'exception' },
-        ':exception-type': { type: 'string', value: 'ThrottlingException' },
-      },
-      body: fromUtf8(errorData),
-    });
-
-    const stream = new ReadableStream({
-      start(controller) {
-        controller.enqueue(bedrockEvent);
-        controller.close();
-      },
-    });
-
-    const mockResponse = createMockResponse(
-      stream,
-      'application/vnd.amazon.eventstream',
-    );
-    const baseFetch = createMockFetch(mockResponse);
-    const wrappedFetch = createBedrockAnthropicFetch(baseFetch);
-
-    const response = await wrappedFetch('https://example.com', {});
-    const reader = response.body!.getReader();
-    const { value } = await reader.read();
-    const text = new TextDecoder().decode(value);
-
-    expect(text).toBe(
-      `data: ${JSON.stringify({ type: 'error', error: errorData })}\n\n`,
-    );
-  });
-
-  it('should handle multiple events in sequence', async () => {
-    const codec = new EventStreamCodec(toUtf8, fromUtf8);
-
-    const event1 = JSON.stringify({
-      type: 'message_start',
-      message: { id: 'msg_123' },
-    });
-    const event2 = JSON.stringify({
-      type: 'content_block_delta',
-      delta: { text: 'Hi' },
-    });
-
-    const chunk1 = codec.encode({
-      headers: {
-        ':message-type': { type: 'string', value: 'event' },
-        ':event-type': { type: 'string', value: 'chunk' },
-      },
-      body: fromUtf8(JSON.stringify({ bytes: btoa(event1) })),
-    });
-
-    const chunk2 = codec.encode({
-      headers: {
-        ':message-type': { type: 'string', value: 'event' },
-        ':event-type': { type: 'string', value: 'chunk' },
-      },
-      body: fromUtf8(JSON.stringify({ bytes: btoa(event2) })),
-    });
-
-    const stopEvent = codec.encode({
-      headers: {
-        ':message-type': { type: 'string', value: 'event' },
-        ':event-type': { type: 'string', value: 'messageStop' },
-      },
-      body: fromUtf8('{}'),
-    });
-
-    const stream = new ReadableStream({
-      start(controller) {
-        controller.enqueue(chunk1);
-        controller.enqueue(chunk2);
-        controller.enqueue(stopEvent);
-        controller.close();
-      },
-    });
-
-    const mockResponse = createMockResponse(
-      stream,
-      'application/vnd.amazon.eventstream',
-    );
-    const baseFetch = createMockFetch(mockResponse);
-    const wrappedFetch = createBedrockAnthropicFetch(baseFetch);
-
-    const response = await wrappedFetch('https://example.com', {});
-    const reader = response.body!.getReader();
-
-    const chunks: string[] = [];
-    while (true) {
-      const { done, value } = await reader.read();
-      if (done) break;
-      chunks.push(new TextDecoder().decode(value));
-    }
-
-    const fullText = chunks.join('');
-    expect(fullText).toContain(`data: ${event1}\n\n`);
-    expect(fullText).toContain(`data: ${event2}\n\n`);
-    expect(fullText).toContain('data: [DONE]\n\n');
-  });
-
-  it('should handle chunked event data spanning multiple network chunks', async () => {
-    const codec = new EventStreamCodec(toUtf8, fromUtf8);
-
-    const anthropicEvent = JSON.stringify({
-      type: 'content_block_delta',
-      delta: { text: 'Hello World' },
-    });
-
-    const bedrockEvent = codec.encode({
-      headers: {
-        ':message-type': { type: 'string', value: 'event' },
-        ':event-type': { type: 'string', value: 'chunk' },
-      },
-      body: fromUtf8(JSON.stringify({ bytes: btoa(anthropicEvent) })),
-    });
-
-    // Split the event into two chunks to simulate network fragmentation
-    const midpoint = Math.floor(bedrockEvent.length / 2);
-    const firstHalf = bedrockEvent.slice(0, midpoint);
-    const secondHalf = bedrockEvent.slice(midpoint);
-
-    const stream = new ReadableStream({
-      start(controller) {
-        controller.enqueue(firstHalf);
-        controller.enqueue(secondHalf);
-        controller.close();
-      },
-    });
-
-    const mockResponse = createMockResponse(
-      stream,
-      'application/vnd.amazon.eventstream',
-    );
-    const baseFetch = createMockFetch(mockResponse);
-    const wrappedFetch = createBedrockAnthropicFetch(baseFetch);
-
-    const response = await wrappedFetch('https://example.com', {});
-    const reader = response.body!.getReader();
-
-    const chunks: string[] = [];
-    while (true) {
-      const { done, value } = await reader.read();
-      if (done) break;
-      if (value) {
-        chunks.push(new TextDecoder().decode(value));
-      }
-    }
-
-    const fullText = chunks.join('');
-    expect(fullText).toBe(`data: ${anthropicEvent}\n\n`);
-  });
-
-  it('should preserve response status and statusText', async () => {
-    const codec = new EventStreamCodec(toUtf8, fromUtf8);
-
-    const bedrockEvent = codec.encode({
-      headers: {
-        ':message-type': { type: 'string', value: 'event' },
-        ':event-type': { type: 'string', value: 'messageStop' },
-      },
-      body: fromUtf8('{}'),
-    });
-
-    const stream = new ReadableStream({
-      start(controller) {
-        controller.enqueue(bedrockEvent);
-        controller.close();
-      },
-    });
-
-    const mockResponse = new Response(stream, {
-      status: 200,
-      statusText: 'OK',
-      headers: { 'content-type': 'application/vnd.amazon.eventstream' },
-    });
-    const baseFetch = createMockFetch(mockResponse);
-    const wrappedFetch = createBedrockAnthropicFetch(baseFetch);
-
-    const response = await wrappedFetch('https://example.com', {});
-
-    expect(response.status).toBe(200);
-    expect(response.statusText).toBe('OK');
-  });
-
-  it('should handle chunk events with missing bytes field', async () => {
-    const codec = new EventStreamCodec(toUtf8, fromUtf8);
-
-    // Create a chunk without a bytes field - this should fall back to emitting raw data
-    const chunkPayload = JSON.stringify({
-      someOtherField: 'value',
-    });
-
-    const bedrockEvent = codec.encode({
-      headers: {
-        ':message-type': { type: 'string', value: 'event' },
-        ':event-type': { type: 'string', value: 'chunk' },
-      },
-      body: fromUtf8(chunkPayload),
-    });
-
-    const stream = new ReadableStream({
-      start(controller) {
-        controller.enqueue(bedrockEvent);
-        controller.close();
-      },
-    });
-
-    const mockResponse = createMockResponse(
-      stream,
-      'application/vnd.amazon.eventstream',
-    );
-    const baseFetch = createMockFetch(mockResponse);
-    const wrappedFetch = createBedrockAnthropicFetch(baseFetch);
-
-    const response = await wrappedFetch('https://example.com', {});
-    const reader = response.body!.getReader();
-    const { value } = await reader.read();
-    const text = new TextDecoder().decode(value);
-
-    // Should emit the raw payload data as fallback
-    expect(text).toBe(`data: ${chunkPayload}\n\n`);
-  });
-});