ai 2.2.24 → 2.2.26
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +20 -1
- package/dist/index.js +79 -27
- package/dist/index.mjs +75 -27
- package/package.json +5 -1
- package/prompts/dist/index.d.ts +8 -1
- package/prompts/dist/index.js +17 -0
- package/prompts/dist/index.mjs +16 -0
- package/react/dist/index.d.ts +27 -9
- package/react/dist/index.js +75 -55
- package/react/dist/index.mjs +75 -55
- package/solid/dist/index.d.ts +7 -1
- package/solid/dist/index.js +14 -10
- package/solid/dist/index.mjs +14 -10
- package/svelte/dist/index.d.ts +12 -1
- package/svelte/dist/index.js +291 -94
- package/svelte/dist/index.mjs +291 -94
- package/vue/dist/index.d.ts +12 -1
- package/vue/dist/index.js +312 -85
- package/vue/dist/index.mjs +312 -85
package/dist/index.d.ts
CHANGED
@@ -35,6 +35,7 @@ interface Function {
      */
     description?: string;
 }
+type IdGenerator = () => string;
 /**
  * Shared types between the API and UI packages.
  */
@@ -115,6 +116,11 @@ type UseChatOptions = {
      * Callback function to be called when an error is encountered.
      */
     onError?: (error: Error) => void;
+    /**
+     * A way to provide a function that is going to be used for ids for messages.
+     * If not provided nanoid is used by default.
+     */
+    generateId?: IdGenerator;
     /**
      * The credentials mode to be used for the fetch request.
      * Possible values are: 'omit', 'same-origin', 'include'.
@@ -461,6 +467,18 @@ declare function AIStream(response: Response, customParser?: AIStreamParser, cal
  */
 declare function readableFromAsyncIterable<T>(iterable: AsyncIterable<T>): ReadableStream<T>;
 
+interface AWSBedrockResponse {
+    body?: AsyncIterable<{
+        chunk?: {
+            bytes?: Uint8Array;
+        };
+    }>;
+}
+declare function AWSBedrockAnthropicStream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+declare function AWSBedrockCohereStream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+declare function AWSBedrockLlama2Stream(response: AWSBedrockResponse, callbacks?: AIStreamCallbacksAndOptions): ReadableStream;
+declare function AWSBedrockStream(response: AWSBedrockResponse, callbacks: AIStreamCallbacksAndOptions | undefined, extractTextDeltaFromChunk: (chunk: any) => string): ReadableStream<any>;
+
 /**
  * A stream wrapper to send custom JSON-encoded data back to the client.
  */
@@ -680,6 +698,7 @@ declare class experimental_StreamingReactResponse {
             data?: JSONValue[] | undefined;
         }) => UINode | Promise<UINode>;
         data?: experimental_StreamData;
+        generateId?: IdGenerator;
     });
 }
 
@@ -692,4 +711,4 @@ declare function experimental_AssistantResponse({ threadId, messageId }: {
     sendMessage: (message: AssistantMessage) => void;
 }) => Promise<void>): Response;
 
-export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
+export { AIStream, AIStreamCallbacksAndOptions, AIStreamParser, AWSBedrockAnthropicStream, AWSBedrockCohereStream, AWSBedrockLlama2Stream, AWSBedrockStream, AnthropicStream, AssistantMessage, COMPLEX_HEADER, ChatRequest, ChatRequestOptions, CohereStream, CompletionUsage, CreateMessage, FunctionCall, FunctionCallHandler, FunctionCallPayload, HuggingFaceStream, IdGenerator, JSONValue, LangChainStream, Message, OpenAIStream, OpenAIStreamCallbacks, ReactResponseRow, ReplicateStream, RequestOptions, StreamString, StreamingTextResponse, UseChatOptions, UseCompletionOptions, createCallbacksTransformer, createChunkDecoder, createEventStreamTransformer, createStreamDataTransformer, experimental_AssistantResponse, experimental_StreamData, experimental_StreamingReactResponse, isStreamStringEqualToType, nanoid, readableFromAsyncIterable, streamToResponse, trimStartOfStreamHelper };
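The new AWSBedrock* helpers typed above wrap the streaming body returned by @aws-sdk/client-bedrock-runtime (added further down as a devDependency). A hedged sketch of a route handler, assuming a Next.js-style Request/Response runtime, AWS credentials in the environment, and the anthropic.claude-v2 model id with an Anthropic-style request body (none of which are prescribed by this diff):

// app/api/chat/route.ts (illustrative path)
import {
  BedrockRuntimeClient,
  InvokeModelWithResponseStreamCommand,
} from '@aws-sdk/client-bedrock-runtime';
import { AWSBedrockAnthropicStream, StreamingTextResponse } from 'ai';

const bedrockClient = new BedrockRuntimeClient({
  region: process.env.AWS_REGION ?? 'us-east-1', // assumption: region from env
});

export async function POST(req: Request) {
  const { prompt } = await req.json();

  // Claude on Bedrock expects the "\n\nHuman: ...\n\nAssistant:" prompt format
  // and an Anthropic completion body (max_tokens_to_sample is illustrative).
  const bedrockResponse = await bedrockClient.send(
    new InvokeModelWithResponseStreamCommand({
      modelId: 'anthropic.claude-v2',
      contentType: 'application/json',
      accept: 'application/json',
      body: JSON.stringify({
        prompt: `\n\nHuman: ${prompt}\n\nAssistant:`,
        max_tokens_to_sample: 300,
      }),
    }),
  );

  // The SDK response body is structurally compatible with AWSBedrockResponse above.
  const stream = AWSBedrockAnthropicStream(bedrockResponse);
  return new StreamingTextResponse(stream);
}

AWSBedrockCohereStream and AWSBedrockLlama2Stream follow the same pattern, differing only in which field of each decoded chunk they read, as the compiled implementation in the next file shows.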
package/dist/index.js
CHANGED
@@ -21,6 +21,10 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 var streams_exports = {};
 __export(streams_exports, {
   AIStream: () => AIStream,
+  AWSBedrockAnthropicStream: () => AWSBedrockAnthropicStream,
+  AWSBedrockCohereStream: () => AWSBedrockCohereStream,
+  AWSBedrockLlama2Stream: () => AWSBedrockLlama2Stream,
+  AWSBedrockStream: () => AWSBedrockStream,
   AnthropicStream: () => AnthropicStream,
   COMPLEX_HEADER: () => COMPLEX_HEADER,
   CohereStream: () => CohereStream,
@@ -289,29 +293,6 @@ function formatStreamPart(type, value) {
 `;
 }
 
-// shared/utils.ts
-var import_non_secure = require("nanoid/non-secure");
-var nanoid = (0, import_non_secure.customAlphabet)(
-  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
-  7
-);
-function createChunkDecoder(complex) {
-  const decoder = new TextDecoder();
-  if (!complex) {
-    return function(chunk) {
-      if (!chunk)
-        return "";
-      return decoder.decode(chunk, { stream: true });
-    };
-  }
-  return function(chunk) {
-    const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
-    return decoded.map(parseStreamPart).filter(Boolean);
-  };
-}
-var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
-var COMPLEX_HEADER = "X-Experimental-Stream-Data";
-
 // streams/stream-data.ts
 var experimental_StreamData = class {
   constructor() {
@@ -397,6 +378,71 @@ function createStreamDataTransformer(experimental_streamData) {
   });
 }
 
+// streams/aws-bedrock-stream.ts
+async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+  var _a, _b;
+  const decoder = new TextDecoder();
+  for await (const chunk of (_a = response.body) != null ? _a : []) {
+    const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+    if (bytes != null) {
+      const chunkText = decoder.decode(bytes);
+      const chunkJSON = JSON.parse(chunkText);
+      const delta = extractTextDeltaFromChunk(chunkJSON);
+      if (delta != null) {
+        yield delta;
+      }
+    }
+  }
+}
+function AWSBedrockAnthropicStream(response, callbacks) {
+  return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
+}
+function AWSBedrockCohereStream(response, callbacks) {
+  return AWSBedrockStream(
+    response,
+    callbacks,
+    // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+    // so we take the full generation:
+    (chunk) => {
+      var _a, _b;
+      return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
+    }
+  );
+}
+function AWSBedrockLlama2Stream(response, callbacks) {
+  return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+}
+function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+  return readableFromAsyncIterable(
+    asDeltaIterable(response, extractTextDeltaFromChunk)
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+  );
+}
+
+// shared/utils.ts
+var import_non_secure = require("nanoid/non-secure");
+var nanoid = (0, import_non_secure.customAlphabet)(
+  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+  7
+);
+function createChunkDecoder(complex) {
+  const decoder = new TextDecoder();
+  if (!complex) {
+    return function(chunk) {
+      if (!chunk)
+        return "";
+      return decoder.decode(chunk, { stream: true });
+    };
+  }
+  return function(chunk) {
+    const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
+    return decoded.map(parseStreamPart).filter(Boolean);
+  };
+}
+var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
+var COMPLEX_HEADER = "X-Experimental-Stream-Data";
+
 // streams/openai-stream.ts
 function parseOpenAIStream() {
   const extract = chunkToText();
@@ -914,6 +960,7 @@ async function parseComplexResponse({
 // streams/streaming-react-response.ts
 var experimental_StreamingReactResponse = class {
   constructor(res, options) {
+    var _a;
     let resolveFunc = () => {
     };
     let next = new Promise((resolve) => {
@@ -927,8 +974,8 @@ var experimental_StreamingReactResponse = class {
       parseComplexResponse({
        reader: processedStream.getReader(),
        update: (merged, data) => {
-          var _a, _b, _c;
-          const content2 = (_b = (_a = merged[0]) == null ? void 0 : _a.content) != null ? _b : "";
+          var _a2, _b, _c;
+          const content2 = (_b = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b : "";
          const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content: content2, data })) || content2;
          const payload = { ui, content: content2 };
          const resolvePrevious = resolveFunc;
@@ -941,6 +988,7 @@ var experimental_StreamingReactResponse = class {
          });
          lastPayload = payload;
        },
+        generateId: (_a = options.generateId) != null ? _a : nanoid,
        onFinish: () => {
          if (lastPayload !== void 0) {
            resolveFunc({
@@ -956,12 +1004,12 @@ var experimental_StreamingReactResponse = class {
     const decode = createChunkDecoder();
     const reader = res.getReader();
     async function readChunk() {
-      var _a;
+      var _a2;
       const { done, value } = await reader.read();
       if (!done) {
         content += decode(value);
       }
-      const ui = ((_a = options == null ? void 0 : options.ui) == null ? void 0 : _a.call(options, { content })) || content;
+      const ui = ((_a2 = options == null ? void 0 : options.ui) == null ? void 0 : _a2.call(options, { content })) || content;
       const payload = {
         ui,
         content
@@ -1035,6 +1083,10 @@ function experimental_AssistantResponse({ threadId, messageId }, process2) {
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   AIStream,
+  AWSBedrockAnthropicStream,
+  AWSBedrockCohereStream,
+  AWSBedrockLlama2Stream,
+  AWSBedrockStream,
   AnthropicStream,
   COMPLEX_HEADER,
   CohereStream,
package/dist/index.mjs
CHANGED
@@ -245,29 +245,6 @@ function formatStreamPart(type, value) {
 `;
 }
 
-// shared/utils.ts
-import { customAlphabet } from "nanoid/non-secure";
-var nanoid = customAlphabet(
-  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
-  7
-);
-function createChunkDecoder(complex) {
-  const decoder = new TextDecoder();
-  if (!complex) {
-    return function(chunk) {
-      if (!chunk)
-        return "";
-      return decoder.decode(chunk, { stream: true });
-    };
-  }
-  return function(chunk) {
-    const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
-    return decoded.map(parseStreamPart).filter(Boolean);
-  };
-}
-var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
-var COMPLEX_HEADER = "X-Experimental-Stream-Data";
-
 // streams/stream-data.ts
 var experimental_StreamData = class {
   constructor() {
@@ -353,6 +330,71 @@ function createStreamDataTransformer(experimental_streamData) {
   });
 }
 
+// streams/aws-bedrock-stream.ts
+async function* asDeltaIterable(response, extractTextDeltaFromChunk) {
+  var _a, _b;
+  const decoder = new TextDecoder();
+  for await (const chunk of (_a = response.body) != null ? _a : []) {
+    const bytes = (_b = chunk.chunk) == null ? void 0 : _b.bytes;
+    if (bytes != null) {
+      const chunkText = decoder.decode(bytes);
+      const chunkJSON = JSON.parse(chunkText);
+      const delta = extractTextDeltaFromChunk(chunkJSON);
+      if (delta != null) {
+        yield delta;
+      }
+    }
+  }
+}
+function AWSBedrockAnthropicStream(response, callbacks) {
+  return AWSBedrockStream(response, callbacks, (chunk) => chunk.completion);
+}
+function AWSBedrockCohereStream(response, callbacks) {
+  return AWSBedrockStream(
+    response,
+    callbacks,
+    // As of 2023-11-17, Bedrock does not support streaming for Cohere,
+    // so we take the full generation:
+    (chunk) => {
+      var _a, _b;
+      return (_b = (_a = chunk.generations) == null ? void 0 : _a[0]) == null ? void 0 : _b.text;
+    }
+  );
+}
+function AWSBedrockLlama2Stream(response, callbacks) {
+  return AWSBedrockStream(response, callbacks, (chunk) => chunk.generation);
+}
+function AWSBedrockStream(response, callbacks, extractTextDeltaFromChunk) {
+  return readableFromAsyncIterable(
+    asDeltaIterable(response, extractTextDeltaFromChunk)
+  ).pipeThrough(createCallbacksTransformer(callbacks)).pipeThrough(
+    createStreamDataTransformer(callbacks == null ? void 0 : callbacks.experimental_streamData)
+  );
+}
+
+// shared/utils.ts
+import { customAlphabet } from "nanoid/non-secure";
+var nanoid = customAlphabet(
+  "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+  7
+);
+function createChunkDecoder(complex) {
+  const decoder = new TextDecoder();
+  if (!complex) {
+    return function(chunk) {
+      if (!chunk)
+        return "";
+      return decoder.decode(chunk, { stream: true });
+    };
+  }
+  return function(chunk) {
+    const decoded = decoder.decode(chunk, { stream: true }).split("\n").filter((line) => line !== "");
+    return decoded.map(parseStreamPart).filter(Boolean);
+  };
+}
+var isStreamStringEqualToType = (type, value) => value.startsWith(`${StreamStringPrefixes[type]}:`) && value.endsWith("\n");
+var COMPLEX_HEADER = "X-Experimental-Stream-Data";
+
 // streams/openai-stream.ts
 function parseOpenAIStream() {
   const extract = chunkToText();
@@ -870,6 +912,7 @@ async function parseComplexResponse({
 // streams/streaming-react-response.ts
 var experimental_StreamingReactResponse = class {
   constructor(res, options) {
+    var _a;
     let resolveFunc = () => {
     };
     let next = new Promise((resolve) => {
@@ -883,8 +926,8 @@ var experimental_StreamingReactResponse = class {
       parseComplexResponse({
        reader: processedStream.getReader(),
        update: (merged, data) => {
-          var _a, _b, _c;
-          const content2 = (_b = (_a = merged[0]) == null ? void 0 : _a.content) != null ? _b : "";
+          var _a2, _b, _c;
+          const content2 = (_b = (_a2 = merged[0]) == null ? void 0 : _a2.content) != null ? _b : "";
          const ui = ((_c = options == null ? void 0 : options.ui) == null ? void 0 : _c.call(options, { content: content2, data })) || content2;
          const payload = { ui, content: content2 };
          const resolvePrevious = resolveFunc;
@@ -897,6 +940,7 @@ var experimental_StreamingReactResponse = class {
          });
          lastPayload = payload;
        },
+        generateId: (_a = options.generateId) != null ? _a : nanoid,
        onFinish: () => {
          if (lastPayload !== void 0) {
            resolveFunc({
@@ -912,12 +956,12 @@ var experimental_StreamingReactResponse = class {
     const decode = createChunkDecoder();
     const reader = res.getReader();
     async function readChunk() {
-      var _a;
+      var _a2;
       const { done, value } = await reader.read();
       if (!done) {
         content += decode(value);
       }
-      const ui = ((_a = options == null ? void 0 : options.ui) == null ? void 0 : _a.call(options, { content })) || content;
+      const ui = ((_a2 = options == null ? void 0 : options.ui) == null ? void 0 : _a2.call(options, { content })) || content;
       const payload = {
         ui,
         content
@@ -990,6 +1034,10 @@ function experimental_AssistantResponse({ threadId, messageId }, process2) {
 }
 export {
   AIStream,
+  AWSBedrockAnthropicStream,
+  AWSBedrockCohereStream,
+  AWSBedrockLlama2Stream,
+  AWSBedrockStream,
   AnthropicStream,
   COMPLEX_HEADER,
   CohereStream,
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "ai",
-  "version": "2.2.24",
+  "version": "2.2.26",
   "license": "Apache-2.0",
   "sideEffects": false,
   "main": "./dist/index.js",
@@ -64,15 +64,19 @@
     "swrv": "1.0.4"
   },
   "devDependencies": {
+    "@anthropic-ai/sdk": "0.10.0",
+    "@aws-sdk/client-bedrock-runtime": "3.451.0",
     "@edge-runtime/jest-environment": "1.1.0-beta.31",
     "@huggingface/inference": "2.6.4",
     "@testing-library/jest-dom": "^6.1.4",
     "@testing-library/react": "^14.0.0",
     "@testing-library/user-event": "^14.5.1",
+    "@testing-library/vue": "^8.0.1",
     "@types/jest": "29.2.0",
     "@types/node": "^17.0.12",
     "@types/react": "^18.2.8",
     "@types/react-dom": "^18.2.0",
+    "@vue/vue3-jest": "28",
     "eslint": "^7.32.0",
     "jest": "29.2.1",
     "jest-environment-jsdom": "^29.7.0",
package/prompts/dist/index.d.ts
CHANGED
@@ -33,6 +33,13 @@ interface Message {
     function_call?: string | FunctionCall;
 }
 
+/**
+ * A prompt constructor for Anthropic models.
+ * Does not support `function` messages.
+ * @see https://docs.anthropic.com/claude/reference/getting-started-with-the-api
+ */
+declare function experimental_buildAnthropicPrompt(messages: Pick<Message, 'content' | 'role'>[]): string;
+
 /**
  * A prompt constructor for the HuggingFace StarChat Beta model.
  * Does not support `function` messages.
@@ -52,4 +59,4 @@ declare function experimental_buildOpenAssistantPrompt(messages: Pick<Message, '
  */
 declare function experimental_buildLlama2Prompt(messages: Pick<Message, 'content' | 'role'>[]): string;
 
-export { experimental_buildLlama2Prompt, experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
+export { experimental_buildAnthropicPrompt, experimental_buildLlama2Prompt, experimental_buildOpenAssistantPrompt, experimental_buildStarChatBetaPrompt };
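experimental_buildAnthropicPrompt converts user/assistant messages into Anthropic's Human/Assistant prompt format and appends a trailing "\n\nAssistant:" turn, as the compiled implementation diffed below shows. A small usage sketch:

import { experimental_buildAnthropicPrompt } from 'ai/prompts';

const prompt = experimental_buildAnthropicPrompt([
  { role: 'user', content: 'What is the capital of France?' },
]);

// For this single-message input the result is:
// "\n\nHuman: What is the capital of France?\n\nAssistant:"

The resulting string can be supplied as the prompt field of an Anthropic-style completion request, for example the Bedrock route handler sketched earlier.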
package/prompts/dist/index.js
CHANGED
@@ -20,12 +20,28 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 // prompts/index.ts
 var prompts_exports = {};
 __export(prompts_exports, {
+  experimental_buildAnthropicPrompt: () => experimental_buildAnthropicPrompt,
   experimental_buildLlama2Prompt: () => experimental_buildLlama2Prompt,
   experimental_buildOpenAssistantPrompt: () => experimental_buildOpenAssistantPrompt,
   experimental_buildStarChatBetaPrompt: () => experimental_buildStarChatBetaPrompt
 });
 module.exports = __toCommonJS(prompts_exports);
 
+// prompts/anthropic.ts
+function experimental_buildAnthropicPrompt(messages) {
+  return messages.map(({ content, role }) => {
+    if (role === "user") {
+      return `
+
+Human: ${content}`;
+    } else {
+      return `
+
+Assistant: ${content}`;
+    }
+  }) + "\n\nAssistant:";
+}
+
 // prompts/huggingface.ts
 function experimental_buildStarChatBetaPrompt(messages) {
   return messages.map(({ content, role }) => {
@@ -83,6 +99,7 @@ ${content}
 }
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
+  experimental_buildAnthropicPrompt,
   experimental_buildLlama2Prompt,
   experimental_buildOpenAssistantPrompt,
   experimental_buildStarChatBetaPrompt
package/prompts/dist/index.mjs
CHANGED
@@ -1,3 +1,18 @@
+// prompts/anthropic.ts
+function experimental_buildAnthropicPrompt(messages) {
+  return messages.map(({ content, role }) => {
+    if (role === "user") {
+      return `
+
+Human: ${content}`;
+    } else {
+      return `
+
+Assistant: ${content}`;
+    }
+  }) + "\n\nAssistant:";
+}
+
 // prompts/huggingface.ts
 function experimental_buildStarChatBetaPrompt(messages) {
   return messages.map(({ content, role }) => {
@@ -54,6 +69,7 @@ ${content}
   return startPrompt + conversation.join("") + endPrompt;
 }
 export {
+  experimental_buildAnthropicPrompt,
   experimental_buildLlama2Prompt,
   experimental_buildOpenAssistantPrompt,
   experimental_buildStarChatBetaPrompt
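The react entry point, diffed next, replaces the inline return type of experimental_useAssistant with a documented UseAssistantHelpers type and adds generateId to useChat's options. A minimal client-side sketch against the new helpers (the /api/assistant route path and the component shape are assumptions, not part of the diff):

import { experimental_useAssistant as useAssistant } from 'ai/react';

export function AssistantPanel() {
  const {
    messages,          // chat history
    threadId,          // thread id exposed directly on the helpers object
    input,
    handleInputChange, // onChange handler for an input or textarea
    submitMessage,     // form handler; accepts optional per-request data
    status,            // 'in_progress' | 'awaiting_message'
    error,
  } = useAssistant({ api: '/api/assistant' });

  // Render `messages`, wire `handleInputChange`/`submitMessage` to a form,
  // and disable the input while status === 'in_progress'.
  return null;
}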
package/react/dist/index.d.ts
CHANGED
@@ -33,6 +33,7 @@ interface Function {
      */
     description?: string;
 }
+type IdGenerator = () => string;
 /**
  * Shared types between the API and UI packages.
  */
@@ -113,6 +114,11 @@ type UseChatOptions = {
      * Callback function to be called when an error is encountered.
      */
     onError?: (error: Error) => void;
+    /**
+     * A way to provide a function that is going to be used for ids for messages.
+     * If not provided nanoid is used by default.
+     */
+    generateId?: IdGenerator;
     /**
      * The credentials mode to be used for the fetch request.
      * Possible values are: 'omit', 'same-origin', 'include'.
@@ -240,6 +246,7 @@ declare class experimental_StreamingReactResponse {
             data?: JSONValue[] | undefined;
         }) => UINode | Promise<UINode>;
         data?: experimental_StreamData;
+        generateId?: IdGenerator;
     });
 }
 
@@ -289,7 +296,7 @@ type StreamingReactResponseAction = (payload: {
     messages: Message[];
     data?: Record<string, string>;
 }) => Promise<experimental_StreamingReactResponse>;
-declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, }?: Omit<UseChatOptions, 'api'> & {
+declare function useChat({ api, id, initialMessages, initialInput, sendExtraMessageFields, experimental_onFunctionCall, onResponse, onFinish, onError, credentials, headers, body, generateId, }?: Omit<UseChatOptions, 'api'> & {
     api?: string | StreamingReactResponseAction;
 }): UseChatHelpers;
 
@@ -338,19 +345,30 @@ type UseCompletionHelpers = {
 declare function useCompletion({ api, id, initialCompletion, initialInput, credentials, headers, body, onResponse, onFinish, onError, }?: UseCompletionOptions): UseCompletionHelpers;
 
 type AssistantStatus = 'in_progress' | 'awaiting_message';
-declare function experimental_useAssistant({ api, threadId: threadIdParam, }: {
-    api: string;
-    threadId?: string | undefined;
-}): {
+type UseAssistantHelpers = {
+    /** Current messages in the chat */
     messages: Message[];
+    /** Current thread ID */
+    threadId: string | undefined;
+    /** The current value of the input */
     input: string;
-
-
+    /** An input/textarea-ready onChange handler to control the value of the input */
+    handleInputChange: (event: React.ChangeEvent<HTMLInputElement> | React.ChangeEvent<HTMLTextAreaElement>) => void;
+    /** Form submission handler to automatically reset input and append a user message */
+    submitMessage: (event?: React.FormEvent<HTMLFormElement>, requestOptions?: {
+        data?: Record<string, string>;
+    }) => Promise<void>;
+    /** Current status of the assistant */
     status: AssistantStatus;
-    error
+    /** Current error, if any */
+    error: undefined | unknown;
 };
+declare function experimental_useAssistant({ api, threadId: threadIdParam, }: {
+    api: string;
+    threadId?: string | undefined;
+}): UseAssistantHelpers;
 
-export { AssistantStatus, CreateMessage, Message, UseChatHelpers, UseChatOptions, UseCompletionHelpers, experimental_useAssistant, useChat, useCompletion };
+export { AssistantStatus, CreateMessage, Message, UseAssistantHelpers, UseChatHelpers, UseChatOptions, UseCompletionHelpers, experimental_useAssistant, useChat, useCompletion };
 import * as react_jsx_runtime from 'react/jsx-runtime';
 
 type Props = {