langchain 0.0.179 → 0.0.181
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/chat_models/bedrock/web.cjs +1 -0
- package/chat_models/bedrock/web.d.ts +1 -0
- package/chat_models/bedrock/web.js +1 -0
- package/chat_models/bedrock.cjs +1 -1
- package/chat_models/bedrock.d.ts +1 -1
- package/chat_models/bedrock.js +1 -1
- package/dist/cache/base.cjs +2 -5
- package/dist/cache/base.js +2 -2
- package/dist/chains/combine_documents/reduce.cjs +3 -1
- package/dist/chains/combine_documents/reduce.js +3 -1
- package/dist/chat_models/baiduwenxin.cjs +16 -2
- package/dist/chat_models/baiduwenxin.js +16 -2
- package/dist/chat_models/bedrock/index.cjs +24 -0
- package/dist/chat_models/bedrock/index.d.ts +12 -0
- package/dist/chat_models/bedrock/index.js +18 -0
- package/dist/chat_models/{bedrock.cjs → bedrock/web.cjs} +44 -15
- package/dist/chat_models/{bedrock.d.ts → bedrock/web.d.ts} +14 -5
- package/dist/chat_models/{bedrock.js → bedrock/web.js} +42 -13
- package/dist/embeddings/cache_backed.cjs +2 -5
- package/dist/embeddings/cache_backed.js +2 -2
- package/dist/embeddings/voyage.cjs +120 -0
- package/dist/embeddings/voyage.d.ts +66 -0
- package/dist/embeddings/voyage.js +116 -0
- package/dist/llms/bedrock/index.cjs +17 -0
- package/dist/llms/bedrock/index.d.ts +7 -0
- package/dist/llms/bedrock/index.js +13 -0
- package/dist/llms/{bedrock.cjs → bedrock/web.cjs} +46 -12
- package/dist/llms/{bedrock.d.ts → bedrock/web.d.ts} +17 -4
- package/dist/llms/{bedrock.js → bedrock/web.js} +46 -12
- package/dist/load/import_constants.cjs +3 -0
- package/dist/load/import_constants.js +3 -0
- package/dist/load/import_map.cjs +3 -2
- package/dist/load/import_map.d.ts +1 -0
- package/dist/load/import_map.js +1 -0
- package/dist/stores/message/cassandra.cjs +135 -0
- package/dist/stores/message/cassandra.d.ts +44 -0
- package/dist/stores/message/cassandra.js +131 -0
- package/dist/util/bedrock.cjs +13 -1
- package/dist/util/bedrock.d.ts +5 -2
- package/dist/util/bedrock.js +13 -1
- package/dist/util/js-sha1/hash.cjs +358 -0
- package/dist/util/js-sha1/hash.d.ts +1 -0
- package/dist/util/js-sha1/hash.js +355 -0
- package/dist/util/stream.cjs +4 -1
- package/dist/util/stream.d.ts +4 -1
- package/dist/util/stream.js +4 -1
- package/dist/vectorstores/cassandra.cjs +197 -47
- package/dist/vectorstores/cassandra.d.ts +47 -4
- package/dist/vectorstores/cassandra.js +197 -47
- package/embeddings/voyage.cjs +1 -0
- package/embeddings/voyage.d.ts +1 -0
- package/embeddings/voyage.js +1 -0
- package/llms/bedrock/web.cjs +1 -0
- package/llms/bedrock/web.d.ts +1 -0
- package/llms/bedrock/web.js +1 -0
- package/llms/bedrock.cjs +1 -1
- package/llms/bedrock.d.ts +1 -1
- package/llms/bedrock.js +1 -1
- package/package.json +34 -4
- package/stores/message/cassandra.cjs +1 -0
- package/stores/message/cassandra.d.ts +1 -0
- package/stores/message/cassandra.js +1 -0
- package/dist/schema/runnable/remote.cjs +0 -225
- package/dist/schema/runnable/remote.d.ts +0 -28
- package/dist/schema/runnable/remote.js +0 -221

package/dist/embeddings/voyage.cjs
@@ -0,0 +1,120 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.VoyageEmbeddings = void 0;
+const chunk_js_1 = require("../util/chunk.cjs");
+const env_js_1 = require("../util/env.cjs");
+const base_js_1 = require("./base.cjs");
+/**
+ * A class for generating embeddings using the Voyage AI API.
+ */
+class VoyageEmbeddings extends base_js_1.Embeddings {
+    /**
+     * Constructor for the VoyageEmbeddings class.
+     * @param fields - An optional object with properties to configure the instance.
+     */
+    constructor(fields) {
+        const fieldsWithDefaults = { ...fields };
+        super(fieldsWithDefaults);
+        Object.defineProperty(this, "modelName", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "voyage-01"
+        });
+        Object.defineProperty(this, "batchSize", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 8
+        });
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "basePath", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "https://api.voyageai.com/v1"
+        });
+        Object.defineProperty(this, "apiUrl", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "headers", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        const apiKey = fieldsWithDefaults?.apiKey || (0, env_js_1.getEnvironmentVariable)("VOYAGEAI_API_KEY");
+        if (!apiKey) {
+            throw new Error("Voyage AI API key not found");
+        }
+        this.modelName = fieldsWithDefaults?.modelName ?? this.modelName;
+        this.batchSize = fieldsWithDefaults?.batchSize ?? this.batchSize;
+        this.apiKey = apiKey;
+        this.apiUrl = `${this.basePath}/embeddings`;
+    }
+    /**
+     * Generates embeddings for an array of texts.
+     * @param texts - An array of strings to generate embeddings for.
+     * @returns A Promise that resolves to an array of embeddings.
+     */
+    async embedDocuments(texts) {
+        const batches = (0, chunk_js_1.chunkArray)(texts, this.batchSize);
+        const batchRequests = batches.map((batch) => this.embeddingWithRetry({
+            model: this.modelName,
+            input: batch,
+        }));
+        const batchResponses = await Promise.all(batchRequests);
+        const embeddings = [];
+        for (let i = 0; i < batchResponses.length; i += 1) {
+            const batch = batches[i];
+            const { data: batchResponse } = batchResponses[i];
+            for (let j = 0; j < batch.length; j += 1) {
+                embeddings.push(batchResponse[j].embedding);
+            }
+        }
+        return embeddings;
+    }
+    /**
+     * Generates an embedding for a single text.
+     * @param text - A string to generate an embedding for.
+     * @returns A Promise that resolves to an array of numbers representing the embedding.
+     */
+    async embedQuery(text) {
+        const { data } = await this.embeddingWithRetry({
+            model: this.modelName,
+            input: text,
+        });
+        return data[0].embedding;
+    }
+    /**
+     * Makes a request to the Voyage AI API to generate embeddings for an array of texts.
+     * @param request - An object with properties to configure the request.
+     * @returns A Promise that resolves to the response from the Voyage AI API.
+     */
+    async embeddingWithRetry(request) {
+        const makeCompletionRequest = async () => {
+            const url = `${this.apiUrl}`;
+            const response = await fetch(url, {
+                method: "POST",
+                headers: {
+                    "Content-Type": "application/json",
+                    Authorization: `Bearer ${this.apiKey}`,
+                    ...this.headers,
+                },
+                body: JSON.stringify(request),
+            });
+            const json = await response.json();
+            return json;
+        };
+        return this.caller.call(makeCompletionRequest);
+    }
+}
+exports.VoyageEmbeddings = VoyageEmbeddings;

package/dist/embeddings/voyage.d.ts
@@ -0,0 +1,66 @@
+import { Embeddings, EmbeddingsParams } from "./base.js";
+/**
+ * Interface that extends EmbeddingsParams and defines additional
+ * parameters specific to the VoyageEmbeddings class.
+ */
+export interface VoyageEmbeddingsParams extends EmbeddingsParams {
+    modelName: string;
+    /**
+     * The maximum number of documents to embed in a single request. This is
+     * limited by the Voyage AI API to a maximum of 8.
+     */
+    batchSize?: number;
+}
+/**
+ * Interface for the request body to generate embeddings.
+ */
+export interface CreateVoyageEmbeddingRequest {
+    /**
+     * @type {string}
+     * @memberof CreateVoyageEmbeddingRequest
+     */
+    model: string;
+    /**
+     * Text to generate vector expectation
+     * @type {CreateEmbeddingRequestInput}
+     * @memberof CreateVoyageEmbeddingRequest
+     */
+    input: string | string[];
+}
+/**
+ * A class for generating embeddings using the Voyage AI API.
+ */
+export declare class VoyageEmbeddings extends Embeddings implements VoyageEmbeddingsParams {
+    modelName: string;
+    batchSize: number;
+    private apiKey;
+    basePath?: string;
+    apiUrl: string;
+    headers?: Record<string, string>;
+    /**
+     * Constructor for the VoyageEmbeddings class.
+     * @param fields - An optional object with properties to configure the instance.
+     */
+    constructor(fields?: Partial<VoyageEmbeddingsParams> & {
+        verbose?: boolean;
+        apiKey?: string;
+    });
+    /**
+     * Generates embeddings for an array of texts.
+     * @param texts - An array of strings to generate embeddings for.
+     * @returns A Promise that resolves to an array of embeddings.
+     */
+    embedDocuments(texts: string[]): Promise<number[][]>;
+    /**
+     * Generates an embedding for a single text.
+     * @param text - A string to generate an embedding for.
+     * @returns A Promise that resolves to an array of numbers representing the embedding.
+     */
+    embedQuery(text: string): Promise<number[]>;
+    /**
+     * Makes a request to the Voyage AI API to generate embeddings for an array of texts.
+     * @param request - An object with properties to configure the request.
+     * @returns A Promise that resolves to the response from the Voyage AI API.
+     */
+    private embeddingWithRetry;
+}

package/dist/embeddings/voyage.js
@@ -0,0 +1,116 @@
+import { chunkArray } from "../util/chunk.js";
+import { getEnvironmentVariable } from "../util/env.js";
+import { Embeddings } from "./base.js";
+/**
+ * A class for generating embeddings using the Voyage AI API.
+ */
+export class VoyageEmbeddings extends Embeddings {
+    /**
+     * Constructor for the VoyageEmbeddings class.
+     * @param fields - An optional object with properties to configure the instance.
+     */
+    constructor(fields) {
+        const fieldsWithDefaults = { ...fields };
+        super(fieldsWithDefaults);
+        Object.defineProperty(this, "modelName", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "voyage-01"
+        });
+        Object.defineProperty(this, "batchSize", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: 8
+        });
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "basePath", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "https://api.voyageai.com/v1"
+        });
+        Object.defineProperty(this, "apiUrl", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "headers", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        const apiKey = fieldsWithDefaults?.apiKey || getEnvironmentVariable("VOYAGEAI_API_KEY");
+        if (!apiKey) {
+            throw new Error("Voyage AI API key not found");
+        }
+        this.modelName = fieldsWithDefaults?.modelName ?? this.modelName;
+        this.batchSize = fieldsWithDefaults?.batchSize ?? this.batchSize;
+        this.apiKey = apiKey;
+        this.apiUrl = `${this.basePath}/embeddings`;
+    }
+    /**
+     * Generates embeddings for an array of texts.
+     * @param texts - An array of strings to generate embeddings for.
+     * @returns A Promise that resolves to an array of embeddings.
+     */
+    async embedDocuments(texts) {
+        const batches = chunkArray(texts, this.batchSize);
+        const batchRequests = batches.map((batch) => this.embeddingWithRetry({
+            model: this.modelName,
+            input: batch,
+        }));
+        const batchResponses = await Promise.all(batchRequests);
+        const embeddings = [];
+        for (let i = 0; i < batchResponses.length; i += 1) {
+            const batch = batches[i];
+            const { data: batchResponse } = batchResponses[i];
+            for (let j = 0; j < batch.length; j += 1) {
+                embeddings.push(batchResponse[j].embedding);
+            }
+        }
+        return embeddings;
+    }
+    /**
+     * Generates an embedding for a single text.
+     * @param text - A string to generate an embedding for.
+     * @returns A Promise that resolves to an array of numbers representing the embedding.
+     */
+    async embedQuery(text) {
+        const { data } = await this.embeddingWithRetry({
+            model: this.modelName,
+            input: text,
+        });
+        return data[0].embedding;
+    }
+    /**
+     * Makes a request to the Voyage AI API to generate embeddings for an array of texts.
+     * @param request - An object with properties to configure the request.
+     * @returns A Promise that resolves to the response from the Voyage AI API.
+     */
+    async embeddingWithRetry(request) {
+        const makeCompletionRequest = async () => {
+            const url = `${this.apiUrl}`;
+            const response = await fetch(url, {
+                method: "POST",
+                headers: {
+                    "Content-Type": "application/json",
+                    Authorization: `Bearer ${this.apiKey}`,
+                    ...this.headers,
+                },
+                body: JSON.stringify(request),
+            });
+            const json = await response.json();
+            return json;
+        };
+        return this.caller.call(makeCompletionRequest);
+    }
+}
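
The three files above add a new VoyageEmbeddings class, exposed through the package/embeddings/voyage entrypoint stubs in the file list at the top. A minimal usage sketch, assuming the langchain/embeddings/voyage entrypoint and either an explicit apiKey or the VOYAGEAI_API_KEY environment variable; the API key placeholder and input strings are illustrative only:

    // Sketch only: assumes the new "langchain/embeddings/voyage" entrypoint added in this release.
    import { VoyageEmbeddings } from "langchain/embeddings/voyage";

    const embeddings = new VoyageEmbeddings({
      // apiKey may be omitted if VOYAGEAI_API_KEY is set in the environment.
      apiKey: "YOUR_VOYAGE_API_KEY",
      modelName: "voyage-01", // the default shown in the constructor above
      batchSize: 8, // the Voyage AI API caps a single request at 8 documents
    });

    // embedQuery sends a single input; embedDocuments splits inputs into batches of batchSize.
    const queryVector = await embeddings.embedQuery("What is LangChain?");
    const docVectors = await embeddings.embedDocuments(["first document", "second document"]);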

package/dist/llms/bedrock/index.cjs
@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bedrock = void 0;
+const credential_provider_node_1 = require("@aws-sdk/credential-provider-node");
+const web_js_1 = require("./web.cjs");
+class Bedrock extends web_js_1.Bedrock {
+    static lc_name() {
+        return "Bedrock";
+    }
+    constructor(fields) {
+        super({
+            ...fields,
+            credentials: fields?.credentials ?? (0, credential_provider_node_1.defaultProvider)(),
+        });
+    }
+}
+exports.Bedrock = Bedrock;

package/dist/llms/bedrock/index.d.ts
@@ -0,0 +1,7 @@
+import { BaseBedrockInput } from "../../util/bedrock.js";
+import { BaseLLMParams } from "../base.js";
+import { Bedrock as BaseBedrock } from "./web.js";
+export declare class Bedrock extends BaseBedrock {
+    static lc_name(): string;
+    constructor(fields?: Partial<BaseBedrockInput> & BaseLLMParams);
+}

package/dist/llms/bedrock/index.js
@@ -0,0 +1,13 @@
+import { defaultProvider } from "@aws-sdk/credential-provider-node";
+import { Bedrock as BaseBedrock } from "./web.js";
+export class Bedrock extends BaseBedrock {
+    static lc_name() {
+        return "Bedrock";
+    }
+    constructor(fields) {
+        super({
+            ...fields,
+            credentials: fields?.credentials ?? defaultProvider(),
+        });
+    }
+}
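
These three new files split the Bedrock LLM into a Node.js entrypoint (langchain/llms/bedrock) that falls back to the AWS SDK's defaultProvider() when no credentials are passed, and a web entrypoint (langchain/llms/bedrock/web, whose changes follow below) that requires explicit credentials. A hedged sketch of the Node-side usage; the model id and region are illustrative:

    // Sketch only: Node entrypoint, credentials resolved via defaultProvider() when omitted.
    import { Bedrock } from "langchain/llms/bedrock";

    const llm = new Bedrock({
      model: "anthropic.claude-v2", // illustrative; the prefix must be ai21, anthropic, amazon, or cohere
      region: "us-east-1",          // falls back to AWS_DEFAULT_REGION if not provided
      maxTokens: 256,
    });

    const answer = await llm.call("Summarize what Amazon Bedrock is in one sentence.");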

package/dist/llms/{bedrock.cjs → bedrock/web.cjs}
@@ -2,15 +2,14 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Bedrock = void 0;
 const signature_v4_1 = require("@smithy/signature-v4");
-const credential_provider_node_1 = require("@aws-sdk/credential-provider-node");
 const protocol_http_1 = require("@smithy/protocol-http");
 const eventstream_codec_1 = require("@smithy/eventstream-codec");
 const util_utf8_1 = require("@smithy/util-utf8");
 const sha256_js_1 = require("@aws-crypto/sha256-js");
-const bedrock_js_1 = require("
-const env_js_1 = require("
-const base_js_1 = require("
-const index_js_1 = require("
+const bedrock_js_1 = require("../../util/bedrock.cjs");
+const env_js_1 = require("../../util/env.cjs");
+const base_js_1 = require("../base.cjs");
+const index_js_1 = require("../../schema/index.cjs");
 /**
  * A type of Large Language Model (LLM) that interacts with the Bedrock
  * service. It extends the base `LLM` class and implements the
@@ -21,8 +20,20 @@ const index_js_1 = require("../schema/index.cjs");
  * region, and the maximum number of tokens to generate.
  */
 class Bedrock extends base_js_1.LLM {
+    get lc_aliases() {
+        return {
+            model: "model_id",
+            region: "region_name",
+        };
+    }
     get lc_secrets() {
-        return {
+        return {
+            "credentials.accessKeyId": "BEDROCK_AWS_ACCESS_KEY_ID",
+            "credentials.secretAccessKey": "BEDROCK_AWS_SECRET_ACCESS_KEY",
+        };
+    }
+    get lc_attributes() {
+        return { region: this.region };
     }
     _llmType() {
         return "bedrock";
@@ -74,6 +85,7 @@ class Bedrock extends base_js_1.LLM {
             writable: true,
             value: void 0
         });
+        /** @deprecated */
         Object.defineProperty(this, "stopSequences", {
             enumerable: true,
             configurable: true,
@@ -98,8 +110,14 @@ class Bedrock extends base_js_1.LLM {
             writable: true,
             value: false
         });
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
         this.model = fields?.model ?? this.model;
-        const allowedModels = ["ai21", "anthropic", "amazon"];
+        const allowedModels = ["ai21", "anthropic", "amazon", "cohere"];
         if (!allowedModels.includes(this.model.split(".")[0])) {
             throw new Error(`Unknown model: '${this.model}', only these are supported: ${allowedModels}`);
         }
@@ -108,7 +126,11 @@ class Bedrock extends base_js_1.LLM {
            throw new Error("Please set the AWS_DEFAULT_REGION environment variable or pass it to the constructor as the region field.");
        }
        this.region = region;
-
+        const credentials = fields?.credentials;
+        if (!credentials) {
+            throw new Error("Please set the AWS credentials in the 'credentials' field.");
+        }
+        this.credentials = credentials;
        this.temperature = fields?.temperature ?? this.temperature;
        this.maxTokens = fields?.maxTokens ?? this.maxTokens;
        this.fetchFn = fields?.fetchFn ?? fetch;
@@ -158,7 +180,7 @@ class Bedrock extends base_js_1.LLM {
     }
     async _signedFetch(prompt, options, fields) {
         const { bedrockMethod, endpointHost, provider } = fields;
-        const inputBody = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature, this.stopSequences, this.modelKwargs);
+        const inputBody = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature, options.stop ?? this.stopSequences, this.modelKwargs, fields.bedrockMethod);
         const url = new URL(`https://${endpointHost}/model/${this.model}/${bedrockMethod}`);
         const request = new protocol_http_1.HttpRequest({
             hostname: url.hostname,
@@ -189,9 +211,21 @@ class Bedrock extends base_js_1.LLM {
         }));
         return response;
     }
+    invocationParams(options) {
+        return {
+            model: this.model,
+            region: this.region,
+            temperature: this.temperature,
+            maxTokens: this.maxTokens,
+            stop: options?.stop ?? this.stopSequences,
+            modelKwargs: this.modelKwargs,
+        };
+    }
     async *_streamResponseChunks(prompt, options, runManager) {
         const provider = this.model.split(".")[0];
-        const bedrockMethod = provider === "anthropic"
+        const bedrockMethod = provider === "anthropic" || provider === "cohere"
+            ? "invoke-with-response-stream"
+            : "invoke";
         const service = "bedrock-runtime";
         const endpointHost = this.endpointHost ?? `${service}.${this.region}.amazonaws.com`;
         // Send request to AWS using the low-level fetch API
@@ -203,7 +237,7 @@ class Bedrock extends base_js_1.LLM {
         if (response.status < 200 || response.status >= 300) {
             throw Error(`Failed to access underlying url '${endpointHost}': got ${response.status} ${response.statusText}: ${await response.text()}`);
         }
-        if (provider === "anthropic") {
+        if (provider === "anthropic" || provider === "cohere") {
             const reader = response.body?.getReader();
             const decoder = new TextDecoder();
             for await (const chunk of this._readChunks(reader)) {
@@ -218,7 +252,7 @@ class Bedrock extends base_js_1.LLM {
                     throw new Error(body.message);
                 }
                 if (body.bytes !== undefined) {
-                    const chunkResult = JSON.parse(
+                    const chunkResult = JSON.parse(decoder.decode(Uint8Array.from(atob(body.bytes), (m) => m.codePointAt(0) ?? 0)));
                     const text = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareOutput(provider, chunkResult);
                     yield new index_js_1.GenerationChunk({
                         text,

package/dist/llms/{bedrock.d.ts → bedrock/web.d.ts}
@@ -1,8 +1,9 @@
 import { EventStreamCodec } from "@smithy/eventstream-codec";
-import { BaseBedrockInput, type CredentialType } from "
-import { LLM, BaseLLMParams } from "
-import { CallbackManagerForLLMRun } from "
-import { GenerationChunk } from "
+import { BaseBedrockInput, type CredentialType } from "../../util/bedrock.js";
+import { LLM, BaseLLMParams } from "../base.js";
+import { CallbackManagerForLLMRun } from "../../callbacks/manager.js";
+import { GenerationChunk } from "../../schema/index.js";
+import { SerializedFields } from "../../load/map_keys.js";
 /**
  * A type of Large Language Model (LLM) that interacts with the Bedrock
  * service. It extends the base `LLM` class and implements the
@@ -20,13 +21,17 @@ export declare class Bedrock extends LLM implements BaseBedrockInput {
     maxTokens?: number | undefined;
     fetchFn: typeof fetch;
     endpointHost?: string;
+    /** @deprecated */
     stopSequences?: string[];
     modelKwargs?: Record<string, unknown>;
     codec: EventStreamCodec;
     streaming: boolean;
+    lc_serializable: boolean;
+    get lc_aliases(): Record<string, string>;
     get lc_secrets(): {
         [key: string]: string;
     } | undefined;
+    get lc_attributes(): SerializedFields | undefined;
     _llmType(): string;
     static lc_name(): string;
     constructor(fields?: Partial<BaseBedrockInput> & BaseLLMParams);
@@ -46,6 +51,14 @@ export declare class Bedrock extends LLM implements BaseBedrockInput {
         endpointHost: string;
         provider: string;
     }): Promise<Response>;
+    invocationParams(options?: this["ParsedCallOptions"]): {
+        model: string;
+        region: string;
+        temperature: number | undefined;
+        maxTokens: number | undefined;
+        stop: string[] | undefined;
+        modelKwargs: Record<string, unknown> | undefined;
+    };
     _streamResponseChunks(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;
     _readChunks(reader: any): {
         [Symbol.asyncIterator](): AsyncGenerator<any, void, unknown>;

package/dist/llms/{bedrock.js → bedrock/web.js}
@@ -1,13 +1,12 @@
 import { SignatureV4 } from "@smithy/signature-v4";
-import { defaultProvider } from "@aws-sdk/credential-provider-node";
 import { HttpRequest } from "@smithy/protocol-http";
 import { EventStreamCodec } from "@smithy/eventstream-codec";
 import { fromUtf8, toUtf8 } from "@smithy/util-utf8";
 import { Sha256 } from "@aws-crypto/sha256-js";
-import { BedrockLLMInputOutputAdapter, } from "
-import { getEnvironmentVariable } from "
-import { LLM } from "
-import { GenerationChunk } from "
+import { BedrockLLMInputOutputAdapter, } from "../../util/bedrock.js";
+import { getEnvironmentVariable } from "../../util/env.js";
+import { LLM } from "../base.js";
+import { GenerationChunk } from "../../schema/index.js";
 /**
  * A type of Large Language Model (LLM) that interacts with the Bedrock
  * service. It extends the base `LLM` class and implements the
@@ -18,8 +17,20 @@ import { GenerationChunk } from "../schema/index.js";
  * region, and the maximum number of tokens to generate.
  */
 export class Bedrock extends LLM {
+    get lc_aliases() {
+        return {
+            model: "model_id",
+            region: "region_name",
+        };
+    }
     get lc_secrets() {
-        return {
+        return {
+            "credentials.accessKeyId": "BEDROCK_AWS_ACCESS_KEY_ID",
+            "credentials.secretAccessKey": "BEDROCK_AWS_SECRET_ACCESS_KEY",
+        };
+    }
+    get lc_attributes() {
+        return { region: this.region };
     }
     _llmType() {
         return "bedrock";
@@ -71,6 +82,7 @@ export class Bedrock extends LLM {
             writable: true,
             value: void 0
         });
+        /** @deprecated */
        Object.defineProperty(this, "stopSequences", {
            enumerable: true,
            configurable: true,
@@ -95,8 +107,14 @@ export class Bedrock extends LLM {
             writable: true,
             value: false
         });
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
         this.model = fields?.model ?? this.model;
-        const allowedModels = ["ai21", "anthropic", "amazon"];
+        const allowedModels = ["ai21", "anthropic", "amazon", "cohere"];
         if (!allowedModels.includes(this.model.split(".")[0])) {
             throw new Error(`Unknown model: '${this.model}', only these are supported: ${allowedModels}`);
         }
@@ -105,7 +123,11 @@ export class Bedrock extends LLM {
            throw new Error("Please set the AWS_DEFAULT_REGION environment variable or pass it to the constructor as the region field.");
        }
        this.region = region;
-
+        const credentials = fields?.credentials;
+        if (!credentials) {
+            throw new Error("Please set the AWS credentials in the 'credentials' field.");
+        }
+        this.credentials = credentials;
        this.temperature = fields?.temperature ?? this.temperature;
        this.maxTokens = fields?.maxTokens ?? this.maxTokens;
        this.fetchFn = fields?.fetchFn ?? fetch;
@@ -155,7 +177,7 @@ export class Bedrock extends LLM {
     }
     async _signedFetch(prompt, options, fields) {
         const { bedrockMethod, endpointHost, provider } = fields;
-        const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature, this.stopSequences, this.modelKwargs);
+        const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature, options.stop ?? this.stopSequences, this.modelKwargs, fields.bedrockMethod);
         const url = new URL(`https://${endpointHost}/model/${this.model}/${bedrockMethod}`);
         const request = new HttpRequest({
             hostname: url.hostname,
@@ -186,9 +208,21 @@ export class Bedrock extends LLM {
         }));
         return response;
     }
+    invocationParams(options) {
+        return {
+            model: this.model,
+            region: this.region,
+            temperature: this.temperature,
+            maxTokens: this.maxTokens,
+            stop: options?.stop ?? this.stopSequences,
+            modelKwargs: this.modelKwargs,
+        };
+    }
     async *_streamResponseChunks(prompt, options, runManager) {
         const provider = this.model.split(".")[0];
-        const bedrockMethod = provider === "anthropic"
+        const bedrockMethod = provider === "anthropic" || provider === "cohere"
+            ? "invoke-with-response-stream"
+            : "invoke";
         const service = "bedrock-runtime";
         const endpointHost = this.endpointHost ?? `${service}.${this.region}.amazonaws.com`;
         // Send request to AWS using the low-level fetch API
@@ -200,7 +234,7 @@ export class Bedrock extends LLM {
         if (response.status < 200 || response.status >= 300) {
             throw Error(`Failed to access underlying url '${endpointHost}': got ${response.status} ${response.statusText}: ${await response.text()}`);
         }
-        if (provider === "anthropic") {
+        if (provider === "anthropic" || provider === "cohere") {
             const reader = response.body?.getReader();
             const decoder = new TextDecoder();
             for await (const chunk of this._readChunks(reader)) {
@@ -215,7 +249,7 @@ export class Bedrock extends LLM {
                     throw new Error(body.message);
                 }
                 if (body.bytes !== undefined) {
-                    const chunkResult = JSON.parse(
+                    const chunkResult = JSON.parse(decoder.decode(Uint8Array.from(atob(body.bytes), (m) => m.codePointAt(0) ?? 0)));
                     const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, chunkResult);
                     yield new GenerationChunk({
                         text,
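
The web build drops the Node-only credential provider, so credentials must be passed explicitly, and stopSequences is now marked deprecated in favor of the per-call stop option (which _signedFetch forwards as options.stop ?? this.stopSequences). A hedged sketch of the web entrypoint; the credentials, model id, and stop value are placeholders:

    // Sketch only: web entrypoint with explicit credentials.
    import { Bedrock } from "langchain/llms/bedrock/web";

    const llm = new Bedrock({
      model: "cohere.command-text-v14", // illustrative; cohere model prefixes are newly allowed in this release
      region: "us-east-1",
      credentials: {
        accessKeyId: "<BEDROCK_AWS_ACCESS_KEY_ID>",         // placeholder
        secretAccessKey: "<BEDROCK_AWS_SECRET_ACCESS_KEY>", // placeholder
      },
    });

    // Prefer the per-call stop option over the deprecated stopSequences field.
    const haiku = await llm.call("Write a haiku about rivers.", { stop: ["\n\n"] });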

package/dist/load/import_constants.cjs
@@ -36,6 +36,7 @@ exports.optionalImportEntrypoints = [
     "langchain/llms/googlepalm",
     "langchain/llms/sagemaker_endpoint",
     "langchain/llms/bedrock",
+    "langchain/llms/bedrock/web",
     "langchain/llms/llama_cpp",
     "langchain/llms/writer",
     "langchain/llms/portkey",
@@ -111,6 +112,7 @@ exports.optionalImportEntrypoints = [
     "langchain/document_transformers/mozilla_readability",
     "langchain/chat_models/portkey",
     "langchain/chat_models/bedrock",
+    "langchain/chat_models/bedrock/web",
     "langchain/chat_models/googlevertexai",
     "langchain/chat_models/googlevertexai/web",
     "langchain/chat_models/googlepalm",
@@ -137,6 +139,7 @@ exports.optionalImportEntrypoints = [
     "langchain/cache/upstash_redis",
     "langchain/stores/doc/gcs",
     "langchain/stores/file/node",
+    "langchain/stores/message/cassandra",
     "langchain/stores/message/convex",
     "langchain/stores/message/cloudflare_d1",
     "langchain/stores/message/dynamodb",