langchain 0.0.127 → 0.0.128
This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/dist/agents/toolkits/conversational_retrieval/tool.cjs +8 -2
- package/dist/agents/toolkits/conversational_retrieval/tool.d.ts +9 -2
- package/dist/agents/toolkits/conversational_retrieval/tool.js +9 -3
- package/dist/chains/query_constructor/ir.d.ts +5 -4
- package/dist/chains/query_constructor/prompt.cjs +3 -1
- package/dist/chains/query_constructor/prompt.d.ts +2 -2
- package/dist/chains/query_constructor/prompt.js +3 -1
- package/dist/chains/question_answering/refine_prompts.cjs +2 -2
- package/dist/chains/question_answering/refine_prompts.d.ts +1 -1
- package/dist/chains/question_answering/refine_prompts.js +2 -2
- package/dist/chains/sql_db/index.cjs +2 -1
- package/dist/chains/sql_db/index.d.ts +1 -1
- package/dist/chains/sql_db/index.js +1 -1
- package/dist/chains/sql_db/sql_db_prompt.cjs +21 -1
- package/dist/chains/sql_db/sql_db_prompt.d.ts +6 -0
- package/dist/chains/sql_db/sql_db_prompt.js +20 -0
- package/dist/chat_models/openai.cjs +8 -0
- package/dist/chat_models/openai.d.ts +1 -0
- package/dist/chat_models/openai.js +8 -0
- package/dist/document_loaders/web/github.cjs +104 -32
- package/dist/document_loaders/web/github.d.ts +46 -3
- package/dist/document_loaders/web/github.js +104 -32
- package/dist/document_transformers/mozilla_readability.cjs +30 -0
- package/dist/document_transformers/mozilla_readability.d.ts +8 -0
- package/dist/document_transformers/mozilla_readability.js +26 -0
- package/dist/llms/base.cjs +3 -1
- package/dist/llms/base.js +3 -1
- package/dist/llms/bedrock.cjs +152 -0
- package/dist/llms/bedrock.d.ts +54 -0
- package/dist/llms/bedrock.js +148 -0
- package/dist/llms/openai-chat.cjs +8 -0
- package/dist/llms/openai-chat.d.ts +1 -0
- package/dist/llms/openai-chat.js +8 -0
- package/dist/llms/openai.cjs +8 -0
- package/dist/llms/openai.d.ts +1 -0
- package/dist/llms/openai.js +8 -0
- package/dist/load/import_constants.cjs +2 -0
- package/dist/load/import_constants.js +2 -0
- package/dist/retrievers/self_query/base.cjs +31 -0
- package/dist/retrievers/self_query/base.d.ts +5 -2
- package/dist/retrievers/self_query/base.js +31 -0
- package/dist/retrievers/self_query/chroma.d.ts +2 -1
- package/dist/retrievers/self_query/functional.cjs +28 -1
- package/dist/retrievers/self_query/functional.d.ts +3 -0
- package/dist/retrievers/self_query/functional.js +28 -1
- package/dist/retrievers/self_query/index.cjs +18 -4
- package/dist/retrievers/self_query/index.d.ts +14 -10
- package/dist/retrievers/self_query/index.js +18 -4
- package/dist/retrievers/self_query/pinecone.d.ts +2 -1
- package/dist/retrievers/self_query/supabase.cjs +30 -0
- package/dist/retrievers/self_query/supabase.d.ts +3 -5
- package/dist/retrievers/self_query/supabase.js +30 -0
- package/dist/retrievers/self_query/utils.cjs +21 -0
- package/dist/retrievers/self_query/utils.d.ts +2 -0
- package/dist/retrievers/self_query/utils.js +16 -0
- package/dist/retrievers/self_query/weaviate.cjs +67 -16
- package/dist/retrievers/self_query/weaviate.d.ts +6 -3
- package/dist/retrievers/self_query/weaviate.js +67 -16
- package/dist/types/openai-types.d.ts +2 -0
- package/dist/util/event-source-parse.cjs +22 -6
- package/dist/util/event-source-parse.js +22 -6
- package/dist/util/ollama.cjs +16 -2
- package/dist/util/ollama.js +16 -2
- package/dist/util/sql_utils.cjs +38 -3
- package/dist/util/sql_utils.js +39 -4
- package/dist/vectorstores/chroma.cjs +5 -6
- package/dist/vectorstores/chroma.js +5 -6
- package/dist/vectorstores/faiss.cjs +41 -1
- package/dist/vectorstores/faiss.d.ts +6 -0
- package/dist/vectorstores/faiss.js +41 -1
- package/dist/vectorstores/supabase.cjs +10 -4
- package/dist/vectorstores/supabase.d.ts +4 -2
- package/dist/vectorstores/supabase.js +10 -4
- package/document_transformers/mozilla_readability.cjs +1 -0
- package/document_transformers/mozilla_readability.d.ts +1 -0
- package/document_transformers/mozilla_readability.js +1 -0
- package/llms/bedrock.cjs +1 -0
- package/llms/bedrock.d.ts +1 -0
- package/llms/bedrock.js +1 -0
- package/package.json +46 -3
package/dist/document_loaders/web/github.js

@@ -5,12 +5,13 @@ import { BaseDocumentLoader } from "../base.js";
 import { UnknownHandling } from "../fs/directory.js";
 import { extname } from "../../util/extname.js";
 import { getEnvironmentVariable } from "../../util/env.js";
+import { AsyncCaller } from "../../util/async_caller.js";
 const extensions = new Set(binaryExtensions);
 function isBinaryPath(name) {
     return extensions.has(extname(name).slice(1).toLowerCase());
 }
 export class GithubRepoLoader extends BaseDocumentLoader {
-    constructor(githubUrl, { accessToken = getEnvironmentVariable("GITHUB_ACCESS_TOKEN"), branch = "main", recursive = true, unknown = UnknownHandling.Warn, ignoreFiles = [], ignorePaths, } = {}) {
+    constructor(githubUrl, { accessToken = getEnvironmentVariable("GITHUB_ACCESS_TOKEN"), branch = "main", recursive = true, unknown = UnknownHandling.Warn, ignoreFiles = [], ignorePaths, verbose = false, maxConcurrency = 2, maxRetries = 2, ...rest } = {}) {
         super();
         Object.defineProperty(this, "owner", {
             enumerable: true,
@@ -72,6 +73,18 @@ export class GithubRepoLoader extends BaseDocumentLoader {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "verbose", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "caller", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
         const { owner, repo, path } = this.extractOwnerAndRepoAndPath(githubUrl);
         this.owner = owner;
         this.repo = repo;
@@ -81,6 +94,12 @@ export class GithubRepoLoader extends BaseDocumentLoader {
         this.unknown = unknown;
         this.accessToken = accessToken;
         this.ignoreFiles = ignoreFiles;
+        this.verbose = verbose;
+        this.caller = new AsyncCaller({
+            maxConcurrency,
+            maxRetries,
+            ...rest,
+        });
         if (ignorePaths) {
             this.ignore = ignore.default().add(ignorePaths);
         }
@@ -98,11 +117,12 @@ export class GithubRepoLoader extends BaseDocumentLoader {
         return { owner: match[1], repo: match[2], path: match[4] || "" };
     }
     async load() {
-
-
-
+        return (await this.processRepo()).map((fileResponse) => new Document({
+            pageContent: fileResponse.contents,
+            metadata: fileResponse.metadata,
+        }));
     }
-
+    shouldIgnore(path, fileType) {
         if (fileType !== "dir" && isBinaryPath(path)) {
             return true;
         }
@@ -122,46 +142,98 @@ export class GithubRepoLoader extends BaseDocumentLoader {
             }
         }));
     }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    /**
+     * Takes the file info and wrap it in a promise that will resolve to the file content and metadata
+     * @param file
+     * @returns
+     */
+    async fetchFileContentWrapper(file) {
+        const fileContent = await this.fetchFileContent(file).catch((error) => {
+            this.handleError(`Failed wrap file content: ${file}, ${error}`);
+        });
+        return {
+            contents: fileContent || "",
+            metadata: { source: file.path },
+        };
+    }
+    /**
+     * Maps a list of files / directories to a list of promises that will fetch the file / directory contents
+     */
+    async getCurrentDirectoryFilePromises(files) {
+        const currentDirectoryFilePromises = [];
+        // Directories have nested files / directories, which is why this is a list of promises of promises
+        const currentDirectoryDirectoryPromises = [];
+        for (const file of files) {
+            if (!this.shouldIgnore(file.path, file.type)) {
+                if (file.type !== "dir") {
+                    try {
+                        currentDirectoryFilePromises.push(this.fetchFileContentWrapper(file));
                     }
-
-
+                    catch (e) {
+                        this.handleError(`Failed to fetch file content: ${file.path}, ${e}`);
                     }
                 }
+                else if (this.recursive) {
+                    currentDirectoryDirectoryPromises.push(this.processDirectory(file.path));
+                }
             }
         }
+        const curDirDirectories = await Promise.all(currentDirectoryDirectoryPromises);
+        return [...currentDirectoryFilePromises, ...curDirDirectories.flat()];
+    }
+    /**
+     * Begins the process of fetching the contents of the repository
+     */
+    async processRepo() {
+        try {
+            // Get the list of file / directory names in the root directory
+            const files = await this.fetchRepoFiles(this.initialPath);
+            // Map the file / directory paths to promises that will fetch the file / directory contents
+            const currentDirectoryFilePromises = await this.getCurrentDirectoryFilePromises(files);
+            return Promise.all(currentDirectoryFilePromises);
+        }
+        catch (error) {
+            this.handleError(`Failed to process directory: ${this.initialPath}, ${error}`);
+            return Promise.reject(error);
+        }
+    }
+    async processDirectory(path) {
+        try {
+            const files = await this.fetchRepoFiles(path);
+            return this.getCurrentDirectoryFilePromises(files);
+        }
         catch (error) {
             this.handleError(`Failed to process directory: ${path}, ${error}`);
+            return Promise.reject(error);
         }
     }
     async fetchRepoFiles(path) {
         const url = `https://api.github.com/repos/${this.owner}/${this.repo}/contents/${path}?ref=${this.branch}`;
-
-
-
-
-
-
-
-
-
+        return this.caller.call(async () => {
+            if (this.verbose) {
+                console.log("Fetching", url);
+            }
+            const response = await fetch(url, { headers: this.headers });
+            const data = await response.json();
+            if (!response.ok) {
+                throw new Error(`Unable to fetch repository files: ${response.status} ${JSON.stringify(data)}`);
+            }
+            if (!Array.isArray(data)) {
+                throw new Error("Unable to fetch repository files.");
+            }
+            return data;
+        });
     }
     async fetchFileContent(file) {
-
-
+        return this.caller.call(async () => {
+            if (this.verbose) {
+                console.log("Fetching", file.download_url);
+            }
+            const response = await fetch(file.download_url, {
+                headers: this.headers,
+            });
+            return response.text();
+        });
     }
     handleError(message) {
         switch (this.unknown) {
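The loader now funnels every GitHub API call through an `AsyncCaller`, so request concurrency and retries are configurable from the constructor, and `verbose` logs each URL before it is fetched. A minimal usage sketch, assuming the `langchain/document_loaders/web/github` entrypoint and an illustrative repository URL:

```ts
import { GithubRepoLoader } from "langchain/document_loaders/web/github";

// Sketch only: the repository URL and option values are illustrative.
const loader = new GithubRepoLoader(
  "https://github.com/hwchase17/langchainjs",
  {
    branch: "main",
    recursive: true,    // descend into subdirectories via processDirectory()
    verbose: true,      // log each URL before fetching it
    maxConcurrency: 2,  // forwarded to the internal AsyncCaller
    maxRetries: 2,      // retry budget for failed GitHub API calls
  }
);

// load() resolves every fetched file into a Document with
// { pageContent: file contents, metadata: { source: file path } }.
const docs = await loader.load();
console.log(docs.length, docs[0]?.metadata);
```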
package/dist/document_transformers/mozilla_readability.cjs

@@ -0,0 +1,30 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MozillaReadabilityTransformer = void 0;
+const readability_1 = require("@mozilla/readability");
+const jsdom_1 = require("jsdom");
+const document_js_1 = require("../document.cjs");
+const document_js_2 = require("../schema/document.cjs");
+class MozillaReadabilityTransformer extends document_js_2.MappingDocumentTransformer {
+    constructor(options = {}) {
+        super(options);
+        Object.defineProperty(this, "options", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: options
+        });
+    }
+    async _transformDocument(document) {
+        const doc = new jsdom_1.JSDOM(document.pageContent);
+        const readability = new readability_1.Readability(doc.window.document, this.options);
+        const result = readability.parse();
+        return new document_js_1.Document({
+            pageContent: result?.textContent ?? "",
+            metadata: {
+                ...document.metadata,
+            },
+        });
+    }
+}
+exports.MozillaReadabilityTransformer = MozillaReadabilityTransformer;

package/dist/document_transformers/mozilla_readability.d.ts

@@ -0,0 +1,8 @@
+import { Options } from "mozilla-readability";
+import { Document } from "../document.js";
+import { MappingDocumentTransformer } from "../schema/document.js";
+export declare class MozillaReadabilityTransformer extends MappingDocumentTransformer {
+    protected options: Options;
+    constructor(options?: Options);
+    _transformDocument(document: Document): Promise<Document>;
+}

package/dist/document_transformers/mozilla_readability.js

@@ -0,0 +1,26 @@
+import { Readability } from "@mozilla/readability";
+import { JSDOM } from "jsdom";
+import { Document } from "../document.js";
+import { MappingDocumentTransformer } from "../schema/document.js";
+export class MozillaReadabilityTransformer extends MappingDocumentTransformer {
+    constructor(options = {}) {
+        super(options);
+        Object.defineProperty(this, "options", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: options
+        });
+    }
+    async _transformDocument(document) {
+        const doc = new JSDOM(document.pageContent);
+        const readability = new Readability(doc.window.document, this.options);
+        const result = readability.parse();
+        return new Document({
+            pageContent: result?.textContent ?? "",
+            metadata: {
+                ...document.metadata,
+            },
+        });
+    }
+}
package/dist/llms/base.cjs CHANGED
package/dist/llms/base.js CHANGED
package/dist/llms/bedrock.cjs

@@ -0,0 +1,152 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bedrock = void 0;
+const signature_v4_1 = require("@aws-sdk/signature-v4");
+const credential_provider_node_1 = require("@aws-sdk/credential-provider-node");
+const protocol_http_1 = require("@aws-sdk/protocol-http");
+const sha256_js_1 = require("@aws-crypto/sha256-js");
+const env_js_1 = require("../util/env.cjs");
+const base_js_1 = require("./base.cjs");
+class BedrockLLMInputOutputAdapter {
+    /** Adapter class to prepare the inputs from Langchain to a format
+  that LLM model expects. Also, provides a helper function to extract
+  the generated text from the model response. */
+    static prepareInput(provider, prompt) {
+        const inputBody = {};
+        if (provider === "anthropic" || provider === "ai21") {
+            inputBody.prompt = prompt;
+        }
+        else if (provider === "amazon") {
+            inputBody.inputText = prompt;
+            inputBody.textGenerationConfig = {};
+        }
+        else {
+            inputBody.inputText = prompt;
+        }
+        if (provider === "anthropic" && !("max_tokens_to_sample" in inputBody)) {
+            inputBody.max_tokens_to_sample = 50;
+        }
+        return inputBody;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    static prepareOutput(provider, responseBody) {
+        if (provider === "anthropic") {
+            return responseBody.completion;
+        }
+        else if (provider === "ai21") {
+            return responseBody.completions[0].data.text;
+        }
+        return responseBody.results[0].outputText;
+    }
+}
+class Bedrock extends base_js_1.LLM {
+    get lc_secrets() {
+        return {};
+    }
+    _llmType() {
+        return "bedrock";
+    }
+    constructor(fields) {
+        super(fields ?? {});
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "amazon.titan-tg1-large"
+        });
+        Object.defineProperty(this, "region", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "credentials", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "temperature", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "maxTokens", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "fetchFn", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.model = fields?.model ?? this.model;
+        const allowedModels = ["ai21", "anthropic", "amazon"];
+        if (!allowedModels.includes(this.model.split(".")[0])) {
+            throw new Error(`Unknown model: '${this.model}', only these are supported: ${allowedModels}`);
+        }
+        const region = fields?.region ?? (0, env_js_1.getEnvironmentVariable)("AWS_DEFAULT_REGION");
+        if (!region) {
+            throw new Error("Please set the AWS_DEFAULT_REGION environment variable or pass it to the constructor as the region field.");
+        }
+        this.region = region;
+        this.credentials = fields?.credentials ?? (0, credential_provider_node_1.defaultProvider)();
+        this.temperature = fields?.temperature ?? this.temperature;
+        this.maxTokens = fields?.maxTokens ?? this.maxTokens;
+        this.fetchFn = fields?.fetchFn ?? fetch;
+    }
+    /** Call out to Bedrock service model.
+      Arguments:
+        prompt: The prompt to pass into the model.
+
+      Returns:
+        The string generated by the model.
+
+      Example:
+        response = model.call("Tell me a joke.")
+    */
+    async _call(prompt) {
+        const provider = this.model.split(".")[0];
+        const service = "bedrock";
+        const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt);
+        const url = new URL(`https://${service}.${this.region}.amazonaws.com/model/${this.model}/invoke`);
+        const request = new protocol_http_1.HttpRequest({
+            hostname: url.hostname,
+            path: url.pathname,
+            protocol: url.protocol,
+            method: "POST",
+            body: JSON.stringify(inputBody),
+            query: Object.fromEntries(url.searchParams.entries()),
+            headers: {
+                // host is required by AWS Signature V4: https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
+                host: url.host,
+                accept: "application/json",
+                "Content-Type": "application/json",
+            },
+        });
+        const signer = new signature_v4_1.SignatureV4({
+            credentials: this.credentials,
+            service,
+            region: this.region,
+            sha256: sha256_js_1.Sha256,
+        });
+        const signedRequest = await signer.sign(request);
+        // Send request to AWS using the low-level fetch API
+        const response = await this.fetchFn(url, {
+            headers: signedRequest.headers,
+            body: signedRequest.body,
+            method: signedRequest.method,
+        });
+        if (response.status < 200 || response.status >= 300) {
+            throw Error(`Failed to access underlying url '${url}': got ${response.status} ${response.statusText}: ${await response.text()}`);
+        }
+        const responseJson = await response.json();
+        const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, responseJson);
+        return text;
+    }
+}
+exports.Bedrock = Bedrock;
package/dist/llms/bedrock.d.ts

@@ -0,0 +1,54 @@
+import type { AwsCredentialIdentity, Provider } from "@aws-sdk/types";
+import { LLM, BaseLLMParams } from "./base.js";
+type CredentialType = AwsCredentialIdentity | Provider<AwsCredentialIdentity>;
+/** Bedrock models.
+    To authenticate, the AWS client uses the following methods to automatically load credentials:
+    https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html
+    If a specific credential profile should be used, you must pass the name of the profile from the ~/.aws/credentials file that is to be used.
+    Make sure the credentials / roles used have the required policies to access the Bedrock service.
+*/
+export interface BedrockInput {
+    /** Model to use.
+        For example, "amazon.titan-tg1-large", this is equivalent to the modelId property in the list-foundation-models api.
+    */
+    model: string;
+    /** The AWS region e.g. `us-west-2`.
+        Fallback to AWS_DEFAULT_REGION env variable or region specified in ~/.aws/config in case it is not provided here.
+    */
+    region?: string;
+    /** AWS Credentials.
+        If no credentials are provided, the default credentials from `@aws-sdk/credential-provider-node` will be used.
+    */
+    credentials?: CredentialType;
+    /** Temperature */
+    temperature?: number;
+    /** Max tokens */
+    maxTokens?: number;
+    /** A custom fetch function for low-level access to AWS API. Defaults to fetch() */
+    fetchFn?: typeof fetch;
+}
+export declare class Bedrock extends LLM implements BedrockInput {
+    model: string;
+    region: string;
+    credentials: CredentialType;
+    temperature?: number | undefined;
+    maxTokens?: number | undefined;
+    fetchFn: typeof fetch;
+    get lc_secrets(): {
+        [key: string]: string;
+    } | undefined;
+    _llmType(): string;
+    constructor(fields?: Partial<BedrockInput> & BaseLLMParams);
+    /** Call out to Bedrock service model.
+      Arguments:
+        prompt: The prompt to pass into the model.
+
+      Returns:
+        The string generated by the model.
+
+      Example:
+        response = model.call("Tell me a joke.")
+    */
+    _call(prompt: string): Promise<string>;
+}
+export {};
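Taken together with the compiled sources above, the new `BedrockInput` surface can be exercised as follows. A minimal sketch, assuming the `langchain/llms/bedrock` entrypoint from this release, an illustrative region, and credentials resolved from the default AWS provider chain:

```ts
import { Bedrock } from "langchain/llms/bedrock";

// Sketch only: the model and region values are illustrative.
// The model id prefix (amazon | anthropic | ai21) decides how
// BedrockLLMInputOutputAdapter shapes the request body and parses the response.
const model = new Bedrock({
  model: "amazon.titan-tg1-large",
  region: "us-west-2", // or set AWS_DEFAULT_REGION
  // credentials: { accessKeyId: "...", secretAccessKey: "..." }, // optional explicit credentials
});

const response = await model.call("Tell me a joke.");
console.log(response);
```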
package/dist/llms/bedrock.js

@@ -0,0 +1,148 @@
+import { SignatureV4 } from "@aws-sdk/signature-v4";
+import { defaultProvider } from "@aws-sdk/credential-provider-node";
+import { HttpRequest } from "@aws-sdk/protocol-http";
+import { Sha256 } from "@aws-crypto/sha256-js";
+import { getEnvironmentVariable } from "../util/env.js";
+import { LLM } from "./base.js";
+class BedrockLLMInputOutputAdapter {
+    /** Adapter class to prepare the inputs from Langchain to a format
+  that LLM model expects. Also, provides a helper function to extract
+  the generated text from the model response. */
+    static prepareInput(provider, prompt) {
+        const inputBody = {};
+        if (provider === "anthropic" || provider === "ai21") {
+            inputBody.prompt = prompt;
+        }
+        else if (provider === "amazon") {
+            inputBody.inputText = prompt;
+            inputBody.textGenerationConfig = {};
+        }
+        else {
+            inputBody.inputText = prompt;
+        }
+        if (provider === "anthropic" && !("max_tokens_to_sample" in inputBody)) {
+            inputBody.max_tokens_to_sample = 50;
+        }
+        return inputBody;
+    }
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    static prepareOutput(provider, responseBody) {
+        if (provider === "anthropic") {
+            return responseBody.completion;
+        }
+        else if (provider === "ai21") {
+            return responseBody.completions[0].data.text;
+        }
+        return responseBody.results[0].outputText;
+    }
+}
+export class Bedrock extends LLM {
+    get lc_secrets() {
+        return {};
+    }
+    _llmType() {
+        return "bedrock";
+    }
+    constructor(fields) {
+        super(fields ?? {});
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "amazon.titan-tg1-large"
+        });
+        Object.defineProperty(this, "region", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "credentials", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "temperature", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "maxTokens", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "fetchFn", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.model = fields?.model ?? this.model;
+        const allowedModels = ["ai21", "anthropic", "amazon"];
+        if (!allowedModels.includes(this.model.split(".")[0])) {
+            throw new Error(`Unknown model: '${this.model}', only these are supported: ${allowedModels}`);
+        }
+        const region = fields?.region ?? getEnvironmentVariable("AWS_DEFAULT_REGION");
+        if (!region) {
+            throw new Error("Please set the AWS_DEFAULT_REGION environment variable or pass it to the constructor as the region field.");
+        }
+        this.region = region;
+        this.credentials = fields?.credentials ?? defaultProvider();
+        this.temperature = fields?.temperature ?? this.temperature;
+        this.maxTokens = fields?.maxTokens ?? this.maxTokens;
+        this.fetchFn = fields?.fetchFn ?? fetch;
+    }
+    /** Call out to Bedrock service model.
+      Arguments:
+        prompt: The prompt to pass into the model.
+
+      Returns:
+        The string generated by the model.
+
+      Example:
+        response = model.call("Tell me a joke.")
+    */
+    async _call(prompt) {
+        const provider = this.model.split(".")[0];
+        const service = "bedrock";
+        const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt);
+        const url = new URL(`https://${service}.${this.region}.amazonaws.com/model/${this.model}/invoke`);
+        const request = new HttpRequest({
+            hostname: url.hostname,
+            path: url.pathname,
+            protocol: url.protocol,
+            method: "POST",
+            body: JSON.stringify(inputBody),
+            query: Object.fromEntries(url.searchParams.entries()),
+            headers: {
+                // host is required by AWS Signature V4: https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
+                host: url.host,
+                accept: "application/json",
+                "Content-Type": "application/json",
+            },
+        });
+        const signer = new SignatureV4({
+            credentials: this.credentials,
+            service,
+            region: this.region,
+            sha256: Sha256,
+        });
+        const signedRequest = await signer.sign(request);
+        // Send request to AWS using the low-level fetch API
+        const response = await this.fetchFn(url, {
+            headers: signedRequest.headers,
+            body: signedRequest.body,
+            method: signedRequest.method,
+        });
+        if (response.status < 200 || response.status >= 300) {
+            throw Error(`Failed to access underlying url '${url}': got ${response.status} ${response.statusText}: ${await response.text()}`);
+        }
+        const responseJson = await response.json();
+        const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, responseJson);
+        return text;
+    }
+}
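Because `fetchFn` defaults to the global `fetch`, the signed Bedrock request can also be routed through a custom function, for example to log outgoing calls. A hedged sketch; the wrapper and the model id below are illustrative:

```ts
import { Bedrock } from "langchain/llms/bedrock";

// Hypothetical logging wrapper around the global fetch.
const loggingFetch: typeof fetch = async (input, init) => {
  console.log("Bedrock request:", input.toString(), init?.method);
  return fetch(input, init);
};

const model = new Bedrock({
  model: "anthropic.claude-v1", // illustrative; the id prefix must be ai21, anthropic, or amazon
  region: "us-east-1",
  fetchFn: loggingFetch,
});
```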
package/dist/llms/openai-chat.cjs

@@ -140,6 +140,12 @@ class OpenAIChat extends base_js_1.LLM {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "user", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
         Object.defineProperty(this, "streaming", {
             enumerable: true,
             configurable: true,
@@ -228,6 +234,7 @@ class OpenAIChat extends base_js_1.LLM {
         this.logitBias = fields?.logitBias;
         this.maxTokens = fields?.maxTokens;
         this.stop = fields?.stop;
+        this.user = fields?.user;
         this.streaming = fields?.streaming ?? false;
         if (this.n > 1) {
             throw new Error("Cannot use n > 1 in OpenAIChat LLM. Use ChatOpenAI Chat Model instead.");
@@ -263,6 +270,7 @@ class OpenAIChat extends base_js_1.LLM {
             logit_bias: this.logitBias,
             max_tokens: this.maxTokens === -1 ? undefined : this.maxTokens,
             stop: options?.stop ?? this.stop,
+            user: this.user,
             stream: this.streaming,
             ...this.modelKwargs,
         };

package/dist/llms/openai-chat.d.ts

@@ -49,6 +49,7 @@ export declare class OpenAIChat extends LLM<OpenAIChatCallOptions> implements Op
     modelKwargs?: OpenAIChatInput["modelKwargs"];
     timeout?: number;
     stop?: string[];
+    user?: string;
     streaming: boolean;
     openAIApiKey?: string;
     azureOpenAIApiVersion?: string;