langchain 0.0.178 → 0.0.180
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/chains/combine_documents/reduce.cjs +1 -0
- package/chains/combine_documents/reduce.d.ts +1 -0
- package/chains/combine_documents/reduce.js +1 -0
- package/chat_models/bedrock/web.cjs +1 -0
- package/chat_models/bedrock/web.d.ts +1 -0
- package/chat_models/bedrock/web.js +1 -0
- package/chat_models/bedrock.cjs +1 -1
- package/chat_models/bedrock.d.ts +1 -1
- package/chat_models/bedrock.js +1 -1
- package/dist/cache/base.d.ts +1 -1
- package/dist/callbacks/index.d.ts +1 -1
- package/dist/chains/combine_documents/reduce.cjs +69 -0
- package/dist/chains/combine_documents/reduce.d.ts +28 -0
- package/dist/chains/combine_documents/reduce.js +64 -0
- package/dist/chat_models/baiduwenxin.cjs +16 -2
- package/dist/chat_models/baiduwenxin.js +16 -2
- package/dist/chat_models/bedrock/index.cjs +24 -0
- package/dist/chat_models/bedrock/index.d.ts +12 -0
- package/dist/chat_models/bedrock/index.js +18 -0
- package/dist/chat_models/{bedrock.cjs → bedrock/web.cjs} +44 -15
- package/dist/chat_models/{bedrock.d.ts → bedrock/web.d.ts} +14 -5
- package/dist/chat_models/{bedrock.js → bedrock/web.js} +42 -13
- package/dist/chat_models/iflytek_xinghuo/index.cjs +1 -1
- package/dist/chat_models/iflytek_xinghuo/index.js +1 -1
- package/dist/llms/bedrock/index.cjs +17 -0
- package/dist/llms/bedrock/index.d.ts +7 -0
- package/dist/llms/bedrock/index.js +13 -0
- package/dist/llms/{bedrock.cjs → bedrock/web.cjs} +46 -12
- package/dist/llms/{bedrock.d.ts → bedrock/web.d.ts} +17 -4
- package/dist/llms/{bedrock.js → bedrock/web.js} +46 -12
- package/dist/load/import_constants.cjs +3 -0
- package/dist/load/import_constants.js +3 -0
- package/dist/load/import_map.cjs +4 -2
- package/dist/load/import_map.d.ts +2 -0
- package/dist/load/import_map.js +2 -0
- package/dist/output_parsers/json.cjs +77 -0
- package/dist/output_parsers/json.d.ts +1 -0
- package/dist/output_parsers/json.js +73 -0
- package/dist/output_parsers/openai_functions.cjs +37 -2
- package/dist/output_parsers/openai_functions.d.ts +10 -5
- package/dist/output_parsers/openai_functions.js +38 -3
- package/dist/schema/index.cjs +33 -1
- package/dist/schema/index.d.ts +3 -1
- package/dist/schema/index.js +31 -0
- package/dist/schema/output_parser.cjs +63 -3
- package/dist/schema/output_parser.d.ts +16 -1
- package/dist/schema/output_parser.js +59 -0
- package/dist/schema/prompt_template.cjs +33 -0
- package/dist/schema/prompt_template.d.ts +12 -0
- package/dist/schema/prompt_template.js +29 -0
- package/dist/storage/convex.d.ts +21 -0
- package/dist/stores/message/cassandra.cjs +135 -0
- package/dist/stores/message/cassandra.d.ts +44 -0
- package/dist/stores/message/cassandra.js +131 -0
- package/dist/stores/message/convex.d.ts +21 -0
- package/dist/util/bedrock.cjs +13 -1
- package/dist/util/bedrock.d.ts +5 -2
- package/dist/util/bedrock.js +13 -1
- package/dist/util/fast-json-patch/index.cjs +1 -0
- package/dist/util/fast-json-patch/index.d.ts +1 -0
- package/dist/util/fast-json-patch/index.js +1 -0
- package/dist/util/fast-json-patch/src/duplex.cjs +237 -0
- package/dist/util/fast-json-patch/src/duplex.d.ts +23 -0
- package/dist/util/fast-json-patch/src/duplex.js +230 -0
- package/dist/vectorstores/cassandra.cjs +197 -47
- package/dist/vectorstores/cassandra.d.ts +47 -4
- package/dist/vectorstores/cassandra.js +197 -47
- package/dist/vectorstores/convex.d.ts +21 -0
- package/llms/bedrock/web.cjs +1 -0
- package/llms/bedrock/web.d.ts +1 -0
- package/llms/bedrock/web.js +1 -0
- package/llms/bedrock.cjs +1 -1
- package/llms/bedrock.d.ts +1 -1
- package/llms/bedrock.js +1 -1
- package/package.json +41 -1
- package/schema/prompt_template.cjs +1 -0
- package/schema/prompt_template.d.ts +1 -0
- package/schema/prompt_template.js +1 -0
- package/stores/message/cassandra.cjs +1 -0
- package/stores/message/cassandra.d.ts +1 -0
- package/stores/message/cassandra.js +1 -0
- package/dist/schema/runnable/remote.cjs +0 -225
- package/dist/schema/runnable/remote.d.ts +0 -28
- package/dist/schema/runnable/remote.js +0 -221

package/dist/chat_models/{bedrock.js → bedrock/web.js}
CHANGED

@@ -4,10 +4,10 @@ import { HttpRequest } from "@smithy/protocol-http";
 import { EventStreamCodec } from "@smithy/eventstream-codec";
 import { fromUtf8, toUtf8 } from "@smithy/util-utf8";
 import { Sha256 } from "@aws-crypto/sha256-js";
-import { BedrockLLMInputOutputAdapter, } from "
-import { getEnvironmentVariable } from "
-import { SimpleChatModel } from "
-import { AIMessageChunk, AIMessage, ChatGenerationChunk, ChatMessage, } from "
+import { BedrockLLMInputOutputAdapter, } from "../../util/bedrock.js";
+import { getEnvironmentVariable } from "../../util/env.js";
+import { SimpleChatModel } from "../base.js";
+import { AIMessageChunk, AIMessage, ChatGenerationChunk, ChatMessage, } from "../../schema/index.js";
 function convertOneMessageToText(message, humanPrompt, aiPrompt) {
     if (message._getType() === "human") {
         return `${humanPrompt} ${message.content}`;
@@ -56,15 +56,27 @@ export function convertMessagesToPrompt(messages, provider) {
  * configured with various parameters such as the model to use, the AWS
  * region, and the maximum number of tokens to generate.
  */
-export class
+export class BedrockChat extends SimpleChatModel {
+    get lc_aliases() {
+        return {
+            model: "model_id",
+            region: "region_name",
+        };
+    }
     get lc_secrets() {
-        return {
+        return {
+            "credentials.accessKeyId": "BEDROCK_AWS_ACCESS_KEY_ID",
+            "credentials.secretAccessKey": "BEDROCK_AWS_SECRET_ACCESS_KEY",
+        };
+    }
+    get lc_attributes() {
+        return { region: this.region };
     }
     _llmType() {
         return "bedrock";
     }
     static lc_name() {
-        return "
+        return "BedrockChat";
     }
     constructor(fields) {
         super(fields ?? {});
@@ -110,6 +122,7 @@ export class ChatBedrock extends SimpleChatModel {
             writable: true,
             value: void 0
         });
+        /** @deprecated */
         Object.defineProperty(this, "stopSequences", {
             enumerable: true,
             configurable: true,
@@ -134,8 +147,14 @@ export class ChatBedrock extends SimpleChatModel {
             writable: true,
             value: false
         });
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
         this.model = fields?.model ?? this.model;
-        const allowedModels = ["ai21", "anthropic", "amazon"];
+        const allowedModels = ["ai21", "anthropic", "amazon", "cohere"];
         if (!allowedModels.includes(this.model.split(".")[0])) {
             throw new Error(`Unknown model: '${this.model}', only these are supported: ${allowedModels}`);
         }
@@ -144,7 +163,11 @@ export class ChatBedrock extends SimpleChatModel {
            throw new Error("Please set the AWS_DEFAULT_REGION environment variable or pass it to the constructor as the region field.");
         }
         this.region = region;
-
+        const credentials = fields?.credentials ?? defaultProvider();
+        if (!credentials) {
+            throw new Error("Please set the AWS credentials in the 'credentials' field.");
+        }
+        this.credentials = credentials;
         this.temperature = fields?.temperature ?? this.temperature;
         this.maxTokens = fields?.maxTokens ?? this.maxTokens;
         this.fetchFn = fields?.fetchFn ?? fetch;
@@ -194,7 +217,7 @@ export class ChatBedrock extends SimpleChatModel {
     }
     async _signedFetch(messages, options, fields) {
         const { bedrockMethod, endpointHost, provider } = fields;
-        const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, convertMessagesToPromptAnthropic(messages), this.maxTokens, this.temperature, this.stopSequences, this.modelKwargs);
+        const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, convertMessagesToPromptAnthropic(messages), this.maxTokens, this.temperature, options.stop ?? this.stopSequences, this.modelKwargs, fields.bedrockMethod);
         const url = new URL(`https://${endpointHost}/model/${this.model}/${bedrockMethod}`);
         const request = new HttpRequest({
             hostname: url.hostname,
@@ -229,7 +252,9 @@ export class ChatBedrock extends SimpleChatModel {
         const provider = this.model.split(".")[0];
         const service = "bedrock-runtime";
         const endpointHost = this.endpointHost ?? `${service}.${this.region}.amazonaws.com`;
-        const bedrockMethod = provider === "anthropic"
+        const bedrockMethod = provider === "anthropic" || provider === "cohere"
+            ? "invoke-with-response-stream"
+            : "invoke";
         const response = await this._signedFetch(messages, options, {
             bedrockMethod,
             endpointHost,
@@ -238,7 +263,7 @@ export class ChatBedrock extends SimpleChatModel {
         if (response.status < 200 || response.status >= 300) {
             throw Error(`Failed to access underlying url '${endpointHost}': got ${response.status} ${response.statusText}: ${await response.text()}`);
         }
-        if (provider === "anthropic") {
+        if (provider === "anthropic" || provider === "cohere") {
            const reader = response.body?.getReader();
            const decoder = new TextDecoder();
            for await (const chunk of this._readChunks(reader)) {
@@ -253,7 +278,7 @@ export class ChatBedrock extends SimpleChatModel {
                    throw new Error(body.message);
                }
                if (body.bytes !== undefined) {
-                    const chunkResult = JSON.parse(
+                    const chunkResult = JSON.parse(decoder.decode(Uint8Array.from(atob(body.bytes), (m) => m.codePointAt(0) ?? 0)));
                    const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, chunkResult);
                    yield new ChatGenerationChunk({
                        text,
@@ -291,3 +316,7 @@ export class ChatBedrock extends SimpleChatModel {
         return {};
     }
 }
+/**
+ * @deprecated Use `BedrockChat` instead.
+ */
+export const ChatBedrock = BedrockChat;
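
The hunks above rename the exported chat class from ChatBedrock to BedrockChat (ChatBedrock remains as a deprecated alias), add "cohere" to the allowed model providers, thread per-call stop sequences through to the request body, and mark the class serializable with lc_aliases/lc_secrets/lc_attributes. A minimal usage sketch under those changes; the model id, region, and credential values are placeholders, not taken from this diff:

    // Sketch only: assumes the "langchain/chat_models/bedrock/web" entrypoint listed above.
    import { BedrockChat } from "langchain/chat_models/bedrock/web";

    const chat = new BedrockChat({
      model: "anthropic.claude-v2",   // any ai21 / anthropic / amazon / cohere model id
      region: "us-east-1",            // or rely on AWS_DEFAULT_REGION
      credentials: {                  // static keys shown purely for illustration
        accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID,
        secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY,
      },
      maxTokens: 256,
    });
    // `ChatBedrock` still resolves to the same class but is flagged @deprecated.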

package/dist/chat_models/iflytek_xinghuo/index.cjs
CHANGED

@@ -19,7 +19,7 @@ class ChatIflytekXinghuo extends common_js_1.BaseChatIflytekXinghuo {
         const host = "spark-api.xf-yun.com";
         const date = new Date().toUTCString();
         const url = `GET /${this.version}/chat HTTP/1.1`;
-        const { createHmac } = await import("crypto");
+        const { createHmac } = await import("node:crypto");
         const hash = createHmac("sha256", this.iflytekApiSecret)
             .update(`host: ${host}\ndate: ${date}\n${url}`)
             .digest("base64");

package/dist/chat_models/iflytek_xinghuo/index.js
CHANGED

@@ -13,7 +13,7 @@ export class ChatIflytekXinghuo extends BaseChatIflytekXinghuo {
         const host = "spark-api.xf-yun.com";
         const date = new Date().toUTCString();
         const url = `GET /${this.version}/chat HTTP/1.1`;
-        const { createHmac } = await import("crypto");
+        const { createHmac } = await import("node:crypto");
         const hash = createHmac("sha256", this.iflytekApiSecret)
             .update(`host: ${host}\ndate: ${date}\n${url}`)
             .digest("base64");

package/dist/llms/bedrock/index.cjs
ADDED

@@ -0,0 +1,17 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Bedrock = void 0;
+const credential_provider_node_1 = require("@aws-sdk/credential-provider-node");
+const web_js_1 = require("./web.cjs");
+class Bedrock extends web_js_1.Bedrock {
+    static lc_name() {
+        return "Bedrock";
+    }
+    constructor(fields) {
+        super({
+            ...fields,
+            credentials: fields?.credentials ?? (0, credential_provider_node_1.defaultProvider)(),
+        });
+    }
+}
+exports.Bedrock = Bedrock;

package/dist/llms/bedrock/index.d.ts
ADDED

@@ -0,0 +1,7 @@
+import { BaseBedrockInput } from "../../util/bedrock.js";
+import { BaseLLMParams } from "../base.js";
+import { Bedrock as BaseBedrock } from "./web.js";
+export declare class Bedrock extends BaseBedrock {
+    static lc_name(): string;
+    constructor(fields?: Partial<BaseBedrockInput> & BaseLLMParams);
+}

package/dist/llms/bedrock/index.js
ADDED

@@ -0,0 +1,13 @@
+import { defaultProvider } from "@aws-sdk/credential-provider-node";
+import { Bedrock as BaseBedrock } from "./web.js";
+export class Bedrock extends BaseBedrock {
+    static lc_name() {
+        return "Bedrock";
+    }
+    constructor(fields) {
+        super({
+            ...fields,
+            credentials: fields?.credentials ?? defaultProvider(),
+        });
+    }
+}
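
The three new files above give the LLM a Node-only `langchain/llms/bedrock` entrypoint that subclasses the web implementation and fills in credentials from @aws-sdk/credential-provider-node's defaultProvider() when none are passed, while the `langchain/llms/bedrock/web` build (diffed next) throws unless credentials are supplied explicitly. A rough sketch of the split, with placeholder model and region values:

    // Node build: credentials come from the default AWS chain (env vars, shared config, IMDS, ...).
    import { Bedrock } from "langchain/llms/bedrock";
    const llm = new Bedrock({ model: "anthropic.claude-v2", region: "us-east-1" });

    // Web build: no Node credential provider, so pass credentials yourself.
    // import { Bedrock } from "langchain/llms/bedrock/web";
    // const llm = new Bedrock({ model: "...", region: "...",
    //   credentials: { accessKeyId: "...", secretAccessKey: "..." } });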

package/dist/llms/{bedrock.cjs → bedrock/web.cjs}
CHANGED

@@ -2,15 +2,14 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Bedrock = void 0;
 const signature_v4_1 = require("@smithy/signature-v4");
-const credential_provider_node_1 = require("@aws-sdk/credential-provider-node");
 const protocol_http_1 = require("@smithy/protocol-http");
 const eventstream_codec_1 = require("@smithy/eventstream-codec");
 const util_utf8_1 = require("@smithy/util-utf8");
 const sha256_js_1 = require("@aws-crypto/sha256-js");
-const bedrock_js_1 = require("
-const env_js_1 = require("
-const base_js_1 = require("
-const index_js_1 = require("
+const bedrock_js_1 = require("../../util/bedrock.cjs");
+const env_js_1 = require("../../util/env.cjs");
+const base_js_1 = require("../base.cjs");
+const index_js_1 = require("../../schema/index.cjs");
 /**
  * A type of Large Language Model (LLM) that interacts with the Bedrock
  * service. It extends the base `LLM` class and implements the
@@ -21,8 +20,20 @@ const index_js_1 = require("../schema/index.cjs");
  * region, and the maximum number of tokens to generate.
  */
 class Bedrock extends base_js_1.LLM {
+    get lc_aliases() {
+        return {
+            model: "model_id",
+            region: "region_name",
+        };
+    }
     get lc_secrets() {
-        return {
+        return {
+            "credentials.accessKeyId": "BEDROCK_AWS_ACCESS_KEY_ID",
+            "credentials.secretAccessKey": "BEDROCK_AWS_SECRET_ACCESS_KEY",
+        };
+    }
+    get lc_attributes() {
+        return { region: this.region };
     }
     _llmType() {
         return "bedrock";
@@ -74,6 +85,7 @@ class Bedrock extends base_js_1.LLM {
             writable: true,
             value: void 0
         });
+        /** @deprecated */
         Object.defineProperty(this, "stopSequences", {
             enumerable: true,
             configurable: true,
@@ -98,8 +110,14 @@ class Bedrock extends base_js_1.LLM {
             writable: true,
             value: false
         });
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
         this.model = fields?.model ?? this.model;
-        const allowedModels = ["ai21", "anthropic", "amazon"];
+        const allowedModels = ["ai21", "anthropic", "amazon", "cohere"];
         if (!allowedModels.includes(this.model.split(".")[0])) {
             throw new Error(`Unknown model: '${this.model}', only these are supported: ${allowedModels}`);
         }
@@ -108,7 +126,11 @@ class Bedrock extends base_js_1.LLM {
             throw new Error("Please set the AWS_DEFAULT_REGION environment variable or pass it to the constructor as the region field.");
         }
         this.region = region;
-
+        const credentials = fields?.credentials;
+        if (!credentials) {
+            throw new Error("Please set the AWS credentials in the 'credentials' field.");
+        }
+        this.credentials = credentials;
         this.temperature = fields?.temperature ?? this.temperature;
         this.maxTokens = fields?.maxTokens ?? this.maxTokens;
         this.fetchFn = fields?.fetchFn ?? fetch;
@@ -158,7 +180,7 @@ class Bedrock extends base_js_1.LLM {
     }
     async _signedFetch(prompt, options, fields) {
         const { bedrockMethod, endpointHost, provider } = fields;
-        const inputBody = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature, this.stopSequences, this.modelKwargs);
+        const inputBody = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature, options.stop ?? this.stopSequences, this.modelKwargs, fields.bedrockMethod);
         const url = new URL(`https://${endpointHost}/model/${this.model}/${bedrockMethod}`);
         const request = new protocol_http_1.HttpRequest({
             hostname: url.hostname,
@@ -189,9 +211,21 @@ class Bedrock extends base_js_1.LLM {
         }));
         return response;
     }
+    invocationParams(options) {
+        return {
+            model: this.model,
+            region: this.region,
+            temperature: this.temperature,
+            maxTokens: this.maxTokens,
+            stop: options?.stop ?? this.stopSequences,
+            modelKwargs: this.modelKwargs,
+        };
+    }
     async *_streamResponseChunks(prompt, options, runManager) {
         const provider = this.model.split(".")[0];
-        const bedrockMethod = provider === "anthropic"
+        const bedrockMethod = provider === "anthropic" || provider === "cohere"
+            ? "invoke-with-response-stream"
+            : "invoke";
         const service = "bedrock-runtime";
         const endpointHost = this.endpointHost ?? `${service}.${this.region}.amazonaws.com`;
         // Send request to AWS using the low-level fetch API
@@ -203,7 +237,7 @@ class Bedrock extends base_js_1.LLM {
         if (response.status < 200 || response.status >= 300) {
             throw Error(`Failed to access underlying url '${endpointHost}': got ${response.status} ${response.statusText}: ${await response.text()}`);
         }
-        if (provider === "anthropic") {
+        if (provider === "anthropic" || provider === "cohere") {
            const reader = response.body?.getReader();
            const decoder = new TextDecoder();
            for await (const chunk of this._readChunks(reader)) {
@@ -218,7 +252,7 @@ class Bedrock extends base_js_1.LLM {
                    throw new Error(body.message);
                }
                if (body.bytes !== undefined) {
-                    const chunkResult = JSON.parse(
+                    const chunkResult = JSON.parse(decoder.decode(Uint8Array.from(atob(body.bytes), (m) => m.codePointAt(0) ?? 0)));
                    const text = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareOutput(provider, chunkResult);
                    yield new index_js_1.GenerationChunk({
                        text,

package/dist/llms/{bedrock.d.ts → bedrock/web.d.ts}
CHANGED

@@ -1,8 +1,9 @@
 import { EventStreamCodec } from "@smithy/eventstream-codec";
-import { BaseBedrockInput, type CredentialType } from "
-import { LLM, BaseLLMParams } from "
-import { CallbackManagerForLLMRun } from "
-import { GenerationChunk } from "
+import { BaseBedrockInput, type CredentialType } from "../../util/bedrock.js";
+import { LLM, BaseLLMParams } from "../base.js";
+import { CallbackManagerForLLMRun } from "../../callbacks/manager.js";
+import { GenerationChunk } from "../../schema/index.js";
+import { SerializedFields } from "../../load/map_keys.js";
 /**
  * A type of Large Language Model (LLM) that interacts with the Bedrock
  * service. It extends the base `LLM` class and implements the
@@ -20,13 +21,17 @@ export declare class Bedrock extends LLM implements BaseBedrockInput {
     maxTokens?: number | undefined;
     fetchFn: typeof fetch;
     endpointHost?: string;
+    /** @deprecated */
     stopSequences?: string[];
     modelKwargs?: Record<string, unknown>;
     codec: EventStreamCodec;
     streaming: boolean;
+    lc_serializable: boolean;
+    get lc_aliases(): Record<string, string>;
     get lc_secrets(): {
         [key: string]: string;
     } | undefined;
+    get lc_attributes(): SerializedFields | undefined;
     _llmType(): string;
     static lc_name(): string;
     constructor(fields?: Partial<BaseBedrockInput> & BaseLLMParams);
@@ -46,6 +51,14 @@ export declare class Bedrock extends LLM implements BaseBedrockInput {
         endpointHost: string;
         provider: string;
     }): Promise<Response>;
+    invocationParams(options?: this["ParsedCallOptions"]): {
+        model: string;
+        region: string;
+        temperature: number | undefined;
+        maxTokens: number | undefined;
+        stop: string[] | undefined;
+        modelKwargs: Record<string, unknown> | undefined;
+    };
     _streamResponseChunks(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;
     _readChunks(reader: any): {
         [Symbol.asyncIterator](): AsyncGenerator<any, void, unknown>;
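
The declaration now exposes `lc_serializable`, `lc_aliases`, and `lc_attributes`, which hooks Bedrock into LangChain's serialization layer (fields are aliased to the Python-style `model_id`/`region_name` names). A hedged sketch, assuming the `toJSON()` helper inherited from `Serializable` in `langchain/load/serializable`:

    // `llm` is the Bedrock instance from the earlier sketch.
    const serialized = llm.toJSON();
    // Keys listed in lc_secrets (credentials.accessKeyId / secretAccessKey) should be
    // referenced by their env-var names rather than embedded as plain values.
    console.log(JSON.stringify(serialized, null, 2));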

package/dist/llms/{bedrock.js → bedrock/web.js}
CHANGED

@@ -1,13 +1,12 @@
 import { SignatureV4 } from "@smithy/signature-v4";
-import { defaultProvider } from "@aws-sdk/credential-provider-node";
 import { HttpRequest } from "@smithy/protocol-http";
 import { EventStreamCodec } from "@smithy/eventstream-codec";
 import { fromUtf8, toUtf8 } from "@smithy/util-utf8";
 import { Sha256 } from "@aws-crypto/sha256-js";
-import { BedrockLLMInputOutputAdapter, } from "
-import { getEnvironmentVariable } from "
-import { LLM } from "
-import { GenerationChunk } from "
+import { BedrockLLMInputOutputAdapter, } from "../../util/bedrock.js";
+import { getEnvironmentVariable } from "../../util/env.js";
+import { LLM } from "../base.js";
+import { GenerationChunk } from "../../schema/index.js";
 /**
  * A type of Large Language Model (LLM) that interacts with the Bedrock
  * service. It extends the base `LLM` class and implements the
@@ -18,8 +17,20 @@ import { GenerationChunk } from "../schema/index.js";
  * region, and the maximum number of tokens to generate.
  */
 export class Bedrock extends LLM {
+    get lc_aliases() {
+        return {
+            model: "model_id",
+            region: "region_name",
+        };
+    }
     get lc_secrets() {
-        return {
+        return {
+            "credentials.accessKeyId": "BEDROCK_AWS_ACCESS_KEY_ID",
+            "credentials.secretAccessKey": "BEDROCK_AWS_SECRET_ACCESS_KEY",
+        };
+    }
+    get lc_attributes() {
+        return { region: this.region };
     }
     _llmType() {
         return "bedrock";
@@ -71,6 +82,7 @@ export class Bedrock extends LLM {
             writable: true,
             value: void 0
         });
+        /** @deprecated */
         Object.defineProperty(this, "stopSequences", {
             enumerable: true,
             configurable: true,
@@ -95,8 +107,14 @@ export class Bedrock extends LLM {
             writable: true,
             value: false
         });
+        Object.defineProperty(this, "lc_serializable", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: true
+        });
         this.model = fields?.model ?? this.model;
-        const allowedModels = ["ai21", "anthropic", "amazon"];
+        const allowedModels = ["ai21", "anthropic", "amazon", "cohere"];
         if (!allowedModels.includes(this.model.split(".")[0])) {
             throw new Error(`Unknown model: '${this.model}', only these are supported: ${allowedModels}`);
         }
@@ -105,7 +123,11 @@ export class Bedrock extends LLM {
             throw new Error("Please set the AWS_DEFAULT_REGION environment variable or pass it to the constructor as the region field.");
         }
         this.region = region;
-
+        const credentials = fields?.credentials;
+        if (!credentials) {
+            throw new Error("Please set the AWS credentials in the 'credentials' field.");
+        }
+        this.credentials = credentials;
         this.temperature = fields?.temperature ?? this.temperature;
         this.maxTokens = fields?.maxTokens ?? this.maxTokens;
         this.fetchFn = fields?.fetchFn ?? fetch;
@@ -155,7 +177,7 @@ export class Bedrock extends LLM {
     }
     async _signedFetch(prompt, options, fields) {
         const { bedrockMethod, endpointHost, provider } = fields;
-        const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature, this.stopSequences, this.modelKwargs);
+        const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature, options.stop ?? this.stopSequences, this.modelKwargs, fields.bedrockMethod);
         const url = new URL(`https://${endpointHost}/model/${this.model}/${bedrockMethod}`);
         const request = new HttpRequest({
             hostname: url.hostname,
@@ -186,9 +208,21 @@ export class Bedrock extends LLM {
         }));
         return response;
     }
+    invocationParams(options) {
+        return {
+            model: this.model,
+            region: this.region,
+            temperature: this.temperature,
+            maxTokens: this.maxTokens,
+            stop: options?.stop ?? this.stopSequences,
+            modelKwargs: this.modelKwargs,
+        };
+    }
     async *_streamResponseChunks(prompt, options, runManager) {
         const provider = this.model.split(".")[0];
-        const bedrockMethod = provider === "anthropic"
+        const bedrockMethod = provider === "anthropic" || provider === "cohere"
+            ? "invoke-with-response-stream"
+            : "invoke";
         const service = "bedrock-runtime";
         const endpointHost = this.endpointHost ?? `${service}.${this.region}.amazonaws.com`;
         // Send request to AWS using the low-level fetch API
@@ -200,7 +234,7 @@ export class Bedrock extends LLM {
         if (response.status < 200 || response.status >= 300) {
             throw Error(`Failed to access underlying url '${endpointHost}': got ${response.status} ${response.statusText}: ${await response.text()}`);
         }
-        if (provider === "anthropic") {
+        if (provider === "anthropic" || provider === "cohere") {
            const reader = response.body?.getReader();
            const decoder = new TextDecoder();
            for await (const chunk of this._readChunks(reader)) {
@@ -215,7 +249,7 @@ export class Bedrock extends LLM {
                    throw new Error(body.message);
                }
                if (body.bytes !== undefined) {
-                    const chunkResult = JSON.parse(
+                    const chunkResult = JSON.parse(decoder.decode(Uint8Array.from(atob(body.bytes), (m) => m.codePointAt(0) ?? 0)));
                    const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, chunkResult);
                    yield new GenerationChunk({
                        text,
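
Besides the cohere support, this hunk threads `options.stop` through to `BedrockLLMInputOutputAdapter.prepareInput`, so stop sequences can now be set per call rather than only through the now-deprecated `stopSequences` constructor field, and the new `invocationParams()` reports the effective parameters. A small sketch; the prompt text and stop string are illustrative only:

    // Per-call stop sequences take precedence over the deprecated constructor field.
    const text = await llm.call("Human: List three colours.\n\nAssistant:", {
      stop: ["\n\nHuman:"],
    });

    // Effective parameters for a call, including the resolved stop list.
    console.log(llm.invocationParams({ stop: ["\n\nHuman:"] }));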

package/dist/load/import_constants.cjs
CHANGED

@@ -36,6 +36,7 @@ exports.optionalImportEntrypoints = [
     "langchain/llms/googlepalm",
     "langchain/llms/sagemaker_endpoint",
     "langchain/llms/bedrock",
+    "langchain/llms/bedrock/web",
     "langchain/llms/llama_cpp",
     "langchain/llms/writer",
     "langchain/llms/portkey",
@@ -111,6 +112,7 @@ exports.optionalImportEntrypoints = [
     "langchain/document_transformers/mozilla_readability",
     "langchain/chat_models/portkey",
     "langchain/chat_models/bedrock",
+    "langchain/chat_models/bedrock/web",
     "langchain/chat_models/googlevertexai",
     "langchain/chat_models/googlevertexai/web",
     "langchain/chat_models/googlepalm",
@@ -137,6 +139,7 @@ exports.optionalImportEntrypoints = [
     "langchain/cache/upstash_redis",
     "langchain/stores/doc/gcs",
     "langchain/stores/file/node",
+    "langchain/stores/message/cassandra",
     "langchain/stores/message/convex",
     "langchain/stores/message/cloudflare_d1",
     "langchain/stores/message/dynamodb",

package/dist/load/import_constants.js
CHANGED

@@ -33,6 +33,7 @@ export const optionalImportEntrypoints = [
     "langchain/llms/googlepalm",
     "langchain/llms/sagemaker_endpoint",
     "langchain/llms/bedrock",
+    "langchain/llms/bedrock/web",
     "langchain/llms/llama_cpp",
     "langchain/llms/writer",
     "langchain/llms/portkey",
@@ -108,6 +109,7 @@ export const optionalImportEntrypoints = [
     "langchain/document_transformers/mozilla_readability",
     "langchain/chat_models/portkey",
     "langchain/chat_models/bedrock",
+    "langchain/chat_models/bedrock/web",
     "langchain/chat_models/googlevertexai",
     "langchain/chat_models/googlevertexai/web",
     "langchain/chat_models/googlepalm",
@@ -134,6 +136,7 @@ export const optionalImportEntrypoints = [
     "langchain/cache/upstash_redis",
     "langchain/stores/doc/gcs",
     "langchain/stores/file/node",
+    "langchain/stores/message/cassandra",
     "langchain/stores/message/convex",
     "langchain/stores/message/cloudflare_d1",
     "langchain/stores/message/dynamodb",
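
Both builds of import_constants register the new deep entrypoints ("langchain/llms/bedrock/web", "langchain/chat_models/bedrock/web", "langchain/stores/message/cassandra") in `optionalImportEntrypoints`, the list of modules that rely on optional peer dependencies and therefore cannot be imported unconditionally. A hedged illustration of what that implies for consumers; the peer-package names are the usual ones for these integrations, not taken from this diff:

    // These imports only resolve when the matching optional peer deps are installed
    // (e.g. the @aws-sdk packages for Bedrock, cassandra-driver for the Cassandra store).
    const { BedrockChat } = await import("langchain/chat_models/bedrock/web");
    const cassandraStore = await import("langchain/stores/message/cassandra");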
package/dist/load/import_map.cjs
CHANGED

@@ -24,8 +24,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
     return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-exports.runnables__remote = exports.evaluation = exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__tavily_search_api = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__chaindesk = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__output_parser = exports.schema__document = exports.schema = exports.chat_models__fake = exports.chat_models__yandex = exports.chat_models__minimax = exports.chat_models__ollama = void 0;
+exports.chat_models__fireworks = exports.chat_models__cloudflare_workersai = exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__web__searchapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__fake = exports.llms__yandex = exports.llms__fireworks = exports.llms__ollama = exports.llms__cloudflare_workersai = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__minimax = exports.embeddings__openai = exports.embeddings__ollama = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains__combine_documents__reduce = exports.chains = exports.tools__render = exports.tools = exports.base_language = exports.agents__openai__output_parser = exports.agents__xml__output_parser = exports.agents__react__output_parser = exports.agents__format_scratchpad__log_to_message = exports.agents__format_scratchpad__xml = exports.agents__format_scratchpad__log = exports.agents__format_scratchpad = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
+exports.runnables__remote = exports.evaluation = exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__tavily_search_api = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__chaindesk = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__prompt_template = exports.schema__output_parser = exports.schema__document = exports.schema = exports.chat_models__fake = exports.chat_models__yandex = exports.chat_models__minimax = exports.chat_models__ollama = exports.chat_models__baiduwenxin = void 0;
 exports.load__serializable = __importStar(require("../load/serializable.cjs"));
 exports.agents = __importStar(require("../agents/index.cjs"));
 exports.agents__toolkits = __importStar(require("../agents/toolkits/index.cjs"));
@@ -40,6 +40,7 @@ exports.base_language = __importStar(require("../base_language/index.cjs"));
 exports.tools = __importStar(require("../tools/index.cjs"));
 exports.tools__render = __importStar(require("../tools/render.cjs"));
 exports.chains = __importStar(require("../chains/index.cjs"));
+exports.chains__combine_documents__reduce = __importStar(require("../chains/combine_documents/reduce.cjs"));
 exports.chains__openai_functions = __importStar(require("../chains/openai_functions/index.cjs"));
 exports.embeddings__base = __importStar(require("../embeddings/base.cjs"));
 exports.embeddings__cache_backed = __importStar(require("../embeddings/cache_backed.cjs"));
@@ -83,6 +84,7 @@ exports.chat_models__fake = __importStar(require("../chat_models/fake.cjs"));
 exports.schema = __importStar(require("../schema/index.cjs"));
 exports.schema__document = __importStar(require("../schema/document.cjs"));
 exports.schema__output_parser = __importStar(require("../schema/output_parser.cjs"));
+exports.schema__prompt_template = __importStar(require("../schema/prompt_template.cjs"));
 exports.schema__query_constructor = __importStar(require("../schema/query_constructor.cjs"));
 exports.schema__retriever = __importStar(require("../schema/retriever.cjs"));
 exports.schema__runnable = __importStar(require("../schema/runnable/index.cjs"));

package/dist/load/import_map.d.ts
CHANGED

@@ -12,6 +12,7 @@ export * as base_language from "../base_language/index.js";
 export * as tools from "../tools/index.js";
 export * as tools__render from "../tools/render.js";
 export * as chains from "../chains/index.js";
+export * as chains__combine_documents__reduce from "../chains/combine_documents/reduce.js";
 export * as chains__openai_functions from "../chains/openai_functions/index.js";
 export * as embeddings__base from "../embeddings/base.js";
 export * as embeddings__cache_backed from "../embeddings/cache_backed.js";
@@ -55,6 +56,7 @@ export * as chat_models__fake from "../chat_models/fake.js";
 export * as schema from "../schema/index.js";
 export * as schema__document from "../schema/document.js";
 export * as schema__output_parser from "../schema/output_parser.js";
+export * as schema__prompt_template from "../schema/prompt_template.js";
 export * as schema__query_constructor from "../schema/query_constructor.js";
 export * as schema__retriever from "../schema/retriever.js";
 export * as schema__runnable from "../schema/runnable/index.js";
package/dist/load/import_map.js
CHANGED

@@ -13,6 +13,7 @@ export * as base_language from "../base_language/index.js";
 export * as tools from "../tools/index.js";
 export * as tools__render from "../tools/render.js";
 export * as chains from "../chains/index.js";
+export * as chains__combine_documents__reduce from "../chains/combine_documents/reduce.js";
 export * as chains__openai_functions from "../chains/openai_functions/index.js";
 export * as embeddings__base from "../embeddings/base.js";
 export * as embeddings__cache_backed from "../embeddings/cache_backed.js";
@@ -56,6 +57,7 @@ export * as chat_models__fake from "../chat_models/fake.js";
 export * as schema from "../schema/index.js";
 export * as schema__document from "../schema/document.js";
 export * as schema__output_parser from "../schema/output_parser.js";
+export * as schema__prompt_template from "../schema/prompt_template.js";
 export * as schema__query_constructor from "../schema/query_constructor.js";
 export * as schema__retriever from "../schema/retriever.js";
 export * as schema__runnable from "../schema/runnable/index.js";