langchain 0.0.155 → 0.0.157
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/chains/graph_qa/cypher.cjs +1 -0
- package/chains/graph_qa/cypher.d.ts +1 -0
- package/chains/graph_qa/cypher.js +1 -0
- package/chat_models/bedrock.cjs +1 -0
- package/chat_models/bedrock.d.ts +1 -0
- package/chat_models/bedrock.js +1 -0
- package/dist/agents/index.d.ts +12 -12
- package/dist/agents/toolkits/index.d.ts +2 -2
- package/dist/cache/upstash_redis.cjs +1 -1
- package/dist/cache/upstash_redis.js +1 -1
- package/dist/callbacks/index.d.ts +3 -3
- package/dist/chains/graph_qa/cypher.cjs +151 -0
- package/dist/chains/graph_qa/cypher.d.ts +45 -0
- package/dist/chains/graph_qa/cypher.js +147 -0
- package/dist/chains/graph_qa/prompts.cjs +34 -0
- package/dist/chains/graph_qa/prompts.d.ts +9 -0
- package/dist/chains/graph_qa/prompts.js +31 -0
- package/dist/chains/index.d.ts +19 -19
- package/dist/chains/index.js +2 -2
- package/dist/chains/openai_functions/index.d.ts +3 -3
- package/dist/chains/query_constructor/index.d.ts +1 -1
- package/dist/chat_models/bedrock.cjs +260 -0
- package/dist/chat_models/bedrock.d.ts +58 -0
- package/dist/chat_models/bedrock.js +254 -0
- package/dist/chat_models/googlevertexai/index.cjs +1 -3
- package/dist/chat_models/googlevertexai/index.d.ts +1 -1
- package/dist/chat_models/googlevertexai/index.js +0 -1
- package/dist/chat_models/googlevertexai/web.cjs +1 -3
- package/dist/chat_models/googlevertexai/web.d.ts +1 -1
- package/dist/chat_models/googlevertexai/web.js +0 -1
- package/dist/chat_models/openai.d.ts +1 -1
- package/dist/embeddings/cloudflare_workersai.cjs +69 -0
- package/dist/embeddings/cloudflare_workersai.d.ts +28 -0
- package/dist/embeddings/cloudflare_workersai.js +65 -0
- package/dist/experimental/autogpt/index.d.ts +3 -3
- package/dist/experimental/babyagi/index.d.ts +1 -1
- package/dist/experimental/plan_and_execute/index.d.ts +1 -1
- package/dist/graphs/neo4j_graph.cjs +112 -0
- package/dist/graphs/neo4j_graph.d.ts +18 -0
- package/dist/graphs/neo4j_graph.js +105 -0
- package/dist/llms/bedrock.cjs +57 -67
- package/dist/llms/bedrock.d.ts +8 -35
- package/dist/llms/bedrock.js +57 -67
- package/dist/llms/openai-chat.d.ts +1 -1
- package/dist/llms/openai.d.ts +1 -1
- package/dist/load/import_constants.cjs +5 -0
- package/dist/load/import_constants.js +5 -0
- package/dist/memory/index.d.ts +8 -8
- package/dist/memory/index.js +1 -1
- package/dist/output_parsers/index.d.ts +3 -3
- package/dist/prompts/index.d.ts +8 -8
- package/dist/retrievers/remote/index.d.ts +3 -3
- package/dist/schema/runnable/index.d.ts +3 -3
- package/dist/sql_db.d.ts +1 -1
- package/dist/tools/index.d.ts +12 -12
- package/dist/util/bedrock.cjs +54 -0
- package/dist/util/bedrock.d.ts +59 -0
- package/dist/util/bedrock.js +50 -0
- package/dist/vectorstores/cloudflare_vectorize.cjs +200 -0
- package/dist/vectorstores/cloudflare_vectorize.d.ts +90 -0
- package/dist/vectorstores/cloudflare_vectorize.js +173 -0
- package/dist/vectorstores/supabase.d.ts +1 -1
- package/embeddings/cloudflare_workersai.cjs +1 -0
- package/embeddings/cloudflare_workersai.d.ts +1 -0
- package/embeddings/cloudflare_workersai.js +1 -0
- package/graphs/neo4j_graph.cjs +1 -0
- package/graphs/neo4j_graph.d.ts +1 -0
- package/graphs/neo4j_graph.js +1 -0
- package/package.json +62 -14
- package/vectorstores/cloudflare_vectorize.cjs +1 -0
- package/vectorstores/cloudflare_vectorize.d.ts +1 -0
- package/vectorstores/cloudflare_vectorize.js +1 -0

package/dist/embeddings/cloudflare_workersai.cjs
ADDED
@@ -0,0 +1,69 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.CloudflareWorkersAIEmbeddings = void 0;
+const ai_1 = require("@cloudflare/ai");
+const chunk_js_1 = require("../util/chunk.cjs");
+const base_js_1 = require("./base.cjs");
+class CloudflareWorkersAIEmbeddings extends base_js_1.Embeddings {
+constructor(fields) {
+super(fields);
+Object.defineProperty(this, "modelName", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: "@cf/baai/bge-base-en-v1.5"
+});
+Object.defineProperty(this, "batchSize", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 50
+});
+Object.defineProperty(this, "stripNewLines", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: true
+});
+Object.defineProperty(this, "ai", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+if (!fields.binding) {
+throw new Error("Must supply a Workers AI binding, eg { binding: env.AI }");
+}
+this.ai = new ai_1.Ai(fields.binding);
+this.modelName = fields.modelName ?? this.modelName;
+this.stripNewLines = fields.stripNewLines ?? this.stripNewLines;
+}
+async embedDocuments(texts) {
+const batches = (0, chunk_js_1.chunkArray)(this.stripNewLines ? texts.map((t) => t.replace(/\n/g, " ")) : texts, this.batchSize);
+const batchRequests = batches.map((batch) => this.runEmbedding(batch));
+const batchResponses = await Promise.all(batchRequests);
+const embeddings = [];
+for (let i = 0; i < batchResponses.length; i += 1) {
+const batchResponse = batchResponses[i];
+for (let j = 0; j < batchResponse.length; j += 1) {
+embeddings.push(batchResponse[j]);
+}
+}
+return embeddings;
+}
+async embedQuery(text) {
+const data = await this.runEmbedding([
+this.stripNewLines ? text.replace(/\n/g, " ") : text,
+]);
+return data[0];
+}
+async runEmbedding(texts) {
+return this.caller.call(async () => {
+const response = await this.ai.run(this.modelName, {
+text: texts,
+});
+return response.data;
+});
+}
+}
+exports.CloudflareWorkersAIEmbeddings = CloudflareWorkersAIEmbeddings;

package/dist/embeddings/cloudflare_workersai.d.ts
ADDED
@@ -0,0 +1,28 @@
+import { Ai } from "@cloudflare/ai";
+import { Fetcher } from "@cloudflare/workers-types";
+import { Embeddings, EmbeddingsParams } from "./base.js";
+export interface CloudflareWorkersAIEmbeddingsParams extends EmbeddingsParams {
+/** Binding */
+binding: Fetcher;
+/** Model name to use */
+modelName?: string;
+/**
+ * The maximum number of documents to embed in a single request.
+ */
+batchSize?: number;
+/**
+ * Whether to strip new lines from the input text. This is recommended by
+ * OpenAI, but may not be suitable for all use cases.
+ */
+stripNewLines?: boolean;
+}
+export declare class CloudflareWorkersAIEmbeddings extends Embeddings {
+modelName: string;
+batchSize: number;
+stripNewLines: boolean;
+ai: Ai;
+constructor(fields: CloudflareWorkersAIEmbeddingsParams);
+embedDocuments(texts: string[]): Promise<number[][]>;
+embedQuery(text: string): Promise<number[]>;
+private runEmbedding;
+}

package/dist/embeddings/cloudflare_workersai.js
ADDED
@@ -0,0 +1,65 @@
+import { Ai } from "@cloudflare/ai";
+import { chunkArray } from "../util/chunk.js";
+import { Embeddings } from "./base.js";
+export class CloudflareWorkersAIEmbeddings extends Embeddings {
+constructor(fields) {
+super(fields);
+Object.defineProperty(this, "modelName", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: "@cf/baai/bge-base-en-v1.5"
+});
+Object.defineProperty(this, "batchSize", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: 50
+});
+Object.defineProperty(this, "stripNewLines", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: true
+});
+Object.defineProperty(this, "ai", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+if (!fields.binding) {
+throw new Error("Must supply a Workers AI binding, eg { binding: env.AI }");
+}
+this.ai = new Ai(fields.binding);
+this.modelName = fields.modelName ?? this.modelName;
+this.stripNewLines = fields.stripNewLines ?? this.stripNewLines;
+}
+async embedDocuments(texts) {
+const batches = chunkArray(this.stripNewLines ? texts.map((t) => t.replace(/\n/g, " ")) : texts, this.batchSize);
+const batchRequests = batches.map((batch) => this.runEmbedding(batch));
+const batchResponses = await Promise.all(batchRequests);
+const embeddings = [];
+for (let i = 0; i < batchResponses.length; i += 1) {
+const batchResponse = batchResponses[i];
+for (let j = 0; j < batchResponse.length; j += 1) {
+embeddings.push(batchResponse[j]);
+}
+}
+return embeddings;
+}
+async embedQuery(text) {
+const data = await this.runEmbedding([
+this.stripNewLines ? text.replace(/\n/g, " ") : text,
+]);
+return data[0];
+}
+async runEmbedding(texts) {
+return this.caller.call(async () => {
+const response = await this.ai.run(this.modelName, {
+text: texts,
+});
+return response.data;
+});
+}
+}
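
The three files above add a CloudflareWorkersAIEmbeddings class: it wraps a Workers AI binding, defaults to the @cf/baai/bge-base-en-v1.5 model, optionally strips newlines, and batches up to 50 texts per request. Below is a minimal usage sketch inside a Cloudflare Worker; the langchain/embeddings/cloudflare_workersai import path is inferred from the entrypoint files in the list above, and the AI binding name is a placeholder for whatever is configured in wrangler.toml.

import type { Fetcher } from "@cloudflare/workers-types";
import { CloudflareWorkersAIEmbeddings } from "langchain/embeddings/cloudflare_workersai";

export interface Env {
  AI: Fetcher; // Workers AI binding (placeholder name)
}

export default {
  async fetch(_request: Request, env: Env): Promise<Response> {
    // The constructor throws if no binding is supplied (see the check in the source above)
    const embeddings = new CloudflareWorkersAIEmbeddings({
      binding: env.AI,
      // modelName and stripNewLines fall back to the defaults shown in the source
    });
    const vector = await embeddings.embedQuery("hello world");
    const docs = await embeddings.embedDocuments(["first doc", "second doc"]);
    return Response.json({ queryDims: vector.length, docCount: docs.length });
  },
};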

package/dist/experimental/autogpt/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
-export { AutoGPTPrompt, AutoGPTPromptInput } from "./prompt.js";
+export { AutoGPTPrompt, type AutoGPTPromptInput } from "./prompt.js";
 export { AutoGPTOutputParser, preprocessJsonInput } from "./output_parser.js";
-export { AutoGPT, AutoGPTInput } from "./agent.js";
-export { AutoGPTAction } from "./schema.js";
+export { AutoGPT, type AutoGPTInput } from "./agent.js";
+export type { AutoGPTAction } from "./schema.js";

package/dist/experimental/babyagi/index.d.ts
CHANGED
@@ -1,4 +1,4 @@
 export { TaskCreationChain } from "./task_creation.js";
 export { TaskExecutionChain } from "./task_execution.js";
 export { TaskPrioritizationChain } from "./task_prioritization.js";
-export { BabyAGI, Task, BabyAGIInputs } from "./agent.js";
+export { BabyAGI, type Task, type BabyAGIInputs } from "./agent.js";

package/dist/experimental/plan_and_execute/index.d.ts
CHANGED
@@ -1,3 +1,3 @@
 export { PlanAndExecuteAgentExecutor } from "./agent_executor.js";
-export { BasePlanner, BaseStepContainer, BaseStepExecutor, StepAction, StepResult, Step, Plan, ListStepContainer, LLMPlanner, ChainStepExecutor, } from "./base.js";
+export { BasePlanner, BaseStepContainer, BaseStepExecutor, type StepAction, type StepResult, type Step, type Plan, ListStepContainer, LLMPlanner, ChainStepExecutor, } from "./base.js";
 export { PlanOutputParser } from "./outputParser.js";

package/dist/graphs/neo4j_graph.cjs
ADDED
@@ -0,0 +1,112 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.Neo4jGraph = void 0;
+const neo4j_driver_1 = __importDefault(require("neo4j-driver"));
+class Neo4jGraph {
+constructor({ url, username, password, database = "neo4j", }) {
+Object.defineProperty(this, "driver", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "database", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "schema", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: ""
+});
+try {
+this.driver = neo4j_driver_1.default.driver(url, neo4j_driver_1.default.auth.basic(username, password));
+this.database = database;
+}
+catch (error) {
+throw new Error("Could not create a Neo4j driver instance. Please check the connection details.");
+}
+}
+static async initialize(config) {
+const graph = new Neo4jGraph(config);
+try {
+await graph.verifyConnectivity();
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+}
+catch (error) {
+console.log("Failed to verify connection.");
+}
+try {
+await graph.refreshSchema();
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+}
+catch (error) {
+throw new Error(`Error: ${error.message}`);
+}
+finally {
+console.log("Schema refreshed successfully.");
+}
+return graph;
+}
+getSchema() {
+return this.schema;
+}
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+async query(query, params = {}) {
+const session = this.driver.session({ database: this.database });
+try {
+const result = await session.run(query, params);
+return result.records.map((record) => record.toObject());
+}
+finally {
+await session.close();
+}
+}
+async verifyConnectivity() {
+const session = this.driver.session({ database: this.database });
+await session.close();
+}
+async refreshSchema() {
+const nodePropertiesQuery = `
+CALL apoc.meta.data()
+YIELD label, other, elementType, type, property
+WHERE NOT type = "RELATIONSHIP" AND elementType = "node"
+WITH label AS nodeLabels, collect({property:property, type:type}) AS properties
+RETURN {labels: nodeLabels, properties: properties} AS output
+`;
+const relPropertiesQuery = `
+CALL apoc.meta.data()
+YIELD label, other, elementType, type, property
+WHERE NOT type = "RELATIONSHIP" AND elementType = "relationship"
+WITH label AS nodeLabels, collect({property:property, type:type}) AS properties
+RETURN {type: nodeLabels, properties: properties} AS output
+`;
+const relQuery = `
+CALL apoc.meta.data()
+YIELD label, other, elementType, type, property
+WHERE type = "RELATIONSHIP" AND elementType = "node"
+UNWIND other AS other_node
+RETURN "(:" + label + ")-[:" + property + "]->(:" + toString(other_node) + ")" AS output
+`;
+const nodeProperties = await this.query(nodePropertiesQuery);
+const relationshipsProperties = await this.query(relPropertiesQuery);
+const relationships = await this.query(relQuery);
+this.schema = `
+Node properties are the following:
+${nodeProperties.map((el) => el.output)}
+
+Relationship properties are the following:
+${relationshipsProperties.map((el) => el.output)}
+
+The relationships are the following:
+${relationships.map((el) => el.output)}
+`;
+}
+}
+exports.Neo4jGraph = Neo4jGraph;

package/dist/graphs/neo4j_graph.d.ts
ADDED
@@ -0,0 +1,18 @@
+interface Neo4jGraphConfig {
+url: string;
+username: string;
+password: string;
+database?: string;
+}
+export declare class Neo4jGraph {
+private driver;
+private database;
+private schema;
+constructor({ url, username, password, database, }: Neo4jGraphConfig);
+static initialize(config: Neo4jGraphConfig): Promise<Neo4jGraph>;
+getSchema(): string;
+query(query: string, params?: any): Promise<any[]>;
+verifyConnectivity(): Promise<void>;
+refreshSchema(): Promise<void>;
+}
+export {};

package/dist/graphs/neo4j_graph.js
ADDED
@@ -0,0 +1,105 @@
+import neo4j from "neo4j-driver";
+export class Neo4jGraph {
+constructor({ url, username, password, database = "neo4j", }) {
+Object.defineProperty(this, "driver", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "database", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: void 0
+});
+Object.defineProperty(this, "schema", {
+enumerable: true,
+configurable: true,
+writable: true,
+value: ""
+});
+try {
+this.driver = neo4j.driver(url, neo4j.auth.basic(username, password));
+this.database = database;
+}
+catch (error) {
+throw new Error("Could not create a Neo4j driver instance. Please check the connection details.");
+}
+}
+static async initialize(config) {
+const graph = new Neo4jGraph(config);
+try {
+await graph.verifyConnectivity();
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+}
+catch (error) {
+console.log("Failed to verify connection.");
+}
+try {
+await graph.refreshSchema();
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+}
+catch (error) {
+throw new Error(`Error: ${error.message}`);
+}
+finally {
+console.log("Schema refreshed successfully.");
+}
+return graph;
+}
+getSchema() {
+return this.schema;
+}
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+async query(query, params = {}) {
+const session = this.driver.session({ database: this.database });
+try {
+const result = await session.run(query, params);
+return result.records.map((record) => record.toObject());
+}
+finally {
+await session.close();
+}
+}
+async verifyConnectivity() {
+const session = this.driver.session({ database: this.database });
+await session.close();
+}
+async refreshSchema() {
+const nodePropertiesQuery = `
+CALL apoc.meta.data()
+YIELD label, other, elementType, type, property
+WHERE NOT type = "RELATIONSHIP" AND elementType = "node"
+WITH label AS nodeLabels, collect({property:property, type:type}) AS properties
+RETURN {labels: nodeLabels, properties: properties} AS output
+`;
+const relPropertiesQuery = `
+CALL apoc.meta.data()
+YIELD label, other, elementType, type, property
+WHERE NOT type = "RELATIONSHIP" AND elementType = "relationship"
+WITH label AS nodeLabels, collect({property:property, type:type}) AS properties
+RETURN {type: nodeLabels, properties: properties} AS output
+`;
+const relQuery = `
+CALL apoc.meta.data()
+YIELD label, other, elementType, type, property
+WHERE type = "RELATIONSHIP" AND elementType = "node"
+UNWIND other AS other_node
+RETURN "(:" + label + ")-[:" + property + "]->(:" + toString(other_node) + ")" AS output
+`;
+const nodeProperties = await this.query(nodePropertiesQuery);
+const relationshipsProperties = await this.query(relPropertiesQuery);
+const relationships = await this.query(relQuery);
+this.schema = `
+Node properties are the following:
+${nodeProperties.map((el) => el.output)}
+
+Relationship properties are the following:
+${relationshipsProperties.map((el) => el.output)}
+
+The relationships are the following:
+${relationships.map((el) => el.output)}
+`;
+}
+}
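
Together with the new chains/graph_qa/cypher files in the list above, these graph files introduce a Neo4jGraph wrapper around neo4j-driver: it verifies connectivity, pulls schema metadata via APOC (apoc.meta.data), and exposes a generic query() helper. A minimal standalone sketch follows, assuming the langchain/graphs/neo4j_graph entrypoint implied by the file list; the connection details are placeholders, and refreshSchema() requires the APOC plugin on the server.

import { Neo4jGraph } from "langchain/graphs/neo4j_graph";

// initialize() verifies connectivity and builds the schema string
const graph = await Neo4jGraph.initialize({
  url: "bolt://localhost:7687", // placeholder connection details
  username: "neo4j",
  password: "password",
  // database defaults to "neo4j"
});

// Schema text produced by refreshSchema()
console.log(graph.getSchema());

// Run arbitrary Cypher through a driver session
const rows = await graph.query(
  "MATCH (p:Person { name: $name }) RETURN p.name AS name",
  { name: "Alice" }
);
console.log(rows);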
package/dist/llms/bedrock.cjs
CHANGED
|
@@ -1,67 +1,20 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
3
|
exports.Bedrock = void 0;
|
|
4
|
-
const signature_v4_1 = require("@
|
|
4
|
+
const signature_v4_1 = require("@smithy/signature-v4");
|
|
5
5
|
const credential_provider_node_1 = require("@aws-sdk/credential-provider-node");
|
|
6
|
-
const protocol_http_1 = require("@
|
|
6
|
+
const protocol_http_1 = require("@smithy/protocol-http");
|
|
7
7
|
const eventstream_codec_1 = require("@smithy/eventstream-codec");
|
|
8
8
|
const util_utf8_1 = require("@smithy/util-utf8");
|
|
9
9
|
const sha256_js_1 = require("@aws-crypto/sha256-js");
|
|
10
|
+
const bedrock_js_1 = require("../util/bedrock.cjs");
|
|
10
11
|
const env_js_1 = require("../util/env.cjs");
|
|
11
12
|
const base_js_1 = require("./base.cjs");
|
|
12
13
|
const index_js_1 = require("../schema/index.cjs");
|
|
13
|
-
/**
|
|
14
|
-
* A helper class used within the `Bedrock` class. It is responsible for
|
|
15
|
-
* preparing the input and output for the Bedrock service. It formats the
|
|
16
|
-
* input prompt based on the provider (e.g., "anthropic", "ai21",
|
|
17
|
-
* "amazon") and extracts the generated text from the service response.
|
|
18
|
-
*/
|
|
19
|
-
class BedrockLLMInputOutputAdapter {
|
|
20
|
-
/** Adapter class to prepare the inputs from Langchain to a format
|
|
21
|
-
that LLM model expects. Also, provides a helper function to extract
|
|
22
|
-
the generated text from the model response. */
|
|
23
|
-
static prepareInput(provider, prompt, maxTokens = 50, temperature = 0) {
|
|
24
|
-
const inputBody = {};
|
|
25
|
-
if (provider === "anthropic") {
|
|
26
|
-
inputBody.prompt = prompt;
|
|
27
|
-
inputBody.max_tokens_to_sample = maxTokens;
|
|
28
|
-
inputBody.temperature = temperature;
|
|
29
|
-
}
|
|
30
|
-
else if (provider === "ai21") {
|
|
31
|
-
inputBody.prompt = prompt;
|
|
32
|
-
inputBody.maxTokens = maxTokens;
|
|
33
|
-
inputBody.temperature = temperature;
|
|
34
|
-
}
|
|
35
|
-
else if (provider === "amazon") {
|
|
36
|
-
inputBody.inputText = prompt;
|
|
37
|
-
inputBody.textGenerationConfig = {
|
|
38
|
-
maxTokenCount: maxTokens,
|
|
39
|
-
temperature,
|
|
40
|
-
};
|
|
41
|
-
}
|
|
42
|
-
return inputBody;
|
|
43
|
-
}
|
|
44
|
-
/**
|
|
45
|
-
* Extracts the generated text from the service response.
|
|
46
|
-
* @param provider The provider name.
|
|
47
|
-
* @param responseBody The response body from the service.
|
|
48
|
-
* @returns The generated text.
|
|
49
|
-
*/
|
|
50
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
51
|
-
static prepareOutput(provider, responseBody) {
|
|
52
|
-
if (provider === "anthropic") {
|
|
53
|
-
return responseBody.completion;
|
|
54
|
-
}
|
|
55
|
-
else if (provider === "ai21") {
|
|
56
|
-
return responseBody.data.text;
|
|
57
|
-
}
|
|
58
|
-
return responseBody.outputText;
|
|
59
|
-
}
|
|
60
|
-
}
|
|
61
14
|
/**
|
|
62
15
|
* A type of Large Language Model (LLM) that interacts with the Bedrock
|
|
63
16
|
* service. It extends the base `LLM` class and implements the
|
|
64
|
-
* `
|
|
17
|
+
* `BaseBedrockInput` interface. The class is designed to authenticate and
|
|
65
18
|
* interact with the Bedrock service, which is a part of Amazon Web
|
|
66
19
|
* Services (AWS). It uses AWS credentials for authentication and can be
|
|
67
20
|
* configured with various parameters such as the model to use, the AWS
|
|
@@ -74,6 +27,9 @@ class Bedrock extends base_js_1.LLM {
|
|
|
74
27
|
_llmType() {
|
|
75
28
|
return "bedrock";
|
|
76
29
|
}
|
|
30
|
+
static lc_name() {
|
|
31
|
+
return "Bedrock";
|
|
32
|
+
}
|
|
77
33
|
constructor(fields) {
|
|
78
34
|
super(fields ?? {});
|
|
79
35
|
Object.defineProperty(this, "model", {
|
|
@@ -112,7 +68,19 @@ class Bedrock extends base_js_1.LLM {
|
|
|
112
68
|
writable: true,
|
|
113
69
|
value: void 0
|
|
114
70
|
});
|
|
115
|
-
Object.defineProperty(this, "
|
|
71
|
+
Object.defineProperty(this, "endpointHost", {
|
|
72
|
+
enumerable: true,
|
|
73
|
+
configurable: true,
|
|
74
|
+
writable: true,
|
|
75
|
+
value: void 0
|
|
76
|
+
});
|
|
77
|
+
Object.defineProperty(this, "stopSequences", {
|
|
78
|
+
enumerable: true,
|
|
79
|
+
configurable: true,
|
|
80
|
+
writable: true,
|
|
81
|
+
value: void 0
|
|
82
|
+
});
|
|
83
|
+
Object.defineProperty(this, "modelKwargs", {
|
|
116
84
|
enumerable: true,
|
|
117
85
|
configurable: true,
|
|
118
86
|
writable: true,
|
|
@@ -138,7 +106,9 @@ class Bedrock extends base_js_1.LLM {
|
|
|
138
106
|
this.temperature = fields?.temperature ?? this.temperature;
|
|
139
107
|
this.maxTokens = fields?.maxTokens ?? this.maxTokens;
|
|
140
108
|
this.fetchFn = fields?.fetchFn ?? fetch;
|
|
141
|
-
this.
|
|
109
|
+
this.endpointHost = fields?.endpointHost ?? fields?.endpointUrl;
|
|
110
|
+
this.stopSequences = fields?.stopSequences;
|
|
111
|
+
this.modelKwargs = fields?.modelKwargs;
|
|
142
112
|
}
|
|
143
113
|
/** Call out to Bedrock service model.
|
|
144
114
|
Arguments:
|
|
@@ -159,10 +129,11 @@ class Bedrock extends base_js_1.LLM {
|
|
|
159
129
|
}
|
|
160
130
|
async *_streamResponseChunks(prompt, options, runManager) {
|
|
161
131
|
const provider = this.model.split(".")[0];
|
|
162
|
-
const service = "bedrock";
|
|
163
|
-
const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature);
|
|
164
|
-
const
|
|
165
|
-
const
|
|
132
|
+
const service = "bedrock-runtime";
|
|
133
|
+
const inputBody = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature, this.stopSequences, this.modelKwargs);
|
|
134
|
+
const endpointHost = this.endpointHost ?? `${service}.${this.region}.amazonaws.com`;
|
|
135
|
+
const amazonMethod = provider === "anthropic" ? "invoke-with-response-stream" : "invoke";
|
|
136
|
+
const url = new URL(`https://${endpointHost}/model/${this.model}/${amazonMethod}`);
|
|
166
137
|
const request = new protocol_http_1.HttpRequest({
|
|
167
138
|
hostname: url.hostname,
|
|
168
139
|
path: url.pathname,
|
|
@@ -174,12 +145,12 @@ class Bedrock extends base_js_1.LLM {
|
|
|
174
145
|
// host is required by AWS Signature V4: https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
|
|
175
146
|
host: url.host,
|
|
176
147
|
accept: "application/json",
|
|
177
|
-
"
|
|
148
|
+
"content-type": "application/json",
|
|
178
149
|
},
|
|
179
150
|
});
|
|
180
151
|
const signer = new signature_v4_1.SignatureV4({
|
|
181
152
|
credentials: this.credentials,
|
|
182
|
-
service,
|
|
153
|
+
service: "bedrock",
|
|
183
154
|
region: this.region,
|
|
184
155
|
sha256: sha256_js_1.Sha256,
|
|
185
156
|
});
|
|
@@ -193,15 +164,34 @@ class Bedrock extends base_js_1.LLM {
|
|
|
193
164
|
if (response.status < 200 || response.status >= 300) {
|
|
194
165
|
throw Error(`Failed to access underlying url '${url}': got ${response.status} ${response.statusText}: ${await response.text()}`);
|
|
195
166
|
}
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
const
|
|
199
|
-
|
|
200
|
-
event.
|
|
201
|
-
|
|
167
|
+
if (provider === "anthropic") {
|
|
168
|
+
const reader = response.body?.getReader();
|
|
169
|
+
const decoder = new TextDecoder();
|
|
170
|
+
for await (const chunk of this._readChunks(reader)) {
|
|
171
|
+
const event = this.codec.decode(chunk);
|
|
172
|
+
if ((event.headers[":event-type"] !== undefined &&
|
|
173
|
+
event.headers[":event-type"].value !== "chunk") ||
|
|
174
|
+
event.headers[":content-type"].value !== "application/json") {
|
|
175
|
+
throw Error(`Failed to get event chunk: got ${chunk}`);
|
|
176
|
+
}
|
|
177
|
+
const body = JSON.parse(decoder.decode(event.body));
|
|
178
|
+
if (body.message) {
|
|
179
|
+
throw new Error(body.message);
|
|
180
|
+
}
|
|
181
|
+
if (body.bytes !== undefined) {
|
|
182
|
+
const chunkResult = JSON.parse(Buffer.from(body.bytes, "base64").toString());
|
|
183
|
+
const text = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareOutput(provider, chunkResult);
|
|
184
|
+
yield new index_js_1.GenerationChunk({
|
|
185
|
+
text,
|
|
186
|
+
generationInfo: {},
|
|
187
|
+
});
|
|
188
|
+
await runManager?.handleLLMNewToken(text);
|
|
189
|
+
}
|
|
202
190
|
}
|
|
203
|
-
|
|
204
|
-
|
|
191
|
+
}
|
|
192
|
+
else {
|
|
193
|
+
const json = await response.json();
|
|
194
|
+
const text = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareOutput(provider, json);
|
|
205
195
|
yield new index_js_1.GenerationChunk({
|
|
206
196
|
text,
|
|
207
197
|
generationInfo: {},
|