langchain 0.0.53 → 0.0.54

This diff shows the changes between two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
@@ -269,7 +269,7 @@ class RefineDocumentsChain extends base_js_1.BaseChain {
269
269
  this.initialResponseName =
270
270
  fields.initialResponseName ?? this.initialResponseName;
271
271
  }
272
- _constructInitialInputs(doc, rest) {
272
+ async _constructInitialInputs(doc, rest) {
273
273
  const baseInfo = {
274
274
  page_content: doc.pageContent,
275
275
  ...doc.metadata,
@@ -279,14 +279,14 @@ class RefineDocumentsChain extends base_js_1.BaseChain {
279
279
  documentInfo[value] = baseInfo[value];
280
280
  });
281
281
  const baseInputs = {
282
- [this.documentVariableName]: this.documentPrompt.format({
282
+ [this.documentVariableName]: await this.documentPrompt.format({
283
283
  ...documentInfo,
284
284
  }),
285
285
  };
286
286
  const inputs = { ...baseInputs, ...rest };
287
287
  return inputs;
288
288
  }
289
- _constructRefineInputs(doc, res) {
289
+ async _constructRefineInputs(doc, res) {
290
290
  const baseInfo = {
291
291
  page_content: doc.pageContent,
292
292
  ...doc.metadata,
@@ -296,7 +296,7 @@ class RefineDocumentsChain extends base_js_1.BaseChain {
296
296
  documentInfo[value] = baseInfo[value];
297
297
  });
298
298
  const baseInputs = {
299
- [this.documentVariableName]: this.documentPrompt.format({
299
+ [this.documentVariableName]: await this.documentPrompt.format({
300
300
  ...documentInfo,
301
301
  }),
302
302
  };
@@ -309,11 +309,11 @@ class RefineDocumentsChain extends base_js_1.BaseChain {
309
309
  }
310
310
  const { [this.inputKey]: docs, ...rest } = values;
311
311
  const currentDocs = docs;
312
- const initialInputs = this._constructInitialInputs(currentDocs[0], rest);
312
+ const initialInputs = await this._constructInitialInputs(currentDocs[0], rest);
313
313
  let res = await this.llmChain.predict({ ...initialInputs });
314
314
  const refineSteps = [res];
315
315
  for (let i = 1; i < currentDocs.length; i += 1) {
316
- const refineInputs = this._constructRefineInputs(currentDocs[i], res);
316
+ const refineInputs = await this._constructRefineInputs(currentDocs[i], res);
317
317
  const inputs = { ...refineInputs, ...rest };
318
318
  res = await this.refineLLMChain.predict({ ...inputs });
319
319
  refineSteps.push(res);
@@ -97,12 +97,12 @@ export declare class RefineDocumentsChain extends BaseChain implements RefineDoc
97
97
  documentPrompt?: BasePromptTemplate;
98
98
  initialResponseName?: string;
99
99
  });
100
- _constructInitialInputs(doc: Document, rest: Record<string, unknown>): {
100
+ _constructInitialInputs(doc: Document, rest: Record<string, unknown>): Promise<{
101
101
  [x: string]: unknown;
102
- };
103
- _constructRefineInputs(doc: Document, res: string): {
102
+ }>;
103
+ _constructRefineInputs(doc: Document, res: string): Promise<{
104
104
  [x: string]: unknown;
105
- };
105
+ }>;
106
106
  _call(values: ChainValues): Promise<ChainValues>;
107
107
  _chainType(): "refine_documents_chain";
108
108
  static deserialize(data: SerializedRefineDocumentsChain): Promise<RefineDocumentsChain>;
@@ -264,7 +264,7 @@ export class RefineDocumentsChain extends BaseChain {
264
264
  this.initialResponseName =
265
265
  fields.initialResponseName ?? this.initialResponseName;
266
266
  }
267
- _constructInitialInputs(doc, rest) {
267
+ async _constructInitialInputs(doc, rest) {
268
268
  const baseInfo = {
269
269
  page_content: doc.pageContent,
270
270
  ...doc.metadata,
@@ -274,14 +274,14 @@ export class RefineDocumentsChain extends BaseChain {
274
274
  documentInfo[value] = baseInfo[value];
275
275
  });
276
276
  const baseInputs = {
277
- [this.documentVariableName]: this.documentPrompt.format({
277
+ [this.documentVariableName]: await this.documentPrompt.format({
278
278
  ...documentInfo,
279
279
  }),
280
280
  };
281
281
  const inputs = { ...baseInputs, ...rest };
282
282
  return inputs;
283
283
  }
284
- _constructRefineInputs(doc, res) {
284
+ async _constructRefineInputs(doc, res) {
285
285
  const baseInfo = {
286
286
  page_content: doc.pageContent,
287
287
  ...doc.metadata,
@@ -291,7 +291,7 @@ export class RefineDocumentsChain extends BaseChain {
291
291
  documentInfo[value] = baseInfo[value];
292
292
  });
293
293
  const baseInputs = {
294
- [this.documentVariableName]: this.documentPrompt.format({
294
+ [this.documentVariableName]: await this.documentPrompt.format({
295
295
  ...documentInfo,
296
296
  }),
297
297
  };
@@ -304,11 +304,11 @@ export class RefineDocumentsChain extends BaseChain {
304
304
  }
305
305
  const { [this.inputKey]: docs, ...rest } = values;
306
306
  const currentDocs = docs;
307
- const initialInputs = this._constructInitialInputs(currentDocs[0], rest);
307
+ const initialInputs = await this._constructInitialInputs(currentDocs[0], rest);
308
308
  let res = await this.llmChain.predict({ ...initialInputs });
309
309
  const refineSteps = [res];
310
310
  for (let i = 1; i < currentDocs.length; i += 1) {
311
- const refineInputs = this._constructRefineInputs(currentDocs[i], res);
311
+ const refineInputs = await this._constructRefineInputs(currentDocs[i], res);
312
312
  const inputs = { ...refineInputs, ...rest };
313
313
  res = await this.refineLLMChain.predict({ ...inputs });
314
314
  refineSteps.push(res);
@@ -17,7 +17,7 @@ const index_js_1 = require("../base_language/index.cjs");
17
17
  * import { OpenAI } from "langchain/llms/openai";
18
18
  * import { PromptTemplate } from "langchain/prompts";
19
19
  * const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke");
20
- * const llm = LLMChain({ llm: new OpenAI(), prompt });
20
+ * const llm = new LLMChain({ llm: new OpenAI(), prompt });
21
21
  * ```
22
22
  */
23
23
  class LLMChain extends base_js_1.BaseChain {
@@ -25,7 +25,7 @@ export interface LLMChainInput extends ChainInputs {
25
25
  * import { OpenAI } from "langchain/llms/openai";
26
26
  * import { PromptTemplate } from "langchain/prompts";
27
27
  * const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke");
28
- * const llm = LLMChain({ llm: new OpenAI(), prompt });
28
+ * const llm = new LLMChain({ llm: new OpenAI(), prompt });
29
29
  * ```
30
30
  */
31
31
  export declare class LLMChain extends BaseChain implements LLMChainInput {
@@ -14,7 +14,7 @@ import { BaseLanguageModel } from "../base_language/index.js";
14
14
  * import { OpenAI } from "langchain/llms/openai";
15
15
  * import { PromptTemplate } from "langchain/prompts";
16
16
  * const prompt = PromptTemplate.fromTemplate("Tell me a {adjective} joke");
17
- * const llm = LLMChain({ llm: new OpenAI(), prompt });
17
+ * const llm = new LLMChain({ llm: new OpenAI(), prompt });
18
18
  * ```
19
19
  */
20
20
  export class LLMChain extends BaseChain {
@@ -23,13 +23,14 @@ class UnstructuredLoader extends base_js_1.BaseDocumentLoader {
23
23
  this.webPath = webPath;
24
24
  }
25
25
  async _partition() {
26
- const { readFile } = await this.imports();
26
+ const { readFile, basename } = await this.imports();
27
27
  const buffer = await readFile(this.filePath);
28
+ const fileName = basename(this.filePath);
28
29
  // I'm aware this reads the file into memory first, but we have lots of work
29
30
  // to do on then consuming Documents in a streaming fashion anyway, so not
30
31
  // worried about this for now.
31
32
  const formData = new FormData();
32
- formData.append("files", new Blob([buffer]));
33
+ formData.append("files", new Blob([buffer]), fileName);
33
34
  const response = await fetch(this.webPath, {
34
35
  method: "POST",
35
36
  body: formData,
@@ -61,7 +62,8 @@ class UnstructuredLoader extends base_js_1.BaseDocumentLoader {
61
62
  async imports() {
62
63
  try {
63
64
  const { readFile } = await import("node:fs/promises");
64
- return { readFile };
65
+ const { basename } = await import("node:path");
66
+ return { readFile, basename };
65
67
  }
66
68
  catch (e) {
67
69
  console.error(e);
@@ -1,3 +1,7 @@
1
+ /// <reference types="node" resolution-mode="require"/>
2
+ /// <reference types="node" resolution-mode="require"/>
3
+ import type { basename as BasenameT } from "node:path";
4
+ import type { readFile as ReaFileT } from "node:fs/promises";
1
5
  import { Document } from "../../document.js";
2
6
  import { BaseDocumentLoader } from "../base.js";
3
7
  interface Element {
@@ -14,7 +18,8 @@ export declare class UnstructuredLoader extends BaseDocumentLoader {
14
18
  _partition(): Promise<Element[]>;
15
19
  load(): Promise<Document[]>;
16
20
  imports(): Promise<{
17
- readFile: typeof import("node:fs/promises")["readFile"];
21
+ readFile: typeof ReaFileT;
22
+ basename: typeof BasenameT;
18
23
  }>;
19
24
  }
20
25
  export {};
@@ -20,13 +20,14 @@ export class UnstructuredLoader extends BaseDocumentLoader {
20
20
  this.webPath = webPath;
21
21
  }
22
22
  async _partition() {
23
- const { readFile } = await this.imports();
23
+ const { readFile, basename } = await this.imports();
24
24
  const buffer = await readFile(this.filePath);
25
+ const fileName = basename(this.filePath);
25
26
  // I'm aware this reads the file into memory first, but we have lots of work
26
27
  // to do on then consuming Documents in a streaming fashion anyway, so not
27
28
  // worried about this for now.
28
29
  const formData = new FormData();
29
- formData.append("files", new Blob([buffer]));
30
+ formData.append("files", new Blob([buffer]), fileName);
30
31
  const response = await fetch(this.webPath, {
31
32
  method: "POST",
32
33
  body: formData,
@@ -58,7 +59,8 @@ export class UnstructuredLoader extends BaseDocumentLoader {
58
59
  async imports() {
59
60
  try {
60
61
  const { readFile } = await import("node:fs/promises");
61
- return { readFile };
62
+ const { basename } = await import("node:path");
63
+ return { readFile, basename };
62
64
  }
63
65
  catch (e) {
64
66
  console.error(e);
@@ -0,0 +1,75 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.MemoryVectorStore = void 0;
4
+ const ml_distance_1 = require("ml-distance");
5
+ const base_js_1 = require("./base.cjs");
6
+ const document_js_1 = require("../document.cjs");
7
+ class MemoryVectorStore extends base_js_1.VectorStore {
8
+ constructor(embeddings, { similarity, ...rest } = {}) {
9
+ super(embeddings, rest);
10
+ Object.defineProperty(this, "memoryVectors", {
11
+ enumerable: true,
12
+ configurable: true,
13
+ writable: true,
14
+ value: []
15
+ });
16
+ Object.defineProperty(this, "similarity", {
17
+ enumerable: true,
18
+ configurable: true,
19
+ writable: true,
20
+ value: void 0
21
+ });
22
+ this.similarity = similarity ?? ml_distance_1.similarity.cosine;
23
+ }
24
+ async addDocuments(documents) {
25
+ const texts = documents.map(({ pageContent }) => pageContent);
26
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents);
27
+ }
28
+ async addVectors(vectors, documents) {
29
+ const memoryVectors = vectors.map((embedding, idx) => ({
30
+ content: documents[idx].pageContent,
31
+ embedding,
32
+ metadata: documents[idx].metadata,
33
+ }));
34
+ this.memoryVectors = this.memoryVectors.concat(memoryVectors);
35
+ }
36
+ async similaritySearchVectorWithScore(query, k) {
37
+ const searches = this.memoryVectors
38
+ .map((vector, index) => ({
39
+ similarity: this.similarity(query, vector.embedding),
40
+ index,
41
+ }))
42
+ .sort((a, b) => (a.similarity > b.similarity ? -1 : 0))
43
+ .slice(0, k);
44
+ const result = searches.map((search) => [
45
+ new document_js_1.Document({
46
+ metadata: this.memoryVectors[search.index].metadata,
47
+ pageContent: this.memoryVectors[search.index].content,
48
+ }),
49
+ search.similarity,
50
+ ]);
51
+ return result;
52
+ }
53
+ static async fromTexts(texts, metadatas, embeddings, dbConfig) {
54
+ const docs = [];
55
+ for (let i = 0; i < texts.length; i += 1) {
56
+ const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas;
57
+ const newDoc = new document_js_1.Document({
58
+ pageContent: texts[i],
59
+ metadata,
60
+ });
61
+ docs.push(newDoc);
62
+ }
63
+ return MemoryVectorStore.fromDocuments(docs, embeddings, dbConfig);
64
+ }
65
+ static async fromDocuments(docs, embeddings, dbConfig) {
66
+ const instance = new this(embeddings, dbConfig);
67
+ await instance.addDocuments(docs);
68
+ return instance;
69
+ }
70
+ static async fromExistingIndex(embeddings, dbConfig) {
71
+ const instance = new this(embeddings, dbConfig);
72
+ return instance;
73
+ }
74
+ }
75
+ exports.MemoryVectorStore = MemoryVectorStore;
@@ -0,0 +1,24 @@
1
+ import { similarity as ml_distance_similarity } from "ml-distance";
2
+ import { VectorStore } from "./base.js";
3
+ import { Embeddings } from "../embeddings/base.js";
4
+ import { Document } from "../document.js";
5
+ interface MemoryVector {
6
+ content: string;
7
+ embedding: number[];
8
+ metadata: Record<string, any>;
9
+ }
10
+ export interface MemoryVectorStoreArgs {
11
+ similarity?: typeof ml_distance_similarity.cosine;
12
+ }
13
+ export declare class MemoryVectorStore extends VectorStore {
14
+ memoryVectors: MemoryVector[];
15
+ similarity: typeof ml_distance_similarity.cosine;
16
+ constructor(embeddings: Embeddings, { similarity, ...rest }?: MemoryVectorStoreArgs);
17
+ addDocuments(documents: Document[]): Promise<void>;
18
+ addVectors(vectors: number[][], documents: Document[]): Promise<void>;
19
+ similaritySearchVectorWithScore(query: number[], k: number): Promise<[Document, number][]>;
20
+ static fromTexts(texts: string[], metadatas: object[] | object, embeddings: Embeddings, dbConfig?: MemoryVectorStoreArgs): Promise<MemoryVectorStore>;
21
+ static fromDocuments(docs: Document[], embeddings: Embeddings, dbConfig?: MemoryVectorStoreArgs): Promise<MemoryVectorStore>;
22
+ static fromExistingIndex(embeddings: Embeddings, dbConfig?: MemoryVectorStoreArgs): Promise<MemoryVectorStore>;
23
+ }
24
+ export {};
@@ -0,0 +1,71 @@
1
+ import { similarity as ml_distance_similarity } from "ml-distance";
2
+ import { VectorStore } from "./base.js";
3
+ import { Document } from "../document.js";
4
+ export class MemoryVectorStore extends VectorStore {
5
+ constructor(embeddings, { similarity, ...rest } = {}) {
6
+ super(embeddings, rest);
7
+ Object.defineProperty(this, "memoryVectors", {
8
+ enumerable: true,
9
+ configurable: true,
10
+ writable: true,
11
+ value: []
12
+ });
13
+ Object.defineProperty(this, "similarity", {
14
+ enumerable: true,
15
+ configurable: true,
16
+ writable: true,
17
+ value: void 0
18
+ });
19
+ this.similarity = similarity ?? ml_distance_similarity.cosine;
20
+ }
21
+ async addDocuments(documents) {
22
+ const texts = documents.map(({ pageContent }) => pageContent);
23
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents);
24
+ }
25
+ async addVectors(vectors, documents) {
26
+ const memoryVectors = vectors.map((embedding, idx) => ({
27
+ content: documents[idx].pageContent,
28
+ embedding,
29
+ metadata: documents[idx].metadata,
30
+ }));
31
+ this.memoryVectors = this.memoryVectors.concat(memoryVectors);
32
+ }
33
+ async similaritySearchVectorWithScore(query, k) {
34
+ const searches = this.memoryVectors
35
+ .map((vector, index) => ({
36
+ similarity: this.similarity(query, vector.embedding),
37
+ index,
38
+ }))
39
+ .sort((a, b) => (a.similarity > b.similarity ? -1 : 0))
40
+ .slice(0, k);
41
+ const result = searches.map((search) => [
42
+ new Document({
43
+ metadata: this.memoryVectors[search.index].metadata,
44
+ pageContent: this.memoryVectors[search.index].content,
45
+ }),
46
+ search.similarity,
47
+ ]);
48
+ return result;
49
+ }
50
+ static async fromTexts(texts, metadatas, embeddings, dbConfig) {
51
+ const docs = [];
52
+ for (let i = 0; i < texts.length; i += 1) {
53
+ const metadata = Array.isArray(metadatas) ? metadatas[i] : metadatas;
54
+ const newDoc = new Document({
55
+ pageContent: texts[i],
56
+ metadata,
57
+ });
58
+ docs.push(newDoc);
59
+ }
60
+ return MemoryVectorStore.fromDocuments(docs, embeddings, dbConfig);
61
+ }
62
+ static async fromDocuments(docs, embeddings, dbConfig) {
63
+ const instance = new this(embeddings, dbConfig);
64
+ await instance.addDocuments(docs);
65
+ return instance;
66
+ }
67
+ static async fromExistingIndex(embeddings, dbConfig) {
68
+ const instance = new this(embeddings, dbConfig);
69
+ return instance;
70
+ }
71
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "langchain",
3
- "version": "0.0.53",
3
+ "version": "0.0.54",
4
4
  "description": "Typescript bindings for langchain",
5
5
  "type": "module",
6
6
  "engines": {
@@ -79,6 +79,9 @@
79
79
  "vectorstores/base.cjs",
80
80
  "vectorstores/base.js",
81
81
  "vectorstores/base.d.ts",
82
+ "vectorstores/memory.cjs",
83
+ "vectorstores/memory.js",
84
+ "vectorstores/memory.d.ts",
82
85
  "vectorstores/chroma.cjs",
83
86
  "vectorstores/chroma.js",
84
87
  "vectorstores/chroma.d.ts",
@@ -389,6 +392,7 @@
389
392
  "expr-eval": "^2.0.2",
390
393
  "flat": "^5.0.2",
391
394
  "jsonpointer": "^5.0.1",
395
+ "ml-distance": "^4.0.0",
392
396
  "object-hash": "^3.0.0",
393
397
  "openai": "^3.2.0",
394
398
  "p-queue": "^6.6.2",
@@ -541,6 +545,11 @@
541
545
  "import": "./vectorstores/base.js",
542
546
  "require": "./vectorstores/base.cjs"
543
547
  },
548
+ "./vectorstores/memory": {
549
+ "types": "./vectorstores/memory.d.ts",
550
+ "import": "./vectorstores/memory.js",
551
+ "require": "./vectorstores/memory.cjs"
552
+ },
544
553
  "./vectorstores/chroma": {
545
554
  "types": "./vectorstores/chroma.d.ts",
546
555
  "import": "./vectorstores/chroma.js",
@@ -0,0 +1 @@
1
+ module.exports = require('../dist/vectorstores/memory.cjs');
@@ -0,0 +1 @@
1
+ export * from '../dist/vectorstores/memory.js'
@@ -0,0 +1 @@
1
+ export * from '../dist/vectorstores/memory.js'