langchain 0.0.104 → 0.0.105

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/dist/base_language/count_tokens.cjs +3 -2
  2. package/dist/base_language/count_tokens.js +3 -2
  3. package/dist/embeddings/openai.cjs +2 -2
  4. package/dist/embeddings/openai.js +2 -2
  5. package/dist/output_parsers/index.d.ts +1 -1
  6. package/dist/vectorstores/base.cjs +6 -2
  7. package/dist/vectorstores/base.d.ts +6 -3
  8. package/dist/vectorstores/base.js +6 -2
  9. package/dist/vectorstores/chroma.cjs +23 -8
  10. package/dist/vectorstores/chroma.d.ts +16 -15
  11. package/dist/vectorstores/chroma.js +23 -8
  12. package/dist/vectorstores/elasticsearch.cjs +39 -3
  13. package/dist/vectorstores/elasticsearch.d.ts +9 -2
  14. package/dist/vectorstores/elasticsearch.js +16 -3
  15. package/dist/vectorstores/faiss.cjs +7 -3
  16. package/dist/vectorstores/faiss.d.ts +2 -2
  17. package/dist/vectorstores/faiss.js +7 -3
  18. package/dist/vectorstores/hnswlib.cjs +2 -1
  19. package/dist/vectorstores/hnswlib.js +2 -1
  20. package/dist/vectorstores/pinecone.cjs +29 -3
  21. package/dist/vectorstores/pinecone.d.ts +12 -2
  22. package/dist/vectorstores/pinecone.js +29 -3
  23. package/dist/vectorstores/redis.cjs +1 -1
  24. package/dist/vectorstores/redis.js +1 -1
  25. package/dist/vectorstores/supabase.cjs +12 -1
  26. package/dist/vectorstores/supabase.d.ts +5 -2
  27. package/dist/vectorstores/supabase.js +12 -1
  28. package/dist/vectorstores/tigris.cjs +4 -3
  29. package/dist/vectorstores/tigris.d.ts +6 -2
  30. package/dist/vectorstores/tigris.js +4 -3
  31. package/dist/vectorstores/weaviate.cjs +17 -5
  32. package/dist/vectorstores/weaviate.d.ts +9 -2
  33. package/dist/vectorstores/weaviate.js +17 -5
  34. package/package.json +3 -3
@@ -56,13 +56,14 @@ const getModelContextSize = (modelName) => {
56
56
  };
57
57
  exports.getModelContextSize = getModelContextSize;
58
58
  const calculateMaxTokens = async ({ prompt, modelName, }) => {
59
- // fallback to approximate calculation if tiktoken is not available
60
- let numTokens = Math.ceil(prompt.length / 4);
59
+ let numTokens;
61
60
  try {
62
61
  numTokens = (await (0, tiktoken_js_1.encodingForModel)(modelName)).encode(prompt).length;
63
62
  }
64
63
  catch (error) {
65
64
  console.warn("Failed to calculate number of tokens, falling back to approximate count");
65
+ // fallback to approximate calculation if tiktoken is not available
66
+ numTokens = Math.ceil(prompt.length / 4);
66
67
  }
67
68
  const maxTokens = (0, exports.getModelContextSize)(modelName);
68
69
  return maxTokens - numTokens;
@@ -50,13 +50,14 @@ export const getModelContextSize = (modelName) => {
50
50
  }
51
51
  };
52
52
  export const calculateMaxTokens = async ({ prompt, modelName, }) => {
53
- // fallback to approximate calculation if tiktoken is not available
54
- let numTokens = Math.ceil(prompt.length / 4);
53
+ let numTokens;
55
54
  try {
56
55
  numTokens = (await encodingForModel(modelName)).encode(prompt).length;
57
56
  }
58
57
  catch (error) {
59
58
  console.warn("Failed to calculate number of tokens, falling back to approximate count");
59
+ // fallback to approximate calculation if tiktoken is not available
60
+ numTokens = Math.ceil(prompt.length / 4);
60
61
  }
61
62
  const maxTokens = getModelContextSize(modelName);
62
63
  return maxTokens - numTokens;
@@ -121,7 +121,7 @@ class OpenAIEmbeddings extends base_js_1.Embeddings {
121
121
  };
122
122
  }
123
123
  async embedDocuments(texts) {
124
- const subPrompts = (0, chunk_js_1.chunkArray)(this.stripNewLines ? texts.map((t) => t.replaceAll("\n", " ")) : texts, this.batchSize);
124
+ const subPrompts = (0, chunk_js_1.chunkArray)(this.stripNewLines ? texts.map((t) => t.replace(/\n/g, " ")) : texts, this.batchSize);
125
125
  const embeddings = [];
126
126
  for (let i = 0; i < subPrompts.length; i += 1) {
127
127
  const input = subPrompts[i];
@@ -138,7 +138,7 @@ class OpenAIEmbeddings extends base_js_1.Embeddings {
138
138
  async embedQuery(text) {
139
139
  const { data } = await this.embeddingWithRetry({
140
140
  model: this.modelName,
141
- input: this.stripNewLines ? text.replaceAll("\n", " ") : text,
141
+ input: this.stripNewLines ? text.replace(/\n/g, " ") : text,
142
142
  });
143
143
  return data.data[0].embedding;
144
144
  }
@@ -115,7 +115,7 @@ export class OpenAIEmbeddings extends Embeddings {
115
115
  };
116
116
  }
117
117
  async embedDocuments(texts) {
118
- const subPrompts = chunkArray(this.stripNewLines ? texts.map((t) => t.replaceAll("\n", " ")) : texts, this.batchSize);
118
+ const subPrompts = chunkArray(this.stripNewLines ? texts.map((t) => t.replace(/\n/g, " ")) : texts, this.batchSize);
119
119
  const embeddings = [];
120
120
  for (let i = 0; i < subPrompts.length; i += 1) {
121
121
  const input = subPrompts[i];
@@ -132,7 +132,7 @@ export class OpenAIEmbeddings extends Embeddings {
132
132
  async embedQuery(text) {
133
133
  const { data } = await this.embeddingWithRetry({
134
134
  model: this.modelName,
135
- input: this.stripNewLines ? text.replaceAll("\n", " ") : text,
135
+ input: this.stripNewLines ? text.replace(/\n/g, " ") : text,
136
136
  });
137
137
  return data.data[0].embedding;
138
138
  }
@@ -5,4 +5,4 @@ export { OutputFixingParser } from "./fix.js";
5
5
  export { CombiningOutputParser } from "./combining.js";
6
6
  export { RouterOutputParser, RouterOutputParserInput } from "./router.js";
7
7
  export { CustomListOutputParser } from "./list.js";
8
- export { OutputFunctionsParser, JsonOutputFunctionsParser, JsonKeyOutputFunctionsParser, } from "../output_parsers/openai_functions.js";
8
+ export { FunctionParameters, OutputFunctionsParser, JsonOutputFunctionsParser, JsonKeyOutputFunctionsParser, } from "../output_parsers/openai_functions.js";
@@ -31,8 +31,8 @@ class VectorStoreRetriever extends index_js_1.BaseRetriever {
31
31
  const results = await this.vectorStore.similaritySearch(query, this.k, this.filter);
32
32
  return results;
33
33
  }
34
- async addDocuments(documents) {
35
- await this.vectorStore.addDocuments(documents);
34
+ async addDocuments(documents, options) {
35
+ return this.vectorStore.addDocuments(documents, options);
36
36
  }
37
37
  }
38
38
  exports.VectorStoreRetriever = VectorStoreRetriever;
@@ -47,6 +47,10 @@ class VectorStore {
47
47
  });
48
48
  this.embeddings = embeddings;
49
49
  }
50
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
51
+ async delete(_params) {
52
+ throw new Error("Not implemented.");
53
+ }
50
54
  async similaritySearch(query, k = 4, filter = undefined) {
51
55
  const results = await this.similaritySearchVectorWithScore(await this.embeddings.embedQuery(query), k, filter);
52
56
  return results.map((result) => result[0]);
@@ -1,6 +1,7 @@
1
1
  import { Embeddings } from "../embeddings/base.js";
2
2
  import { Document } from "../document.js";
3
3
  import { BaseRetriever } from "../schema/index.js";
4
+ type AddDocumentOptions = Record<string, any>;
4
5
  export interface VectorStoreRetrieverInput<V extends VectorStore> {
5
6
  vectorStore: V;
6
7
  k?: number;
@@ -12,14 +13,15 @@ export declare class VectorStoreRetriever<V extends VectorStore = VectorStore> e
12
13
  filter?: V["FilterType"];
13
14
  constructor(fields: VectorStoreRetrieverInput<V>);
14
15
  getRelevantDocuments(query: string): Promise<Document[]>;
15
- addDocuments(documents: Document[]): Promise<void>;
16
+ addDocuments(documents: Document[], options?: AddDocumentOptions): Promise<string[] | void>;
16
17
  }
17
18
  export declare abstract class VectorStore {
18
19
  FilterType: object;
19
20
  embeddings: Embeddings;
20
21
  constructor(embeddings: Embeddings, _dbConfig: Record<string, any>);
21
- abstract addVectors(vectors: number[][], documents: Document[]): Promise<void>;
22
- abstract addDocuments(documents: Document[]): Promise<void>;
22
+ abstract addVectors(vectors: number[][], documents: Document[], options?: AddDocumentOptions): Promise<string[] | void>;
23
+ abstract addDocuments(documents: Document[], options?: AddDocumentOptions): Promise<string[] | void>;
24
+ delete(_params?: Record<string, any>): Promise<void>;
23
25
  abstract similaritySearchVectorWithScore(query: number[], k: number, filter?: this["FilterType"]): Promise<[Document, number][]>;
24
26
  similaritySearch(query: string, k?: number, filter?: this["FilterType"] | undefined): Promise<Document[]>;
25
27
  similaritySearchWithScore(query: string, k?: number, filter?: this["FilterType"] | undefined): Promise<[Document, number][]>;
@@ -31,3 +33,4 @@ export declare abstract class SaveableVectorStore extends VectorStore {
31
33
  abstract save(directory: string): Promise<void>;
32
34
  static load(_directory: string, _embeddings: Embeddings): Promise<SaveableVectorStore>;
33
35
  }
36
+ export {};
@@ -28,8 +28,8 @@ export class VectorStoreRetriever extends BaseRetriever {
28
28
  const results = await this.vectorStore.similaritySearch(query, this.k, this.filter);
29
29
  return results;
30
30
  }
31
- async addDocuments(documents) {
32
- await this.vectorStore.addDocuments(documents);
31
+ async addDocuments(documents, options) {
32
+ return this.vectorStore.addDocuments(documents, options);
33
33
  }
34
34
  }
35
35
  export class VectorStore {
@@ -43,6 +43,10 @@ export class VectorStore {
43
43
  });
44
44
  this.embeddings = embeddings;
45
45
  }
46
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
47
+ async delete(_params) {
48
+ throw new Error("Not implemented.");
49
+ }
46
50
  async similaritySearch(query, k = 4, filter = undefined) {
47
51
  const results = await this.similaritySearchVectorWithScore(await this.embeddings.embedQuery(query), k, filter);
48
52
  return results.map((result) => result[0]);
@@ -77,9 +77,9 @@ class Chroma extends base_js_1.VectorStore {
77
77
  }
78
78
  this.filter = args.filter;
79
79
  }
80
- async addDocuments(documents) {
80
+ async addDocuments(documents, options) {
81
81
  const texts = documents.map(({ pageContent }) => pageContent);
82
- await this.addVectors(await this.embeddings.embedDocuments(texts), documents);
82
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
83
83
  }
84
84
  async ensureCollection() {
85
85
  if (!this.collection) {
@@ -98,9 +98,9 @@ class Chroma extends base_js_1.VectorStore {
98
98
  }
99
99
  return this.collection;
100
100
  }
101
- async addVectors(vectors, documents) {
101
+ async addVectors(vectors, documents, options) {
102
102
  if (vectors.length === 0) {
103
- return;
103
+ return [];
104
104
  }
105
105
  if (this.numDimensions === undefined) {
106
106
  this.numDimensions = vectors[0].length;
@@ -111,14 +111,29 @@ class Chroma extends base_js_1.VectorStore {
111
111
  if (vectors[0].length !== this.numDimensions) {
112
112
  throw new Error(`Vectors must have the same length as the number of dimensions (${this.numDimensions})`);
113
113
  }
114
+ const documentIds = options?.ids ?? Array.from({ length: vectors.length }, () => uuid.v1());
114
115
  const collection = await this.ensureCollection();
115
- const docstoreSize = await collection.count();
116
- await collection.add({
117
- ids: Array.from({ length: vectors.length }, (_, i) => (docstoreSize + i).toString()),
116
+ await collection.upsert({
117
+ ids: documentIds,
118
118
  embeddings: vectors,
119
119
  metadatas: documents.map(({ metadata }) => metadata),
120
120
  documents: documents.map(({ pageContent }) => pageContent),
121
121
  });
122
+ return documentIds;
123
+ }
124
+ async delete(params) {
125
+ const collection = await this.ensureCollection();
126
+ if (Array.isArray(params.ids)) {
127
+ await collection.delete({ ids: params.ids });
128
+ }
129
+ else if (params.filter) {
130
+ await collection.delete({
131
+ where: { ...params.filter },
132
+ });
133
+ }
134
+ else {
135
+ throw new Error(`You must provide one of "ids or "filter".`);
136
+ }
122
137
  }
123
138
  async similaritySearchVectorWithScore(query, k, filter) {
124
139
  if (filter && this.filter) {
@@ -164,7 +179,7 @@ class Chroma extends base_js_1.VectorStore {
164
179
  });
165
180
  docs.push(newDoc);
166
181
  }
167
- return Chroma.fromDocuments(docs, embeddings, dbConfig);
182
+ return this.fromDocuments(docs, embeddings, dbConfig);
168
183
  }
169
184
  static async fromDocuments(docs, embeddings, dbConfig) {
170
185
  const instance = new this(embeddings, dbConfig);
@@ -1,4 +1,5 @@
1
1
  import type { ChromaClient as ChromaClientT, Collection } from "chromadb";
2
+ import type { Where } from "chromadb/dist/main/types.js";
2
3
  import { Embeddings } from "../embeddings/base.js";
3
4
  import { VectorStore } from "./base.js";
4
5
  import { Document } from "../document.js";
@@ -13,8 +14,12 @@ export type ChromaLibArgs = {
13
14
  collectionName?: string;
14
15
  filter?: object;
15
16
  };
17
+ export interface ChromaDeleteParams<T> {
18
+ ids?: string[];
19
+ filter?: T;
20
+ }
16
21
  export declare class Chroma extends VectorStore {
17
- FilterType: object;
22
+ FilterType: Where;
18
23
  index?: ChromaClientT;
19
24
  collection?: Collection;
20
25
  collectionName: string;
@@ -22,22 +27,18 @@ export declare class Chroma extends VectorStore {
22
27
  url: string;
23
28
  filter?: object;
24
29
  constructor(embeddings: Embeddings, args: ChromaLibArgs);
25
- addDocuments(documents: Document[]): Promise<void>;
30
+ addDocuments(documents: Document[], options?: {
31
+ ids?: string[];
32
+ }): Promise<string[]>;
26
33
  ensureCollection(): Promise<Collection>;
27
- addVectors(vectors: number[][], documents: Document[]): Promise<void>;
34
+ addVectors(vectors: number[][], documents: Document[], options?: {
35
+ ids?: string[];
36
+ }): Promise<string[]>;
37
+ delete(params: ChromaDeleteParams<this["FilterType"]>): Promise<void>;
28
38
  similaritySearchVectorWithScore(query: number[], k: number, filter?: this["FilterType"]): Promise<[Document<Record<string, any>>, number][]>;
29
- static fromTexts(texts: string[], metadatas: object[] | object, embeddings: Embeddings, dbConfig: {
30
- collectionName?: string;
31
- url?: string;
32
- }): Promise<Chroma>;
33
- static fromDocuments(docs: Document[], embeddings: Embeddings, dbConfig: {
34
- collectionName?: string;
35
- url?: string;
36
- }): Promise<Chroma>;
37
- static fromExistingCollection(embeddings: Embeddings, dbConfig: {
38
- collectionName: string;
39
- url?: string;
40
- }): Promise<Chroma>;
39
+ static fromTexts(texts: string[], metadatas: object[] | object, embeddings: Embeddings, dbConfig: ChromaLibArgs): Promise<Chroma>;
40
+ static fromDocuments(docs: Document[], embeddings: Embeddings, dbConfig: ChromaLibArgs): Promise<Chroma>;
41
+ static fromExistingCollection(embeddings: Embeddings, dbConfig: ChromaLibArgs): Promise<Chroma>;
41
42
  static imports(): Promise<{
42
43
  ChromaClient: typeof ChromaClientT;
43
44
  }>;
@@ -51,9 +51,9 @@ export class Chroma extends VectorStore {
51
51
  }
52
52
  this.filter = args.filter;
53
53
  }
54
- async addDocuments(documents) {
54
+ async addDocuments(documents, options) {
55
55
  const texts = documents.map(({ pageContent }) => pageContent);
56
- await this.addVectors(await this.embeddings.embedDocuments(texts), documents);
56
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
57
57
  }
58
58
  async ensureCollection() {
59
59
  if (!this.collection) {
@@ -72,9 +72,9 @@ export class Chroma extends VectorStore {
72
72
  }
73
73
  return this.collection;
74
74
  }
75
- async addVectors(vectors, documents) {
75
+ async addVectors(vectors, documents, options) {
76
76
  if (vectors.length === 0) {
77
- return;
77
+ return [];
78
78
  }
79
79
  if (this.numDimensions === undefined) {
80
80
  this.numDimensions = vectors[0].length;
@@ -85,14 +85,29 @@ export class Chroma extends VectorStore {
85
85
  if (vectors[0].length !== this.numDimensions) {
86
86
  throw new Error(`Vectors must have the same length as the number of dimensions (${this.numDimensions})`);
87
87
  }
88
+ const documentIds = options?.ids ?? Array.from({ length: vectors.length }, () => uuid.v1());
88
89
  const collection = await this.ensureCollection();
89
- const docstoreSize = await collection.count();
90
- await collection.add({
91
- ids: Array.from({ length: vectors.length }, (_, i) => (docstoreSize + i).toString()),
90
+ await collection.upsert({
91
+ ids: documentIds,
92
92
  embeddings: vectors,
93
93
  metadatas: documents.map(({ metadata }) => metadata),
94
94
  documents: documents.map(({ pageContent }) => pageContent),
95
95
  });
96
+ return documentIds;
97
+ }
98
+ async delete(params) {
99
+ const collection = await this.ensureCollection();
100
+ if (Array.isArray(params.ids)) {
101
+ await collection.delete({ ids: params.ids });
102
+ }
103
+ else if (params.filter) {
104
+ await collection.delete({
105
+ where: { ...params.filter },
106
+ });
107
+ }
108
+ else {
109
+ throw new Error(`You must provide one of "ids or "filter".`);
110
+ }
96
111
  }
97
112
  async similaritySearchVectorWithScore(query, k, filter) {
98
113
  if (filter && this.filter) {
@@ -138,7 +153,7 @@ export class Chroma extends VectorStore {
138
153
  });
139
154
  docs.push(newDoc);
140
155
  }
141
- return Chroma.fromDocuments(docs, embeddings, dbConfig);
156
+ return this.fromDocuments(docs, embeddings, dbConfig);
142
157
  }
143
158
  static async fromDocuments(docs, embeddings, dbConfig) {
144
159
  const instance = new this(embeddings, dbConfig);
@@ -1,6 +1,30 @@
1
1
  "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || function (mod) {
19
+ if (mod && mod.__esModule) return mod;
20
+ var result = {};
21
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
22
+ __setModuleDefault(result, mod);
23
+ return result;
24
+ };
2
25
  Object.defineProperty(exports, "__esModule", { value: true });
3
26
  exports.ElasticVectorSearch = void 0;
27
+ const uuid = __importStar(require("uuid"));
4
28
  const document_js_1 = require("../document.cjs");
5
29
  const base_js_1 = require("./base.cjs");
6
30
  class ElasticVectorSearch extends base_js_1.VectorStore {
@@ -56,15 +80,17 @@ class ElasticVectorSearch extends base_js_1.VectorStore {
56
80
  this.client = args.client;
57
81
  this.indexName = args.indexName ?? "documents";
58
82
  }
59
- async addDocuments(documents) {
83
+ async addDocuments(documents, options) {
60
84
  const texts = documents.map(({ pageContent }) => pageContent);
61
- return this.addVectors(await this.embeddings.embedDocuments(texts), documents);
85
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
62
86
  }
63
- async addVectors(vectors, documents) {
87
+ async addVectors(vectors, documents, options) {
64
88
  await this.ensureIndexExists(vectors[0].length, this.engine, this.similarity, this.efConstruction, this.m);
89
+ const documentIds = options?.ids ?? Array.from({ length: vectors.length }, () => uuid.v4());
65
90
  const operations = vectors.flatMap((embedding, idx) => [
66
91
  {
67
92
  index: {
93
+ _id: documentIds[idx],
68
94
  _index: this.indexName,
69
95
  },
70
96
  },
@@ -75,6 +101,7 @@ class ElasticVectorSearch extends base_js_1.VectorStore {
75
101
  },
76
102
  ]);
77
103
  await this.client.bulk({ refresh: true, operations });
104
+ return documentIds;
78
105
  }
79
106
  async similaritySearchVectorWithScore(query, k, filter) {
80
107
  const result = await this.client.search({
@@ -96,6 +123,15 @@ class ElasticVectorSearch extends base_js_1.VectorStore {
96
123
  hit._score,
97
124
  ]);
98
125
  }
126
+ async delete(params) {
127
+ const operations = params.ids.map((id) => ({
128
+ delete: {
129
+ _id: id,
130
+ _index: this.indexName,
131
+ },
132
+ }));
133
+ await this.client.bulk({ refresh: true, operations });
134
+ }
99
135
  static fromTexts(texts, metadatas, embeddings, args) {
100
136
  const documents = texts.map((text, idx) => {
101
137
  const metadata = Array.isArray(metadatas) ? metadatas[idx] : metadatas;
@@ -27,9 +27,16 @@ export declare class ElasticVectorSearch extends VectorStore {
27
27
  private readonly m;
28
28
  private readonly candidates;
29
29
  constructor(embeddings: Embeddings, args: ElasticClientArgs);
30
- addDocuments(documents: Document[]): Promise<void>;
31
- addVectors(vectors: number[][], documents: Document[]): Promise<void>;
30
+ addDocuments(documents: Document[], options?: {
31
+ ids?: string[];
32
+ }): Promise<string[]>;
33
+ addVectors(vectors: number[][], documents: Document[], options?: {
34
+ ids?: string[];
35
+ }): Promise<string[]>;
32
36
  similaritySearchVectorWithScore(query: number[], k: number, filter?: ElasticFilter | undefined): Promise<[Document, number][]>;
37
+ delete(params: {
38
+ ids: string[];
39
+ }): Promise<void>;
33
40
  static fromTexts(texts: string[], metadatas: object[] | object, embeddings: Embeddings, args: ElasticClientArgs): Promise<ElasticVectorSearch>;
34
41
  static fromDocuments(docs: Document[], embeddings: Embeddings, dbConfig: ElasticClientArgs): Promise<ElasticVectorSearch>;
35
42
  static fromExistingIndex(embeddings: Embeddings, dbConfig: ElasticClientArgs): Promise<ElasticVectorSearch>;
@@ -1,3 +1,4 @@
1
+ import * as uuid from "uuid";
1
2
  import { Document } from "../document.js";
2
3
  import { VectorStore } from "./base.js";
3
4
  export class ElasticVectorSearch extends VectorStore {
@@ -53,15 +54,17 @@ export class ElasticVectorSearch extends VectorStore {
53
54
  this.client = args.client;
54
55
  this.indexName = args.indexName ?? "documents";
55
56
  }
56
- async addDocuments(documents) {
57
+ async addDocuments(documents, options) {
57
58
  const texts = documents.map(({ pageContent }) => pageContent);
58
- return this.addVectors(await this.embeddings.embedDocuments(texts), documents);
59
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
59
60
  }
60
- async addVectors(vectors, documents) {
61
+ async addVectors(vectors, documents, options) {
61
62
  await this.ensureIndexExists(vectors[0].length, this.engine, this.similarity, this.efConstruction, this.m);
63
+ const documentIds = options?.ids ?? Array.from({ length: vectors.length }, () => uuid.v4());
62
64
  const operations = vectors.flatMap((embedding, idx) => [
63
65
  {
64
66
  index: {
67
+ _id: documentIds[idx],
65
68
  _index: this.indexName,
66
69
  },
67
70
  },
@@ -72,6 +75,7 @@ export class ElasticVectorSearch extends VectorStore {
72
75
  },
73
76
  ]);
74
77
  await this.client.bulk({ refresh: true, operations });
78
+ return documentIds;
75
79
  }
76
80
  async similaritySearchVectorWithScore(query, k, filter) {
77
81
  const result = await this.client.search({
@@ -93,6 +97,15 @@ export class ElasticVectorSearch extends VectorStore {
93
97
  hit._score,
94
98
  ]);
95
99
  }
100
+ async delete(params) {
101
+ const operations = params.ids.map((id) => ({
102
+ delete: {
103
+ _id: id,
104
+ _index: this.indexName,
105
+ },
106
+ }));
107
+ await this.client.bulk({ refresh: true, operations });
108
+ }
96
109
  static fromTexts(texts, metadatas, embeddings, args) {
97
110
  const documents = texts.map((text, idx) => {
98
111
  const metadata = Array.isArray(metadatas) ? metadatas[idx] : metadatas;
@@ -76,7 +76,7 @@ class FaissStore extends base_js_1.SaveableVectorStore {
76
76
  }
77
77
  async addVectors(vectors, documents) {
78
78
  if (vectors.length === 0) {
79
- return;
79
+ return [];
80
80
  }
81
81
  if (vectors.length !== documents.length) {
82
82
  throw new Error(`Vectors and documents must have the same length`);
@@ -91,13 +91,16 @@ class FaissStore extends base_js_1.SaveableVectorStore {
91
91
  throw new Error(`Vectors must have the same length as the number of dimensions (${d})`);
92
92
  }
93
93
  const docstoreSize = this.index.ntotal();
94
+ const documentIds = [];
94
95
  for (let i = 0; i < vectors.length; i += 1) {
95
96
  const documentId = uuid.v4();
97
+ documentIds.push(documentId);
96
98
  const id = docstoreSize + i;
97
99
  this.index.add(vectors[i]);
98
100
  this._mapping[id] = documentId;
99
101
  this.docstore.add({ [documentId]: documents[i] });
100
102
  }
103
+ return documentIds;
101
104
  }
102
105
  async similaritySearchVectorWithScore(query, k) {
103
106
  const d = this.index.getDimension();
@@ -230,16 +233,17 @@ class FaissStore extends base_js_1.SaveableVectorStore {
230
233
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
231
234
  }
232
235
  catch (err) {
233
- throw new Error(`Could not import faiss-node. Please install faiss-node as a dependency with, e.g. \`npm install -S faiss-node\` and make sure you have \`libomp\` installed in your path.\n\nError: ${err?.message}`);
236
+ throw new Error(`Could not import faiss-node. Please install faiss-node as a dependency with, e.g. \`npm install -S faiss-node\`.\n\nError: ${err?.message}`);
234
237
  }
235
238
  }
236
239
  static async importPickleparser() {
237
240
  try {
238
241
  const { default: { Parser, NameRegistry }, } = await import("pickleparser");
239
242
  return { Parser, NameRegistry };
243
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
240
244
  }
241
245
  catch (err) {
242
- throw new Error("Please install pickleparser as a dependency with, e.g. `npm install -S pickleparser`");
246
+ throw new Error(`Could not import pickleparser. Please install pickleparser as a dependency with, e.g. \`npm install -S pickleparser\`.\n\nError: ${err?.message}`);
243
247
  }
244
248
  }
245
249
  }
@@ -15,10 +15,10 @@ export declare class FaissStore extends SaveableVectorStore {
15
15
  docstore: SynchronousInMemoryDocstore;
16
16
  args: FaissLibArgs;
17
17
  constructor(embeddings: Embeddings, args: FaissLibArgs);
18
- addDocuments(documents: Document[]): Promise<void>;
18
+ addDocuments(documents: Document[]): Promise<string[]>;
19
19
  get index(): IndexFlatL2;
20
20
  private set index(value);
21
- addVectors(vectors: number[][], documents: Document[]): Promise<void>;
21
+ addVectors(vectors: number[][], documents: Document[]): Promise<string[]>;
22
22
  similaritySearchVectorWithScore(query: number[], k: number): Promise<[Document<Record<string, any>>, number][]>;
23
23
  save(directory: string): Promise<void>;
24
24
  static load(directory: string, embeddings: Embeddings): Promise<FaissStore>;
@@ -50,7 +50,7 @@ export class FaissStore extends SaveableVectorStore {
50
50
  }
51
51
  async addVectors(vectors, documents) {
52
52
  if (vectors.length === 0) {
53
- return;
53
+ return [];
54
54
  }
55
55
  if (vectors.length !== documents.length) {
56
56
  throw new Error(`Vectors and documents must have the same length`);
@@ -65,13 +65,16 @@ export class FaissStore extends SaveableVectorStore {
65
65
  throw new Error(`Vectors must have the same length as the number of dimensions (${d})`);
66
66
  }
67
67
  const docstoreSize = this.index.ntotal();
68
+ const documentIds = [];
68
69
  for (let i = 0; i < vectors.length; i += 1) {
69
70
  const documentId = uuid.v4();
71
+ documentIds.push(documentId);
70
72
  const id = docstoreSize + i;
71
73
  this.index.add(vectors[i]);
72
74
  this._mapping[id] = documentId;
73
75
  this.docstore.add({ [documentId]: documents[i] });
74
76
  }
77
+ return documentIds;
75
78
  }
76
79
  async similaritySearchVectorWithScore(query, k) {
77
80
  const d = this.index.getDimension();
@@ -204,16 +207,17 @@ export class FaissStore extends SaveableVectorStore {
204
207
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
205
208
  }
206
209
  catch (err) {
207
- throw new Error(`Could not import faiss-node. Please install faiss-node as a dependency with, e.g. \`npm install -S faiss-node\` and make sure you have \`libomp\` installed in your path.\n\nError: ${err?.message}`);
210
+ throw new Error(`Could not import faiss-node. Please install faiss-node as a dependency with, e.g. \`npm install -S faiss-node\`.\n\nError: ${err?.message}`);
208
211
  }
209
212
  }
210
213
  static async importPickleparser() {
211
214
  try {
212
215
  const { default: { Parser, NameRegistry }, } = await import("pickleparser");
213
216
  return { Parser, NameRegistry };
217
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
214
218
  }
215
219
  catch (err) {
216
- throw new Error("Please install pickleparser as a dependency with, e.g. `npm install -S pickleparser`");
220
+ throw new Error(`Could not import pickleparser. Please install pickleparser as a dependency with, e.g. \`npm install -S pickleparser\`.\n\nError: ${err?.message}`);
217
221
  }
218
222
  }
219
223
  }
@@ -172,9 +172,10 @@ class HNSWLib extends base_js_1.SaveableVectorStore {
172
172
  try {
173
173
  const { default: { HierarchicalNSW }, } = await import("hnswlib-node");
174
174
  return { HierarchicalNSW };
175
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
175
176
  }
176
177
  catch (err) {
177
- throw new Error("Please install hnswlib-node as a dependency with, e.g. `npm install -S hnswlib-node`");
178
+ throw new Error(`Could not import hnswlib-node. Please install hnswlib-node as a dependency with, e.g. \`npm install -S hnswlib-node\`.\n\nError: ${err?.message}`);
178
179
  }
179
180
  }
180
181
  }
@@ -169,9 +169,10 @@ export class HNSWLib extends SaveableVectorStore {
169
169
  try {
170
170
  const { default: { HierarchicalNSW }, } = await import("hnswlib-node");
171
171
  return { HierarchicalNSW };
172
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
172
173
  }
173
174
  catch (err) {
174
- throw new Error("Please install hnswlib-node as a dependency with, e.g. `npm install -S hnswlib-node`");
175
+ throw new Error(`Could not import hnswlib-node. Please install hnswlib-node as a dependency with, e.g. \`npm install -S hnswlib-node\`.\n\nError: ${err?.message}`);
175
176
  }
176
177
  }
177
178
  }
@@ -64,11 +64,12 @@ class PineconeStore extends base_js_1.VectorStore {
64
64
  this.textKey = args.textKey ?? "text";
65
65
  this.filter = args.filter;
66
66
  }
67
- async addDocuments(documents, ids) {
67
+ async addDocuments(documents, options) {
68
68
  const texts = documents.map(({ pageContent }) => pageContent);
69
- return this.addVectors(await this.embeddings.embedDocuments(texts), documents, ids);
69
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
70
70
  }
71
- async addVectors(vectors, documents, ids) {
71
+ async addVectors(vectors, documents, options) {
72
+ const ids = Array.isArray(options) ? options : options?.ids;
72
73
  const documentIds = ids == null ? documents.map(() => uuid.v4()) : ids;
73
74
  const pineconeVectors = vectors.map((values, idx) => {
74
75
  // Pinecone doesn't support nested objects, so we flatten them
@@ -115,6 +116,31 @@ class PineconeStore extends base_js_1.VectorStore {
115
116
  },
116
117
  });
117
118
  }
119
+ return documentIds;
120
+ }
121
+ async delete(params) {
122
+ const { namespace = this.namespace, deleteAll, ids, ...rest } = params;
123
+ if (deleteAll) {
124
+ await this.pineconeIndex.delete1({
125
+ deleteAll: true,
126
+ namespace,
127
+ ...rest,
128
+ });
129
+ }
130
+ else if (ids) {
131
+ const batchSize = 1000;
132
+ for (let i = 0; i < ids.length; i += batchSize) {
133
+ const batchIds = ids.slice(i, i + batchSize);
134
+ await this.pineconeIndex.delete1({
135
+ ids: batchIds,
136
+ namespace,
137
+ ...rest,
138
+ });
139
+ }
140
+ }
141
+ else {
142
+ throw new Error("Either ids or delete_all must be provided.");
143
+ }
118
144
  }
119
145
  async similaritySearchVectorWithScore(query, k, filter) {
120
146
  if (filter && this.filter) {
@@ -9,6 +9,11 @@ export interface PineconeLibArgs {
9
9
  namespace?: string;
10
10
  filter?: PineconeMetadata;
11
11
  }
12
+ export type PineconeDeleteParams = {
13
+ ids?: string[];
14
+ deleteAll?: boolean;
15
+ namespace?: string;
16
+ };
12
17
  export declare class PineconeStore extends VectorStore {
13
18
  FilterType: PineconeMetadata;
14
19
  textKey: string;
@@ -16,8 +21,13 @@ export declare class PineconeStore extends VectorStore {
16
21
  pineconeIndex: VectorOperationsApi;
17
22
  filter?: PineconeMetadata;
18
23
  constructor(embeddings: Embeddings, args: PineconeLibArgs);
19
- addDocuments(documents: Document[], ids?: string[]): Promise<void>;
20
- addVectors(vectors: number[][], documents: Document[], ids?: string[]): Promise<void>;
24
+ addDocuments(documents: Document[], options?: {
25
+ ids?: string[];
26
+ } | string[]): Promise<string[]>;
27
+ addVectors(vectors: number[][], documents: Document[], options?: {
28
+ ids?: string[];
29
+ } | string[]): Promise<string[]>;
30
+ delete(params: PineconeDeleteParams): Promise<void>;
21
31
  similaritySearchVectorWithScore(query: number[], k: number, filter?: PineconeMetadata): Promise<[Document, number][]>;
22
32
  static fromTexts(texts: string[], metadatas: object[] | object, embeddings: Embeddings, dbConfig: {
23
33
  /**
@@ -35,11 +35,12 @@ export class PineconeStore extends VectorStore {
35
35
  this.textKey = args.textKey ?? "text";
36
36
  this.filter = args.filter;
37
37
  }
38
- async addDocuments(documents, ids) {
38
+ async addDocuments(documents, options) {
39
39
  const texts = documents.map(({ pageContent }) => pageContent);
40
- return this.addVectors(await this.embeddings.embedDocuments(texts), documents, ids);
40
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
41
41
  }
42
- async addVectors(vectors, documents, ids) {
42
+ async addVectors(vectors, documents, options) {
43
+ const ids = Array.isArray(options) ? options : options?.ids;
43
44
  const documentIds = ids == null ? documents.map(() => uuid.v4()) : ids;
44
45
  const pineconeVectors = vectors.map((values, idx) => {
45
46
  // Pinecone doesn't support nested objects, so we flatten them
@@ -86,6 +87,31 @@ export class PineconeStore extends VectorStore {
86
87
  },
87
88
  });
88
89
  }
90
+ return documentIds;
91
+ }
92
+ async delete(params) {
93
+ const { namespace = this.namespace, deleteAll, ids, ...rest } = params;
94
+ if (deleteAll) {
95
+ await this.pineconeIndex.delete1({
96
+ deleteAll: true,
97
+ namespace,
98
+ ...rest,
99
+ });
100
+ }
101
+ else if (ids) {
102
+ const batchSize = 1000;
103
+ for (let i = 0; i < ids.length; i += batchSize) {
104
+ const batchIds = ids.slice(i, i + batchSize);
105
+ await this.pineconeIndex.delete1({
106
+ ids: batchIds,
107
+ namespace,
108
+ ...rest,
109
+ });
110
+ }
111
+ }
112
+ else {
113
+ throw new Error("Either ids or delete_all must be provided.");
114
+ }
89
115
  }
90
116
  async similaritySearchVectorWithScore(query, k, filter) {
91
117
  if (filter && this.filter) {
@@ -69,7 +69,7 @@ class RedisVectorStore extends base_js_1.VectorStore {
69
69
  }
70
70
  async addDocuments(documents, options) {
71
71
  const texts = documents.map(({ pageContent }) => pageContent);
72
- await this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
72
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
73
73
  }
74
74
  async addVectors(vectors, documents, { keys, batchSize = 1000 } = {}) {
75
75
  // check if the index exists and create it if it doesn't
@@ -66,7 +66,7 @@ export class RedisVectorStore extends VectorStore {
66
66
  }
67
67
  async addDocuments(documents, options) {
68
68
  const texts = documents.map(({ pageContent }) => pageContent);
69
- await this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
69
+ return this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
70
70
  }
71
71
  async addVectors(vectors, documents, { keys, batchSize = 1000 } = {}) {
72
72
  // check if the index exists and create it if it doesn't
@@ -48,12 +48,23 @@ class SupabaseVectorStore extends base_js_1.VectorStore {
48
48
  // upsert returns 500/502/504 (yes really any of them) if given too many rows/characters
49
49
  // ~2000 trips it, but my data is probably smaller than average pageContent and metadata
50
50
  const chunkSize = 500;
51
+ let ids = [];
51
52
  for (let i = 0; i < rows.length; i += chunkSize) {
52
53
  const chunk = rows.slice(i, i + chunkSize);
53
- const res = await this.client.from(this.tableName).insert(chunk);
54
+ const res = await this.client.from(this.tableName).upsert(chunk).select();
54
55
  if (res.error) {
55
56
  throw new Error(`Error inserting: ${res.error.message} ${res.status} ${res.statusText}`);
56
57
  }
58
+ if (res.data) {
59
+ ids = ids.concat(res.data.map((row) => row.id));
60
+ }
61
+ }
62
+ return ids;
63
+ }
64
+ async delete(params) {
65
+ const { ids } = params;
66
+ for (const id of ids) {
67
+ await this.client.from(this.tableName).delete().eq("id", id);
57
68
  }
58
69
  }
59
70
  async similaritySearchVectorWithScore(query, k, filter) {
@@ -19,8 +19,11 @@ export declare class SupabaseVectorStore extends VectorStore {
19
19
  queryName: string;
20
20
  filter?: SupabaseMetadata | SupabaseFilterRPCCall;
21
21
  constructor(embeddings: Embeddings, args: SupabaseLibArgs);
22
- addDocuments(documents: Document[]): Promise<void>;
23
- addVectors(vectors: number[][], documents: Document[]): Promise<void>;
22
+ addDocuments(documents: Document[]): Promise<string[]>;
23
+ addVectors(vectors: number[][], documents: Document[]): Promise<string[]>;
24
+ delete(params: {
25
+ ids: string[];
26
+ }): Promise<void>;
24
27
  similaritySearchVectorWithScore(query: number[], k: number, filter?: this["FilterType"]): Promise<[Document, number][]>;
25
28
  static fromTexts(texts: string[], metadatas: object[] | object, embeddings: Embeddings, dbConfig: SupabaseLibArgs): Promise<SupabaseVectorStore>;
26
29
  static fromDocuments(docs: Document[], embeddings: Embeddings, dbConfig: SupabaseLibArgs): Promise<SupabaseVectorStore>;
@@ -45,12 +45,23 @@ export class SupabaseVectorStore extends VectorStore {
45
45
  // upsert returns 500/502/504 (yes really any of them) if given too many rows/characters
46
46
  // ~2000 trips it, but my data is probably smaller than average pageContent and metadata
47
47
  const chunkSize = 500;
48
+ let ids = [];
48
49
  for (let i = 0; i < rows.length; i += chunkSize) {
49
50
  const chunk = rows.slice(i, i + chunkSize);
50
- const res = await this.client.from(this.tableName).insert(chunk);
51
+ const res = await this.client.from(this.tableName).upsert(chunk).select();
51
52
  if (res.error) {
52
53
  throw new Error(`Error inserting: ${res.error.message} ${res.status} ${res.statusText}`);
53
54
  }
55
+ if (res.data) {
56
+ ids = ids.concat(res.data.map((row) => row.id));
57
+ }
58
+ }
59
+ return ids;
60
+ }
61
+ async delete(params) {
62
+ const { ids } = params;
63
+ for (const id of ids) {
64
+ await this.client.from(this.tableName).delete().eq("id", id);
54
65
  }
55
66
  }
56
67
  async similaritySearchVectorWithScore(query, k, filter) {
@@ -39,17 +39,18 @@ class TigrisVectorStore extends base_js_1.VectorStore {
39
39
  this.embeddings = embeddings;
40
40
  this.index = args.index;
41
41
  }
42
- async addDocuments(documents, ids) {
42
+ async addDocuments(documents, options) {
43
43
  const texts = documents.map(({ pageContent }) => pageContent);
44
- await this.addVectors(await this.embeddings.embedDocuments(texts), documents, ids);
44
+ await this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
45
45
  }
46
- async addVectors(vectors, documents, ids) {
46
+ async addVectors(vectors, documents, options) {
47
47
  if (vectors.length === 0) {
48
48
  return;
49
49
  }
50
50
  if (vectors.length !== documents.length) {
51
51
  throw new Error(`Vectors and metadatas must have the same length`);
52
52
  }
53
+ const ids = Array.isArray(options) ? options : options?.ids;
53
54
  const documentIds = ids == null ? documents.map(() => uuid.v4()) : ids;
54
55
  await this.index?.addDocumentsWithVectors({
55
56
  ids: documentIds,
@@ -8,8 +8,12 @@ export type TigrisLibArgs = {
8
8
  export declare class TigrisVectorStore extends VectorStore {
9
9
  index?: VectorDocumentStoreT;
10
10
  constructor(embeddings: Embeddings, args: TigrisLibArgs);
11
- addDocuments(documents: Document[], ids?: string[]): Promise<void>;
12
- addVectors(vectors: number[][], documents: Document[], ids?: string[]): Promise<void>;
11
+ addDocuments(documents: Document[], options?: {
12
+ ids?: string[];
13
+ } | string[]): Promise<void>;
14
+ addVectors(vectors: number[][], documents: Document[], options?: {
15
+ ids?: string[];
16
+ } | string[]): Promise<void>;
13
17
  similaritySearchVectorWithScore(query: number[], k: number, filter?: object): Promise<[Document<Record<string, any>>, number][]>;
14
18
  static fromTexts(texts: string[], metadatas: object[] | object, embeddings: Embeddings, dbConfig: TigrisLibArgs): Promise<TigrisVectorStore>;
15
19
  static fromDocuments(docs: Document[], embeddings: Embeddings, dbConfig: TigrisLibArgs): Promise<TigrisVectorStore>;
@@ -13,17 +13,18 @@ export class TigrisVectorStore extends VectorStore {
13
13
  this.embeddings = embeddings;
14
14
  this.index = args.index;
15
15
  }
16
- async addDocuments(documents, ids) {
16
+ async addDocuments(documents, options) {
17
17
  const texts = documents.map(({ pageContent }) => pageContent);
18
- await this.addVectors(await this.embeddings.embedDocuments(texts), documents, ids);
18
+ await this.addVectors(await this.embeddings.embedDocuments(texts), documents, options);
19
19
  }
20
- async addVectors(vectors, documents, ids) {
20
+ async addVectors(vectors, documents, options) {
21
21
  if (vectors.length === 0) {
22
22
  return;
23
23
  }
24
24
  if (vectors.length !== documents.length) {
25
25
  throw new Error(`Vectors and metadatas must have the same length`);
26
26
  }
27
+ const ids = Array.isArray(options) ? options : options?.ids;
27
28
  const documentIds = ids == null ? documents.map(() => uuid.v4()) : ids;
28
29
  await this.index?.addDocumentsWithVectors({
29
30
  ids: documentIds,
@@ -118,14 +118,15 @@ class WeaviateStore extends base_js_1.VectorStore {
118
118
  ];
119
119
  }
120
120
  }
121
- async addVectors(vectors, documents) {
121
+ async addVectors(vectors, documents, options) {
122
+ const documentIds = options?.ids ?? documents.map((_) => uuid.v4());
122
123
  const batch = documents.map((document, index) => {
123
124
  if (Object.hasOwn(document.metadata, "id"))
124
125
  throw new Error("Document inserted to Weaviate vectorstore should not have `id` in their metadata.");
125
126
  const flattenedMetadata = (0, exports.flattenObjectForWeaviate)(document.metadata);
126
127
  return {
127
128
  class: this.indexName,
128
- id: uuid.v4(),
129
+ id: documentIds[index],
129
130
  vector: vectors[index],
130
131
  properties: {
131
132
  [this.textKey]: document.pageContent,
@@ -140,11 +141,22 @@ class WeaviateStore extends base_js_1.VectorStore {
140
141
  .do();
141
142
  }
142
143
  catch (e) {
143
- throw Error(`'Error in addDocuments' ${e}`);
144
+ throw Error(`'Error adding vectors' ${e}`);
144
145
  }
146
+ return documentIds;
145
147
  }
146
- async addDocuments(documents) {
147
- return this.addVectors(await this.embeddings.embedDocuments(documents.map((d) => d.pageContent)), documents);
148
+ async addDocuments(documents, options) {
149
+ return this.addVectors(await this.embeddings.embedDocuments(documents.map((d) => d.pageContent)), documents, options);
150
+ }
151
+ async delete(params) {
152
+ const { ids } = params;
153
+ for (const id of ids) {
154
+ await this.client.data
155
+ .deleter()
156
+ .withClassName(this.indexName)
157
+ .withId(id)
158
+ .do();
159
+ }
148
160
  }
149
161
  async similaritySearchVectorWithScore(query, k, filter) {
150
162
  try {
@@ -24,8 +24,15 @@ export declare class WeaviateStore extends VectorStore {
24
24
  private textKey;
25
25
  private queryAttrs;
26
26
  constructor(embeddings: Embeddings, args: WeaviateLibArgs);
27
- addVectors(vectors: number[][], documents: Document[]): Promise<void>;
28
- addDocuments(documents: Document[]): Promise<void>;
27
+ addVectors(vectors: number[][], documents: Document[], options?: {
28
+ ids?: string[];
29
+ }): Promise<string[]>;
30
+ addDocuments(documents: Document[], options?: {
31
+ ids?: string[];
32
+ }): Promise<string[]>;
33
+ delete(params: {
34
+ ids: string[];
35
+ }): Promise<void>;
29
36
  similaritySearchVectorWithScore(query: number[], k: number, filter?: WeaviateFilter): Promise<[Document, number][]>;
30
37
  static fromTexts(texts: string[], metadatas: object | object[], embeddings: Embeddings, args: WeaviateLibArgs): Promise<WeaviateStore>;
31
38
  static fromDocuments(docs: Document[], embeddings: Embeddings, args: WeaviateLibArgs): Promise<WeaviateStore>;
@@ -91,14 +91,15 @@ export class WeaviateStore extends VectorStore {
91
91
  ];
92
92
  }
93
93
  }
94
- async addVectors(vectors, documents) {
94
+ async addVectors(vectors, documents, options) {
95
+ const documentIds = options?.ids ?? documents.map((_) => uuid.v4());
95
96
  const batch = documents.map((document, index) => {
96
97
  if (Object.hasOwn(document.metadata, "id"))
97
98
  throw new Error("Document inserted to Weaviate vectorstore should not have `id` in their metadata.");
98
99
  const flattenedMetadata = flattenObjectForWeaviate(document.metadata);
99
100
  return {
100
101
  class: this.indexName,
101
- id: uuid.v4(),
102
+ id: documentIds[index],
102
103
  vector: vectors[index],
103
104
  properties: {
104
105
  [this.textKey]: document.pageContent,
@@ -113,11 +114,22 @@ export class WeaviateStore extends VectorStore {
113
114
  .do();
114
115
  }
115
116
  catch (e) {
116
- throw Error(`'Error in addDocuments' ${e}`);
117
+ throw Error(`'Error adding vectors' ${e}`);
117
118
  }
119
+ return documentIds;
118
120
  }
119
- async addDocuments(documents) {
120
- return this.addVectors(await this.embeddings.embedDocuments(documents.map((d) => d.pageContent)), documents);
121
+ async addDocuments(documents, options) {
122
+ return this.addVectors(await this.embeddings.embedDocuments(documents.map((d) => d.pageContent)), documents, options);
123
+ }
124
+ async delete(params) {
125
+ const { ids } = params;
126
+ for (const id of ids) {
127
+ await this.client.data
128
+ .deleter()
129
+ .withClassName(this.indexName)
130
+ .withId(id)
131
+ .do();
132
+ }
121
133
  }
122
134
  async similaritySearchVectorWithScore(query, k, filter) {
123
135
  try {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "langchain",
3
- "version": "0.0.104",
3
+ "version": "0.0.105",
4
4
  "description": "Typescript bindings for langchain",
5
5
  "type": "module",
6
6
  "engines": {
@@ -516,7 +516,7 @@
516
516
  "apify-client": "^2.7.1",
517
517
  "axios": "^0.26.0",
518
518
  "cheerio": "^1.0.0-rc.12",
519
- "chromadb": "^1.5.2",
519
+ "chromadb": "^1.5.3",
520
520
  "cohere-ai": "^5.0.2",
521
521
  "d3-dsv": "^2.0.0",
522
522
  "dotenv": "^16.0.3",
@@ -588,7 +588,7 @@
588
588
  "apify-client": "^2.7.1",
589
589
  "axios": "*",
590
590
  "cheerio": "^1.0.0-rc.12",
591
- "chromadb": "^1.5.2",
591
+ "chromadb": "^1.5.3",
592
592
  "cohere-ai": "^5.0.2",
593
593
  "d3-dsv": "^2.0.0",
594
594
  "epub2": "^3.0.1",