langchain 0.0.146 → 0.0.147

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/dist/base_language/index.cjs +2 -2
  2. package/dist/base_language/index.d.ts +2 -1
  3. package/dist/base_language/index.js +1 -1
  4. package/dist/chains/base.d.ts +1 -1
  5. package/dist/chains/openai_functions/openapi.cjs +32 -27
  6. package/dist/chains/openai_functions/openapi.d.ts +9 -0
  7. package/dist/chains/openai_functions/openapi.js +31 -27
  8. package/dist/chat_models/base.d.ts +1 -1
  9. package/dist/chat_models/openai.cjs +1 -1
  10. package/dist/chat_models/openai.js +1 -1
  11. package/dist/experimental/llms/bittensor.cjs +141 -0
  12. package/dist/experimental/llms/bittensor.d.ts +33 -0
  13. package/dist/experimental/llms/bittensor.js +137 -0
  14. package/dist/hub.d.ts +1 -1
  15. package/dist/llms/base.d.ts +1 -1
  16. package/dist/llms/openai-chat.cjs +1 -1
  17. package/dist/llms/openai-chat.js +1 -1
  18. package/dist/llms/openai.cjs +1 -1
  19. package/dist/llms/openai.js +1 -1
  20. package/dist/load/import_constants.cjs +1 -0
  21. package/dist/load/import_constants.js +1 -0
  22. package/dist/load/import_map.cjs +1 -1
  23. package/dist/load/import_map.d.ts +1 -1
  24. package/dist/load/import_map.js +1 -1
  25. package/dist/load/index.cjs +2 -1
  26. package/dist/load/index.js +2 -1
  27. package/dist/prompts/base.cjs +2 -2
  28. package/dist/prompts/base.d.ts +1 -1
  29. package/dist/prompts/base.js +1 -1
  30. package/dist/prompts/chat.cjs +2 -2
  31. package/dist/prompts/chat.d.ts +1 -1
  32. package/dist/prompts/chat.js +1 -1
  33. package/dist/schema/document.cjs +2 -2
  34. package/dist/schema/document.d.ts +1 -1
  35. package/dist/schema/document.js +1 -1
  36. package/dist/schema/output_parser.cjs +2 -2
  37. package/dist/schema/output_parser.d.ts +2 -1
  38. package/dist/schema/output_parser.js +1 -1
  39. package/dist/schema/retriever.cjs +2 -2
  40. package/dist/schema/retriever.d.ts +2 -1
  41. package/dist/schema/retriever.js +1 -1
  42. package/dist/schema/runnable/config.cjs +8 -0
  43. package/dist/schema/runnable/config.d.ts +3 -0
  44. package/dist/schema/runnable/config.js +4 -0
  45. package/dist/schema/{runnable.cjs → runnable/index.cjs} +290 -101
  46. package/dist/schema/{runnable.d.ts → runnable/index.d.ts} +127 -41
  47. package/dist/schema/{runnable.js → runnable/index.js} +284 -99
  48. package/dist/tools/base.d.ts +1 -1
  49. package/dist/util/async_caller.cjs +35 -25
  50. package/dist/util/async_caller.d.ts +8 -0
  51. package/dist/util/async_caller.js +35 -25
  52. package/dist/vectorstores/pinecone.cjs +30 -22
  53. package/dist/vectorstores/pinecone.d.ts +3 -1
  54. package/dist/vectorstores/pinecone.js +30 -22
  55. package/dist/vectorstores/vectara.cjs +20 -23
  56. package/dist/vectorstores/vectara.d.ts +9 -2
  57. package/dist/vectorstores/vectara.js +20 -23
  58. package/experimental/llms/bittensor.cjs +1 -0
  59. package/experimental/llms/bittensor.d.ts +1 -0
  60. package/experimental/llms/bittensor.js +1 -0
  61. package/package.json +9 -1
  62. package/schema/runnable.cjs +1 -1
  63. package/schema/runnable.d.ts +1 -1
  64. package/schema/runnable.js +1 -1
package/dist/util/async_caller.js CHANGED
@@ -12,6 +12,32 @@ const STATUS_NO_RETRY = [
    408,
    409, // Conflict
  ];
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const defaultFailedAttemptHandler = (error) => {
+     if (error.message.startsWith("Cancel") ||
+         error.message.startsWith("TimeoutError") ||
+         error.name === "TimeoutError" ||
+         error.message.startsWith("AbortError") ||
+         error.name === "AbortError") {
+         throw error;
+     }
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     if (error?.code === "ECONNABORTED") {
+         throw error;
+     }
+     const status =
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     error?.response?.status ?? error?.status;
+     if (status && STATUS_NO_RETRY.includes(+status)) {
+         throw error;
+     }
+     // eslint-disable-next-line @typescript-eslint/no-explicit-any
+     if (error?.error?.code === "insufficient_quota") {
+         const err = new Error(error?.message);
+         err.name = "InsufficientQuotaError";
+         throw err;
+     }
+ };
  /**
   * A class that can be used to make async calls with concurrency and retry logic.
   *
@@ -39,6 +65,12 @@ export class AsyncCaller {
          writable: true,
          value: void 0
      });
+     Object.defineProperty(this, "onFailedAttempt", {
+         enumerable: true,
+         configurable: true,
+         writable: true,
+         value: void 0
+     });
      Object.defineProperty(this, "queue", {
          enumerable: true,
          configurable: true,
@@ -47,6 +79,8 @@
      });
      this.maxConcurrency = params.maxConcurrency ?? Infinity;
      this.maxRetries = params.maxRetries ?? 6;
+     this.onFailedAttempt =
+         params.onFailedAttempt ?? defaultFailedAttemptHandler;
      const PQueue = "default" in PQueueMod ? PQueueMod.default : PQueueMod;
      this.queue = new PQueue({ concurrency: this.maxConcurrency });
  }
@@ -61,31 +95,7 @@ export class AsyncCaller {
              throw new Error(error);
          }
      }), {
-         onFailedAttempt(error) {
-             if (error.message.startsWith("Cancel") ||
-                 error.message.startsWith("TimeoutError") ||
-                 error.name === "TimeoutError" ||
-                 error.message.startsWith("AbortError") ||
-                 error.name === "AbortError") {
-                 throw error;
-             }
-             // eslint-disable-next-line @typescript-eslint/no-explicit-any
-             if (error?.code === "ECONNABORTED") {
-                 throw error;
-             }
-             const status =
-             // eslint-disable-next-line @typescript-eslint/no-explicit-any
-             error?.response?.status ?? error?.status;
-             if (status && STATUS_NO_RETRY.includes(+status)) {
-                 throw error;
-             }
-             // eslint-disable-next-line @typescript-eslint/no-explicit-any
-             if (error?.error?.code === "insufficient_quota") {
-                 const err = new Error(error?.message);
-                 err.name = "InsufficientQuotaError";
-                 throw err;
-             }
-         },
+         onFailedAttempt: this.onFailedAttempt,
          retries: this.maxRetries,
          randomize: true,
          // If needed we can change some of the defaults here,
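The net effect of these hunks is that the retry filter previously inlined in the p-retry options is now a module-level defaultFailedAttemptHandler, and AsyncCaller exposes it as an overridable onFailedAttempt option. A minimal TypeScript sketch of passing a custom handler; AsyncCaller is an internal module, so the deep import path used here is only illustrative:

// Illustrative only: AsyncCaller ships in langchain's internal dist/util code.
import { AsyncCaller } from "langchain/dist/util/async_caller.js";

const caller = new AsyncCaller({
  maxConcurrency: 4,
  maxRetries: 3,
  // New in 0.0.147: called on every failed attempt. Throwing stops further
  // retries; returning normally lets p-retry schedule another attempt.
  onFailedAttempt: (error: Error) => {
    if (error.name === "AbortError") {
      throw error;
    }
  },
});

// At most 4 calls in flight, each retried up to 3 times on transient failures.
const response = await caller.call(() => fetch("https://example.com/health"));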
package/dist/vectorstores/pinecone.cjs CHANGED
@@ -31,6 +31,8 @@ const uuid = __importStar(require("uuid"));
  const flat_1 = __importDefault(require("flat"));
  const base_js_1 = require("./base.cjs");
  const document_js_1 = require("../document.cjs");
+ const chunk_js_1 = require("../util/chunk.cjs");
+ const async_caller_js_1 = require("../util/async_caller.cjs");
  /**
   * Class that extends the VectorStore class and provides methods to
   * interact with the Pinecone vector database.
@@ -65,11 +67,19 @@ class PineconeStore extends base_js_1.VectorStore {
          writable: true,
          value: void 0
      });
+     Object.defineProperty(this, "caller", {
+         enumerable: true,
+         configurable: true,
+         writable: true,
+         value: void 0
+     });
      this.embeddings = embeddings;
-     this.namespace = args.namespace;
-     this.pineconeIndex = args.pineconeIndex;
-     this.textKey = args.textKey ?? "text";
-     this.filter = args.filter;
+     const { namespace, pineconeIndex, textKey, filter, ...asyncCallerArgs } = args;
+     this.namespace = namespace;
+     this.pineconeIndex = pineconeIndex;
+     this.textKey = textKey ?? "text";
+     this.filter = filter;
+     this.caller = new async_caller_js_1.AsyncCaller(asyncCallerArgs);
  }
  /**
   * Method that adds documents to the Pinecone database.
@@ -126,16 +136,15 @@ class PineconeStore extends base_js_1.VectorStore {
          };
      });
      // Pinecone recommends a limit of 100 vectors per upsert request
-     const chunkSize = 50;
-     for (let i = 0; i < pineconeVectors.length; i += chunkSize) {
-         const chunk = pineconeVectors.slice(i, i + chunkSize);
-         await this.pineconeIndex.upsert({
-             upsertRequest: {
-                 vectors: chunk,
-                 namespace: this.namespace,
-             },
-         });
-     }
+     const chunkSize = 100;
+     const chunkedVectors = (0, chunk_js_1.chunkArray)(pineconeVectors, chunkSize);
+     const batchRequests = chunkedVectors.map((chunk) => this.caller.call(async () => this.pineconeIndex.upsert({
+         upsertRequest: {
+             vectors: chunk,
+             namespace: this.namespace,
+         },
+     })));
+     await Promise.all(batchRequests);
      return documentIds;
  }
  /**
@@ -154,14 +163,13 @@ class PineconeStore extends base_js_1.VectorStore {
      }
      else if (ids) {
          const batchSize = 1000;
-         for (let i = 0; i < ids.length; i += batchSize) {
-             const batchIds = ids.slice(i, i + batchSize);
-             await this.pineconeIndex.delete1({
-                 ids: batchIds,
-                 namespace,
-                 ...rest,
-             });
-         }
+         const batchedIds = (0, chunk_js_1.chunkArray)(ids, batchSize);
+         const batchRequests = batchedIds.map((batchIds) => this.caller.call(async () => this.pineconeIndex.delete1({
+             ids: batchIds,
+             namespace,
+             ...rest,
+         })));
+         await Promise.all(batchRequests);
      }
      else {
          throw new Error("Either ids or delete_all must be provided.");
package/dist/vectorstores/pinecone.d.ts CHANGED
@@ -1,9 +1,10 @@
  import { VectorStore } from "./base.js";
  import { Embeddings } from "../embeddings/base.js";
  import { Document } from "../document.js";
+ import { AsyncCaller, type AsyncCallerParams } from "../util/async_caller.js";
  type PineconeMetadata = Record<string, any>;
  type VectorOperationsApi = ReturnType<import("@pinecone-database/pinecone").PineconeClient["Index"]>;
- export interface PineconeLibArgs {
+ export interface PineconeLibArgs extends AsyncCallerParams {
      pineconeIndex: VectorOperationsApi;
      textKey?: string;
      namespace?: string;
@@ -28,6 +29,7 @@ export declare class PineconeStore extends VectorStore {
      namespace?: string;
      pineconeIndex: VectorOperationsApi;
      filter?: PineconeMetadata;
+     caller: AsyncCaller;
      _vectorstoreType(): string;
      constructor(embeddings: Embeddings, args: PineconeLibArgs);
      /**
package/dist/vectorstores/pinecone.js CHANGED
@@ -2,6 +2,8 @@ import * as uuid from "uuid";
  import flatten from "flat";
  import { VectorStore } from "./base.js";
  import { Document } from "../document.js";
+ import { chunkArray } from "../util/chunk.js";
+ import { AsyncCaller } from "../util/async_caller.js";
  /**
   * Class that extends the VectorStore class and provides methods to
   * interact with the Pinecone vector database.
@@ -36,11 +38,19 @@ export class PineconeStore extends VectorStore {
          writable: true,
          value: void 0
      });
+     Object.defineProperty(this, "caller", {
+         enumerable: true,
+         configurable: true,
+         writable: true,
+         value: void 0
+     });
      this.embeddings = embeddings;
-     this.namespace = args.namespace;
-     this.pineconeIndex = args.pineconeIndex;
-     this.textKey = args.textKey ?? "text";
-     this.filter = args.filter;
+     const { namespace, pineconeIndex, textKey, filter, ...asyncCallerArgs } = args;
+     this.namespace = namespace;
+     this.pineconeIndex = pineconeIndex;
+     this.textKey = textKey ?? "text";
+     this.filter = filter;
+     this.caller = new AsyncCaller(asyncCallerArgs);
  }
  /**
   * Method that adds documents to the Pinecone database.
@@ -97,16 +107,15 @@ export class PineconeStore extends VectorStore {
          };
      });
      // Pinecone recommends a limit of 100 vectors per upsert request
-     const chunkSize = 50;
-     for (let i = 0; i < pineconeVectors.length; i += chunkSize) {
-         const chunk = pineconeVectors.slice(i, i + chunkSize);
-         await this.pineconeIndex.upsert({
-             upsertRequest: {
-                 vectors: chunk,
-                 namespace: this.namespace,
-             },
-         });
-     }
+     const chunkSize = 100;
+     const chunkedVectors = chunkArray(pineconeVectors, chunkSize);
+     const batchRequests = chunkedVectors.map((chunk) => this.caller.call(async () => this.pineconeIndex.upsert({
+         upsertRequest: {
+             vectors: chunk,
+             namespace: this.namespace,
+         },
+     })));
+     await Promise.all(batchRequests);
      return documentIds;
  }
  /**
@@ -125,14 +134,13 @@ export class PineconeStore extends VectorStore {
      }
      else if (ids) {
          const batchSize = 1000;
-         for (let i = 0; i < ids.length; i += batchSize) {
-             const batchIds = ids.slice(i, i + batchSize);
-             await this.pineconeIndex.delete1({
-                 ids: batchIds,
-                 namespace,
-                 ...rest,
-             });
-         }
+         const batchedIds = chunkArray(ids, batchSize);
+         const batchRequests = batchedIds.map((batchIds) => this.caller.call(async () => this.pineconeIndex.delete1({
+             ids: batchIds,
+             namespace,
+             ...rest,
+         })));
+         await Promise.all(batchRequests);
      }
      else {
          throw new Error("Either ids or delete_all must be provided.");
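Because PineconeLibArgs now extends AsyncCallerParams and both the chunked upserts and deletes go through this.caller, concurrency and retry limits can be set when the store is constructed. A sketch, assuming the surrounding setup uses the @pinecone-database/pinecone client API of this release window; the index name, namespace, and environment variables are placeholders:

import { PineconeClient } from "@pinecone-database/pinecone";
import { OpenAIEmbeddings } from "langchain/embeddings/openai";
import { PineconeStore } from "langchain/vectorstores/pinecone";

const client = new PineconeClient();
await client.init({
  apiKey: process.env.PINECONE_API_KEY ?? "",
  environment: process.env.PINECONE_ENVIRONMENT ?? "",
});
const pineconeIndex = client.Index("my-index");

// maxConcurrency and maxRetries come from AsyncCallerParams and now bound the
// parallel 100-vector upsert batches and the batched delete requests.
const store = await PineconeStore.fromExistingIndex(new OpenAIEmbeddings(), {
  pineconeIndex,
  namespace: "example-namespace",
  maxConcurrency: 5,
});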
package/dist/vectorstores/vectara.cjs CHANGED
@@ -185,42 +185,39 @@ class VectaraStore extends base_js_1.VectorStore {
   * pre-processing and chunking internally in an optimal manner. This method is a wrapper
   * to utilize that API within LangChain.
   *
-  * @param filePaths An array of Blob objects representing the files to be uploaded to Vectara.
+  * @param files An array of VectaraFile objects representing the files and their respective file names to be uploaded to Vectara.
   * @param metadata Optional. An array of metadata objects corresponding to each file in the `filePaths` array.
   * @returns A Promise that resolves to the number of successfully uploaded files.
   */
- async addFiles(filePaths, metadatas = undefined) {
+ async addFiles(files, metadatas = undefined) {
      if (this.corpusId.length > 1)
          throw new Error("addFiles does not support multiple corpus ids");
      let numDocs = 0;
-     for (const [index, fileBlob] of filePaths.entries()) {
+     for (const [index, file] of files.entries()) {
          const md = metadatas ? metadatas[index] : {};
          const data = new FormData();
-         data.append("file", fileBlob, `file_${index}`);
+         data.append("file", file.blob, file.fileName);
          data.append("doc-metadata", JSON.stringify(md));
-         try {
-             const response = await fetch(`https://api.vectara.io/v1/upload?c=${this.customerId}&o=${this.corpusId[0]}`, {
-                 method: "POST",
-                 headers: {
-                     "x-api-key": this.apiKey,
-                 },
-                 body: data,
-             });
-             const result = await response.json();
-             const { status } = response;
-             if (status !== 200 && status !== 409) {
-                 throw new Error(`Vectara API returned status code ${status}: ${result}`);
-             }
-             else {
-                 numDocs += 1;
-             }
+         const response = await fetch(`https://api.vectara.io/v1/upload?c=${this.customerId}&o=${this.corpusId[0]}`, {
+             method: "POST",
+             headers: {
+                 "x-api-key": this.apiKey,
+             },
+             body: data,
+         });
+         const { status } = response;
+         if (status === 409) {
+             throw new Error(`File at index ${index} already exists in Vectara`);
+         }
+         else if (status !== 200) {
+             throw new Error(`Vectara API returned status code ${status}`);
          }
-         catch (err) {
-             console.error(`Failed to upload file at index ${index}:`, err);
+         else {
+             numDocs += 1;
          }
      }
      if (this.verbose) {
-         console.log(`Uploaded ${filePaths.length} files to Vectara`);
+         console.log(`Uploaded ${files.length} files to Vectara`);
      }
      return numDocs;
  }
package/dist/vectorstores/vectara.d.ts CHANGED
@@ -21,6 +21,13 @@ interface VectaraCallHeader {
      "customer-id": string;
  };
  }
+ /**
+  * Interface for the file objects to be uploaded to Vectara.
+  */
+ export interface VectaraFile {
+     blob: Blob;
+     fileName: string;
+ }
  /**
   * Interface for the filter options used in Vectara API calls.
   */
@@ -80,11 +87,11 @@ export declare class VectaraStore extends VectorStore {
   * pre-processing and chunking internally in an optimal manner. This method is a wrapper
   * to utilize that API within LangChain.
   *
-  * @param filePaths An array of Blob objects representing the files to be uploaded to Vectara.
+  * @param files An array of VectaraFile objects representing the files and their respective file names to be uploaded to Vectara.
   * @param metadata Optional. An array of metadata objects corresponding to each file in the `filePaths` array.
   * @returns A Promise that resolves to the number of successfully uploaded files.
   */
- addFiles(filePaths: Blob[], metadatas?: Record<string, unknown> | undefined): Promise<number>;
+ addFiles(files: VectaraFile[], metadatas?: Record<string, unknown> | undefined): Promise<number>;
  /**
   * Performs a similarity search and returns documents along with their
   * scores.
package/dist/vectorstores/vectara.js CHANGED
@@ -182,42 +182,39 @@ export class VectaraStore extends VectorStore {
   * pre-processing and chunking internally in an optimal manner. This method is a wrapper
   * to utilize that API within LangChain.
   *
-  * @param filePaths An array of Blob objects representing the files to be uploaded to Vectara.
+  * @param files An array of VectaraFile objects representing the files and their respective file names to be uploaded to Vectara.
   * @param metadata Optional. An array of metadata objects corresponding to each file in the `filePaths` array.
   * @returns A Promise that resolves to the number of successfully uploaded files.
   */
- async addFiles(filePaths, metadatas = undefined) {
+ async addFiles(files, metadatas = undefined) {
      if (this.corpusId.length > 1)
          throw new Error("addFiles does not support multiple corpus ids");
      let numDocs = 0;
-     for (const [index, fileBlob] of filePaths.entries()) {
+     for (const [index, file] of files.entries()) {
          const md = metadatas ? metadatas[index] : {};
          const data = new FormData();
-         data.append("file", fileBlob, `file_${index}`);
+         data.append("file", file.blob, file.fileName);
          data.append("doc-metadata", JSON.stringify(md));
-         try {
-             const response = await fetch(`https://api.vectara.io/v1/upload?c=${this.customerId}&o=${this.corpusId[0]}`, {
-                 method: "POST",
-                 headers: {
-                     "x-api-key": this.apiKey,
-                 },
-                 body: data,
-             });
-             const result = await response.json();
-             const { status } = response;
-             if (status !== 200 && status !== 409) {
-                 throw new Error(`Vectara API returned status code ${status}: ${result}`);
-             }
-             else {
-                 numDocs += 1;
-             }
+         const response = await fetch(`https://api.vectara.io/v1/upload?c=${this.customerId}&o=${this.corpusId[0]}`, {
+             method: "POST",
+             headers: {
+                 "x-api-key": this.apiKey,
+             },
+             body: data,
+         });
+         const { status } = response;
+         if (status === 409) {
+             throw new Error(`File at index ${index} already exists in Vectara`);
+         }
+         else if (status !== 200) {
+             throw new Error(`Vectara API returned status code ${status}`);
          }
-         catch (err) {
-             console.error(`Failed to upload file at index ${index}:`, err);
+         else {
+             numDocs += 1;
          }
      }
      if (this.verbose) {
-         console.log(`Uploaded ${filePaths.length} files to Vectara`);
+         console.log(`Uploaded ${files.length} files to Vectara`);
      }
      return numDocs;
  }
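addFiles therefore now takes VectaraFile objects ({ blob, fileName }) instead of bare Blobs, and a 409 from the upload endpoint is surfaced as an error rather than counted as a success. A sketch of the updated call; Vectara embeds documents server-side, so the FakeEmbeddings placeholder and the credential wiring are assumptions:

import { readFileSync } from "node:fs";
import { FakeEmbeddings } from "langchain/embeddings/fake";
import { VectaraStore, type VectaraFile } from "langchain/vectorstores/vectara";

const store = new VectaraStore(new FakeEmbeddings(), {
  customerId: Number(process.env.VECTARA_CUSTOMER_ID),
  corpusId: [Number(process.env.VECTARA_CORPUS_ID)],
  apiKey: process.env.VECTARA_API_KEY ?? "",
});

// Each entry now carries its own file name instead of the generated `file_${index}`.
// Node 18+ provides a global Blob.
const files: VectaraFile[] = [
  {
    blob: new Blob([readFileSync("report.pdf")], { type: "application/pdf" }),
    fileName: "report.pdf",
  },
];
const uploaded = await store.addFiles(files);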
package/experimental/llms/bittensor.cjs ADDED
@@ -0,0 +1 @@
+ module.exports = require('../../dist/experimental/llms/bittensor.cjs');
package/experimental/llms/bittensor.d.ts ADDED
@@ -0,0 +1 @@
+ export * from '../../dist/experimental/llms/bittensor.js'
package/experimental/llms/bittensor.js ADDED
@@ -0,0 +1 @@
+ export * from '../../dist/experimental/llms/bittensor.js'
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
      "name": "langchain",
-     "version": "0.0.146",
+     "version": "0.0.147",
      "description": "Typescript bindings for langchain",
      "type": "module",
      "engines": {
@@ -583,6 +583,9 @@
      "experimental/chat_models/anthropic_functions.cjs",
      "experimental/chat_models/anthropic_functions.js",
      "experimental/chat_models/anthropic_functions.d.ts",
+     "experimental/llms/bittensor.cjs",
+     "experimental/llms/bittensor.js",
+     "experimental/llms/bittensor.d.ts",
      "evaluation.cjs",
      "evaluation.js",
      "evaluation.d.ts",
@@ -2050,6 +2053,11 @@
      "import": "./experimental/chat_models/anthropic_functions.js",
      "require": "./experimental/chat_models/anthropic_functions.cjs"
  },
+ "./experimental/llms/bittensor": {
+     "types": "./experimental/llms/bittensor.d.ts",
+     "import": "./experimental/llms/bittensor.js",
+     "require": "./experimental/llms/bittensor.cjs"
+ },
  "./evaluation": {
      "types": "./evaluation.d.ts",
      "import": "./evaluation.js",
package/schema/runnable.cjs CHANGED
@@ -1 +1 @@
- module.exports = require('../dist/schema/runnable.cjs');
+ module.exports = require('../dist/schema/runnable/index.cjs');
package/schema/runnable.d.ts CHANGED
@@ -1 +1 @@
- export * from '../dist/schema/runnable.js'
+ export * from '../dist/schema/runnable/index.js'
package/schema/runnable.js CHANGED
@@ -1 +1 @@
- export * from '../dist/schema/runnable.js'
+ export * from '../dist/schema/runnable/index.js'
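These stubs keep the public langchain/schema/runnable entrypoint stable while the implementation moves from a single runnable.js file into a runnable/ directory with its own config module. Existing imports are unaffected, e.g. (assuming RunnableSequence remains among the exports, as in prior releases):

import { PromptTemplate } from "langchain/prompts";
import { OpenAI } from "langchain/llms/openai";
import { RunnableSequence } from "langchain/schema/runnable";

// Same import path as in 0.0.146; only the file backing the entrypoint moved.
const chain = RunnableSequence.from([
  PromptTemplate.fromTemplate("Tell me a joke about {topic}"),
  new OpenAI({ temperature: 0 }),
]);
console.log(await chain.invoke({ topic: "bears" }));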