langchain 0.0.190 → 0.0.192

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/cache/file_system.cjs +1 -0
  2. package/cache/file_system.d.ts +1 -0
  3. package/cache/file_system.js +1 -0
  4. package/dist/cache/file_system.cjs +73 -0
  5. package/dist/cache/file_system.d.ts +33 -0
  6. package/dist/cache/file_system.js +66 -0
  7. package/dist/chat_models/bedrock/web.cjs +1 -1
  8. package/dist/chat_models/bedrock/web.js +1 -1
  9. package/dist/document_loaders/fs/pdf.cjs +9 -2
  10. package/dist/document_loaders/fs/pdf.d.ts +3 -1
  11. package/dist/document_loaders/fs/pdf.js +9 -2
  12. package/dist/document_loaders/web/apify_dataset.cjs +5 -2
  13. package/dist/document_loaders/web/apify_dataset.d.ts +4 -1
  14. package/dist/document_loaders/web/apify_dataset.js +5 -2
  15. package/dist/document_loaders/web/pdf.cjs +9 -2
  16. package/dist/document_loaders/web/pdf.d.ts +3 -1
  17. package/dist/document_loaders/web/pdf.js +9 -2
  18. package/dist/llms/bedrock/web.cjs +1 -1
  19. package/dist/llms/bedrock/web.js +1 -1
  20. package/dist/load/import_constants.cjs +2 -0
  21. package/dist/load/import_constants.js +2 -0
  22. package/dist/output_parsers/http_response.cjs +4 -4
  23. package/dist/output_parsers/http_response.js +4 -4
  24. package/dist/vectorstores/prisma.cjs +14 -2
  25. package/dist/vectorstores/prisma.d.ts +1 -0
  26. package/dist/vectorstores/prisma.js +14 -2
  27. package/dist/vectorstores/rockset.cjs +353 -0
  28. package/dist/vectorstores/rockset.d.ts +202 -0
  29. package/dist/vectorstores/rockset.js +347 -0
  30. package/package.json +25 -6
  31. package/vectorstores/rockset.cjs +1 -0
  32. package/vectorstores/rockset.d.ts +1 -0
  33. package/vectorstores/rockset.js +1 -0
@@ -0,0 +1 @@
1
// CommonJS entrypoint shim: re-exports the filesystem cache from the built dist output.
module.exports = require('../dist/cache/file_system.cjs');
@@ -0,0 +1 @@
1
// Type declaration shim: forwards all exports of the built filesystem cache module.
export * from '../dist/cache/file_system.js'
@@ -0,0 +1 @@
1
// ESM entrypoint shim: re-exports the filesystem cache from the built dist output.
export * from '../dist/cache/file_system.js'
@@ -0,0 +1,73 @@
1
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LocalFileCache = void 0;
const node_os_1 = __importDefault(require("node:os"));
const node_path_1 = __importDefault(require("node:path"));
const promises_1 = __importDefault(require("node:fs/promises"));
const index_js_1 = require("../schema/index.cjs");
const base_js_1 = require("./base.cjs");
/**
 * A cache that uses the local filesystem as the backing store.
 * Each cache entry is stored as one JSON file named after the cache key.
 * This is useful for local development and testing. But it is not recommended for production use.
 */
class LocalFileCache extends index_js_1.BaseCache {
    constructor(cacheDir) {
        super();
        Object.defineProperty(this, "cacheDir", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        this.cacheDir = cacheDir;
    }
    /**
     * Create a new cache backed by the local filesystem.
     * It ensures that the cache directory exists before returning.
     * @param cacheDir Directory to store cache files in. When omitted, a
     *   unique directory is created under the OS temp directory.
     */
    static async create(cacheDir) {
        if (!cacheDir) {
            // Anchor the generated directory in the OS temp dir; a bare prefix
            // would create it in the process's current working directory.
            // eslint-disable-next-line no-param-reassign
            cacheDir = await promises_1.default.mkdtemp(node_path_1.default.join(node_os_1.default.tmpdir(), "langchain-cache-"));
        }
        else {
            // ensure the cache directory exists
            await promises_1.default.mkdir(cacheDir, { recursive: true });
        }
        return new LocalFileCache(cacheDir);
    }
    /**
     * Retrieves data from the cache. It constructs a cache key from the given
     * `prompt` and `llmKey`, and retrieves the corresponding value from the
     * cache files.
     * @param prompt The prompt used to construct the cache key.
     * @param llmKey The LLM key used to construct the cache key.
     * @returns An array of Generations if found, null otherwise.
     */
    async lookup(prompt, llmKey) {
        const key = `${(0, base_js_1.getCacheKey)(prompt, llmKey)}.json`;
        try {
            const content = await promises_1.default.readFile(node_path_1.default.join(this.cacheDir, key));
            return JSON.parse(content.toString()).map(base_js_1.deserializeStoredGeneration);
        }
        catch {
            // Treat any read/parse failure (most commonly a missing file) as a cache miss.
            return null;
        }
    }
    /**
     * Updates the cache with new data. It constructs a cache key from the
     * given `prompt` and `llmKey`, and stores the `value` in a specific
     * file in the cache directory.
     * @param prompt The prompt used to construct the cache key.
     * @param llmKey The LLM key used to construct the cache key.
     * @param generations The value to be stored in the cache.
     */
    async update(prompt, llmKey, generations) {
        const key = `${(0, base_js_1.getCacheKey)(prompt, llmKey)}.json`;
        await promises_1.default.writeFile(node_path_1.default.join(this.cacheDir, key), JSON.stringify(generations.map(base_js_1.serializeGeneration)));
    }
}
exports.LocalFileCache = LocalFileCache;
@@ -0,0 +1,33 @@
1
+ import { BaseCache, Generation } from "../schema/index.js";
2
+ /**
3
+ * A cache that uses the local filesystem as the backing store.
4
+ * This is useful for local development and testing. But it is not recommended for production use.
5
+ */
6
+ export declare class LocalFileCache extends BaseCache {
7
+ private cacheDir;
8
+ private constructor();
9
+ /**
10
+ * Create a new cache backed by the local filesystem.
11
+ * It ensures that the cache directory exists before returning.
12
+ * @param cacheDir
13
+ */
14
+ static create(cacheDir?: string): Promise<LocalFileCache>;
15
+ /**
16
+ * Retrieves data from the cache. It constructs a cache key from the given
17
+ * `prompt` and `llmKey`, and retrieves the corresponding value from the
18
+ * cache files.
19
+ * @param prompt The prompt used to construct the cache key.
20
+ * @param llmKey The LLM key used to construct the cache key.
21
+ * @returns An array of Generations if found, null otherwise.
22
+ */
23
+ lookup(prompt: string, llmKey: string): Promise<any>;
24
+ /**
25
+ * Updates the cache with new data. It constructs a cache key from the
26
+ * given `prompt` and `llmKey`, and stores the `value` in a specific
27
+ * file in the cache directory.
28
+ * @param prompt The prompt used to construct the cache key.
29
+ * @param llmKey The LLM key used to construct the cache key.
30
+ * @param generations The value to be stored in the cache.
31
+ */
32
+ update(prompt: string, llmKey: string, generations: Generation[]): Promise<void>;
33
+ }
@@ -0,0 +1,66 @@
1
import os from "node:os";
import path from "node:path";
import fs from "node:fs/promises";
import { BaseCache } from "../schema/index.js";
import { getCacheKey, serializeGeneration, deserializeStoredGeneration, } from "./base.js";
/**
 * A cache that uses the local filesystem as the backing store.
 * Each cache entry is stored as one JSON file named after the cache key.
 * This is useful for local development and testing. But it is not recommended for production use.
 */
export class LocalFileCache extends BaseCache {
    constructor(cacheDir) {
        super();
        Object.defineProperty(this, "cacheDir", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        this.cacheDir = cacheDir;
    }
    /**
     * Create a new cache backed by the local filesystem.
     * It ensures that the cache directory exists before returning.
     * @param cacheDir Directory to store cache files in. When omitted, a
     *   unique directory is created under the OS temp directory.
     */
    static async create(cacheDir) {
        if (!cacheDir) {
            // Anchor the generated directory in the OS temp dir; a bare prefix
            // would create it in the process's current working directory.
            // eslint-disable-next-line no-param-reassign
            cacheDir = await fs.mkdtemp(path.join(os.tmpdir(), "langchain-cache-"));
        }
        else {
            // ensure the cache directory exists
            await fs.mkdir(cacheDir, { recursive: true });
        }
        return new LocalFileCache(cacheDir);
    }
    /**
     * Retrieves data from the cache. It constructs a cache key from the given
     * `prompt` and `llmKey`, and retrieves the corresponding value from the
     * cache files.
     * @param prompt The prompt used to construct the cache key.
     * @param llmKey The LLM key used to construct the cache key.
     * @returns An array of Generations if found, null otherwise.
     */
    async lookup(prompt, llmKey) {
        const key = `${getCacheKey(prompt, llmKey)}.json`;
        try {
            const content = await fs.readFile(path.join(this.cacheDir, key));
            return JSON.parse(content.toString()).map(deserializeStoredGeneration);
        }
        catch {
            // Treat any read/parse failure (most commonly a missing file) as a cache miss.
            return null;
        }
    }
    /**
     * Updates the cache with new data. It constructs a cache key from the
     * given `prompt` and `llmKey`, and stores the `value` in a specific
     * file in the cache directory.
     * @param prompt The prompt used to construct the cache key.
     * @param llmKey The LLM key used to construct the cache key.
     * @param generations The value to be stored in the cache.
     */
    async update(prompt, llmKey, generations) {
        const key = `${getCacheKey(prompt, llmKey)}.json`;
        await fs.writeFile(path.join(this.cacheDir, key), JSON.stringify(generations.map(serializeGeneration)));
    }
}
@@ -174,7 +174,7 @@ class BedrockChat extends base_js_1.SimpleChatModel {
174
174
  this.credentials = credentials;
175
175
  this.temperature = fields?.temperature ?? this.temperature;
176
176
  this.maxTokens = fields?.maxTokens ?? this.maxTokens;
177
- this.fetchFn = fields?.fetchFn ?? fetch;
177
+ this.fetchFn = fields?.fetchFn ?? fetch.bind(globalThis);
178
178
  this.endpointHost = fields?.endpointHost ?? fields?.endpointUrl;
179
179
  this.stopSequences = fields?.stopSequences;
180
180
  this.modelKwargs = fields?.modelKwargs;
@@ -169,7 +169,7 @@ export class BedrockChat extends SimpleChatModel {
169
169
  this.credentials = credentials;
170
170
  this.temperature = fields?.temperature ?? this.temperature;
171
171
  this.maxTokens = fields?.maxTokens ?? this.maxTokens;
172
- this.fetchFn = fields?.fetchFn ?? fetch;
172
+ this.fetchFn = fields?.fetchFn ?? fetch.bind(globalThis);
173
173
  this.endpointHost = fields?.endpointHost ?? fields?.endpointUrl;
174
174
  this.stopSequences = fields?.stopSequences;
175
175
  this.modelKwargs = fields?.modelKwargs;
@@ -9,7 +9,7 @@ const document_js_2 = require("../../util/document.cjs");
9
9
  * loader that loads documents from PDF files.
10
10
  */
11
11
  class PDFLoader extends buffer_js_1.BufferLoader {
12
- constructor(filePathOrBlob, { splitPages = true, pdfjs = PDFLoaderImports } = {}) {
12
+ constructor(filePathOrBlob, { splitPages = true, pdfjs = PDFLoaderImports, parsedItemSeparator = " ", } = {}) {
13
13
  super(filePathOrBlob);
14
14
  Object.defineProperty(this, "splitPages", {
15
15
  enumerable: true,
@@ -23,8 +23,15 @@ class PDFLoader extends buffer_js_1.BufferLoader {
23
23
  writable: true,
24
24
  value: void 0
25
25
  });
26
+ Object.defineProperty(this, "parsedItemSeparator", {
27
+ enumerable: true,
28
+ configurable: true,
29
+ writable: true,
30
+ value: void 0
31
+ });
26
32
  this.splitPages = splitPages;
27
33
  this.pdfjs = pdfjs;
34
+ this.parsedItemSeparator = parsedItemSeparator;
28
35
  }
29
36
  /**
30
37
  * A method that takes a `raw` buffer and `metadata` as parameters and
@@ -75,7 +82,7 @@ class PDFLoader extends buffer_js_1.BufferLoader {
75
82
  lastY = item.transform[5];
76
83
  }
77
84
  }
78
- const text = textItems.join(" ");
85
+ const text = textItems.join(this.parsedItemSeparator);
79
86
  documents.push(new document_js_1.Document({
80
87
  pageContent: text,
81
88
  metadata: {
@@ -9,9 +9,11 @@ import { BufferLoader } from "./buffer.js";
9
9
  export declare class PDFLoader extends BufferLoader {
10
10
  private splitPages;
11
11
  private pdfjs;
12
- constructor(filePathOrBlob: string | Blob, { splitPages, pdfjs }?: {
12
+ protected parsedItemSeparator: string;
13
+ constructor(filePathOrBlob: string | Blob, { splitPages, pdfjs, parsedItemSeparator, }?: {
13
14
  splitPages?: boolean | undefined;
14
15
  pdfjs?: typeof PDFLoaderImports | undefined;
16
+ parsedItemSeparator?: string | undefined;
15
17
  });
16
18
  /**
17
19
  * A method that takes a `raw` buffer and `metadata` as parameters and
@@ -6,7 +6,7 @@ import { formatDocumentsAsString } from "../../util/document.js";
6
6
  * loader that loads documents from PDF files.
7
7
  */
8
8
  export class PDFLoader extends BufferLoader {
9
- constructor(filePathOrBlob, { splitPages = true, pdfjs = PDFLoaderImports } = {}) {
9
+ constructor(filePathOrBlob, { splitPages = true, pdfjs = PDFLoaderImports, parsedItemSeparator = " ", } = {}) {
10
10
  super(filePathOrBlob);
11
11
  Object.defineProperty(this, "splitPages", {
12
12
  enumerable: true,
@@ -20,8 +20,15 @@ export class PDFLoader extends BufferLoader {
20
20
  writable: true,
21
21
  value: void 0
22
22
  });
23
+ Object.defineProperty(this, "parsedItemSeparator", {
24
+ enumerable: true,
25
+ configurable: true,
26
+ writable: true,
27
+ value: void 0
28
+ });
23
29
  this.splitPages = splitPages;
24
30
  this.pdfjs = pdfjs;
31
+ this.parsedItemSeparator = parsedItemSeparator;
25
32
  }
26
33
  /**
27
34
  * A method that takes a `raw` buffer and `metadata` as parameters and
@@ -72,7 +79,7 @@ export class PDFLoader extends BufferLoader {
72
79
  lastY = item.transform[5];
73
80
  }
74
81
  }
75
- const text = textItems.join(" ");
82
+ const text = textItems.join(this.parsedItemSeparator);
76
83
  documents.push(new Document({
77
84
  pageContent: text,
78
85
  metadata: {
@@ -55,8 +55,11 @@ class ApifyDatasetLoader extends base_js_1.BaseDocumentLoader {
55
55
  * @returns An array of Document instances.
56
56
  */
57
57
  async load() {
58
- const datasetItems = (await this.apifyClient.dataset(this.datasetId).listItems({ clean: true })).items;
59
- return await Promise.all(datasetItems.map((item) => this.caller.call(async () => this.datasetMappingFunction(item))));
58
+ const dataset = await this.apifyClient
59
+ .dataset(this.datasetId)
60
+ .listItems({ clean: true });
61
+ const documentList = await Promise.all(dataset.items.map((item) => this.caller.call(async () => this.datasetMappingFunction(item))));
62
+ return documentList.flat();
60
63
  }
61
64
  /**
62
65
  * Create an ApifyDatasetLoader by calling an Actor on the Apify platform and waiting for its results to be ready.
@@ -5,8 +5,11 @@ import { Document } from "../../document.js";
5
5
  /**
6
6
  * A type that represents a function that takes a single object (an Apify
7
7
  * dataset item) and converts it to an instance of the Document class.
8
+ *
9
+ * Change function signature to only be asynchronous for simplicity in v0.1.0
10
+ * https://github.com/langchain-ai/langchainjs/pull/3262
8
11
  */
9
- export type ApifyDatasetMappingFunction<Metadata extends Record<string, any>> = (item: Record<string | number, unknown>) => Document<Metadata> | Promise<Document<Metadata>>;
12
+ export type ApifyDatasetMappingFunction<Metadata extends Record<string, any>> = (item: Record<string | number, unknown>) => Document<Metadata> | Array<Document<Metadata>> | Promise<Document<Metadata> | Array<Document<Metadata>>>;
10
13
  export interface ApifyDatasetLoaderConfig<Metadata extends Record<string, any>> extends AsyncCallerParams {
11
14
  datasetMappingFunction: ApifyDatasetMappingFunction<Metadata>;
12
15
  clientOptions?: ApifyClientOptions;
@@ -52,8 +52,11 @@ export class ApifyDatasetLoader extends BaseDocumentLoader {
52
52
  * @returns An array of Document instances.
53
53
  */
54
54
  async load() {
55
- const datasetItems = (await this.apifyClient.dataset(this.datasetId).listItems({ clean: true })).items;
56
- return await Promise.all(datasetItems.map((item) => this.caller.call(async () => this.datasetMappingFunction(item))));
55
+ const dataset = await this.apifyClient
56
+ .dataset(this.datasetId)
57
+ .listItems({ clean: true });
58
+ const documentList = await Promise.all(dataset.items.map((item) => this.caller.call(async () => this.datasetMappingFunction(item))));
59
+ return documentList.flat();
57
60
  }
58
61
  /**
59
62
  * Create an ApifyDatasetLoader by calling an Actor on the Apify platform and waiting for its results to be ready.
@@ -8,7 +8,7 @@ const document_js_2 = require("../../util/document.cjs");
8
8
  * A document loader for loading data from PDFs.
9
9
  */
10
10
  class WebPDFLoader extends base_js_1.BaseDocumentLoader {
11
- constructor(blob, { splitPages = true, pdfjs = PDFLoaderImports } = {}) {
11
+ constructor(blob, { splitPages = true, pdfjs = PDFLoaderImports, parsedItemSeparator = " ", } = {}) {
12
12
  super();
13
13
  Object.defineProperty(this, "blob", {
14
14
  enumerable: true,
@@ -28,9 +28,16 @@ class WebPDFLoader extends base_js_1.BaseDocumentLoader {
28
28
  writable: true,
29
29
  value: void 0
30
30
  });
31
+ Object.defineProperty(this, "parsedItemSeparator", {
32
+ enumerable: true,
33
+ configurable: true,
34
+ writable: true,
35
+ value: void 0
36
+ });
31
37
  this.blob = blob;
32
38
  this.splitPages = splitPages ?? this.splitPages;
33
39
  this.pdfjs = pdfjs;
40
+ this.parsedItemSeparator = parsedItemSeparator;
34
41
  }
35
42
  /**
36
43
  * Loads the contents of the PDF as documents.
@@ -68,7 +75,7 @@ class WebPDFLoader extends base_js_1.BaseDocumentLoader {
68
75
  lastY = item.transform[5];
69
76
  }
70
77
  }
71
- const text = textItems.join(" ");
78
+ const text = textItems.join(this.parsedItemSeparator);
72
79
  documents.push(new document_js_1.Document({
73
80
  pageContent: text,
74
81
  metadata: {
@@ -8,9 +8,11 @@ export declare class WebPDFLoader extends BaseDocumentLoader {
8
8
  protected blob: Blob;
9
9
  protected splitPages: boolean;
10
10
  private pdfjs;
11
- constructor(blob: Blob, { splitPages, pdfjs }?: {
11
+ protected parsedItemSeparator: string;
12
+ constructor(blob: Blob, { splitPages, pdfjs, parsedItemSeparator, }?: {
12
13
  splitPages?: boolean | undefined;
13
14
  pdfjs?: typeof PDFLoaderImports | undefined;
15
+ parsedItemSeparator?: string | undefined;
14
16
  });
15
17
  /**
16
18
  * Loads the contents of the PDF as documents.
@@ -5,7 +5,7 @@ import { formatDocumentsAsString } from "../../util/document.js";
5
5
  * A document loader for loading data from PDFs.
6
6
  */
7
7
  export class WebPDFLoader extends BaseDocumentLoader {
8
- constructor(blob, { splitPages = true, pdfjs = PDFLoaderImports } = {}) {
8
+ constructor(blob, { splitPages = true, pdfjs = PDFLoaderImports, parsedItemSeparator = " ", } = {}) {
9
9
  super();
10
10
  Object.defineProperty(this, "blob", {
11
11
  enumerable: true,
@@ -25,9 +25,16 @@ export class WebPDFLoader extends BaseDocumentLoader {
25
25
  writable: true,
26
26
  value: void 0
27
27
  });
28
+ Object.defineProperty(this, "parsedItemSeparator", {
29
+ enumerable: true,
30
+ configurable: true,
31
+ writable: true,
32
+ value: void 0
33
+ });
28
34
  this.blob = blob;
29
35
  this.splitPages = splitPages ?? this.splitPages;
30
36
  this.pdfjs = pdfjs;
37
+ this.parsedItemSeparator = parsedItemSeparator;
31
38
  }
32
39
  /**
33
40
  * Loads the contents of the PDF as documents.
@@ -65,7 +72,7 @@ export class WebPDFLoader extends BaseDocumentLoader {
65
72
  lastY = item.transform[5];
66
73
  }
67
74
  }
68
- const text = textItems.join(" ");
75
+ const text = textItems.join(this.parsedItemSeparator);
69
76
  documents.push(new Document({
70
77
  pageContent: text,
71
78
  metadata: {
@@ -133,7 +133,7 @@ class Bedrock extends base_js_1.LLM {
133
133
  this.credentials = credentials;
134
134
  this.temperature = fields?.temperature ?? this.temperature;
135
135
  this.maxTokens = fields?.maxTokens ?? this.maxTokens;
136
- this.fetchFn = fields?.fetchFn ?? fetch;
136
+ this.fetchFn = fields?.fetchFn ?? fetch.bind(globalThis);
137
137
  this.endpointHost = fields?.endpointHost ?? fields?.endpointUrl;
138
138
  this.stopSequences = fields?.stopSequences;
139
139
  this.modelKwargs = fields?.modelKwargs;
@@ -130,7 +130,7 @@ export class Bedrock extends LLM {
130
130
  this.credentials = credentials;
131
131
  this.temperature = fields?.temperature ?? this.temperature;
132
132
  this.maxTokens = fields?.maxTokens ?? this.maxTokens;
133
- this.fetchFn = fields?.fetchFn ?? fetch;
133
+ this.fetchFn = fields?.fetchFn ?? fetch.bind(globalThis);
134
134
  this.endpointHost = fields?.endpointHost ?? fields?.endpointUrl;
135
135
  this.stopSequences = fields?.stopSequences;
136
136
  this.modelKwargs = fields?.modelKwargs;
@@ -67,6 +67,7 @@ exports.optionalImportEntrypoints = [
67
67
  "langchain/vectorstores/typeorm",
68
68
  "langchain/vectorstores/myscale",
69
69
  "langchain/vectorstores/redis",
70
+ "langchain/vectorstores/rockset",
70
71
  "langchain/vectorstores/typesense",
71
72
  "langchain/vectorstores/singlestore",
72
73
  "langchain/vectorstores/tigris",
@@ -136,6 +137,7 @@ exports.optionalImportEntrypoints = [
136
137
  "langchain/cache/momento",
137
138
  "langchain/cache/redis",
138
139
  "langchain/cache/ioredis",
140
+ "langchain/cache/file_system",
139
141
  "langchain/cache/upstash_redis",
140
142
  "langchain/stores/doc/gcs",
141
143
  "langchain/stores/file/node",
@@ -64,6 +64,7 @@ export const optionalImportEntrypoints = [
64
64
  "langchain/vectorstores/typeorm",
65
65
  "langchain/vectorstores/myscale",
66
66
  "langchain/vectorstores/redis",
67
+ "langchain/vectorstores/rockset",
67
68
  "langchain/vectorstores/typesense",
68
69
  "langchain/vectorstores/singlestore",
69
70
  "langchain/vectorstores/tigris",
@@ -133,6 +134,7 @@ export const optionalImportEntrypoints = [
133
134
  "langchain/cache/momento",
134
135
  "langchain/cache/redis",
135
136
  "langchain/cache/ioredis",
137
+ "langchain/cache/file_system",
136
138
  "langchain/cache/upstash_redis",
137
139
  "langchain/stores/doc/gcs",
138
140
  "langchain/stores/file/node",
@@ -62,6 +62,10 @@ class HttpResponseOutputParser extends output_parser_js_1.BaseTransformOutputPar
62
62
  */
63
63
  async parse(text) {
64
64
  const chunk = await this.outputParser.parse(text);
65
+ const encoder = new TextEncoder();
66
+ if (this.contentType === "text/event-stream") {
67
+ return encoder.encode(`event: data\ndata: ${JSON.stringify(chunk)}\n\n`);
68
+ }
65
69
  let parsedChunk;
66
70
  if (typeof chunk === "string") {
67
71
  parsedChunk = chunk;
@@ -69,10 +73,6 @@ class HttpResponseOutputParser extends output_parser_js_1.BaseTransformOutputPar
69
73
  else {
70
74
  parsedChunk = JSON.stringify(chunk);
71
75
  }
72
- const encoder = new TextEncoder();
73
- if (this.contentType === "text/event-stream") {
74
- return encoder.encode(`event: data\ndata: ${parsedChunk}\n\n`);
75
- }
76
76
  return encoder.encode(parsedChunk);
77
77
  }
78
78
  getFormatInstructions() {
@@ -59,6 +59,10 @@ export class HttpResponseOutputParser extends BaseTransformOutputParser {
59
59
  */
60
60
  async parse(text) {
61
61
  const chunk = await this.outputParser.parse(text);
62
+ const encoder = new TextEncoder();
63
+ if (this.contentType === "text/event-stream") {
64
+ return encoder.encode(`event: data\ndata: ${JSON.stringify(chunk)}\n\n`);
65
+ }
62
66
  let parsedChunk;
63
67
  if (typeof chunk === "string") {
64
68
  parsedChunk = chunk;
@@ -66,10 +70,6 @@ export class HttpResponseOutputParser extends BaseTransformOutputParser {
66
70
  else {
67
71
  parsedChunk = JSON.stringify(chunk);
68
72
  }
69
- const encoder = new TextEncoder();
70
- if (this.contentType === "text/event-stream") {
71
- return encoder.encode(`event: data\ndata: ${parsedChunk}\n\n`);
72
- }
73
73
  return encoder.encode(parsedChunk);
74
74
  }
75
75
  getFormatInstructions() {
@@ -7,6 +7,7 @@ const IdColumnSymbol = Symbol("id");
7
7
  const ContentColumnSymbol = Symbol("content");
8
8
  const OpMap = {
9
9
  equals: "=",
10
+ in: "IN",
10
11
  lt: "<",
11
12
  lte: "<=",
12
13
  gt: ">",
@@ -236,9 +237,20 @@ class PrismaVectorStore extends base_js_1.VectorStore {
236
237
  return this.Prisma.join(Object.entries(filter).flatMap(([key, ops]) => Object.entries(ops).map(([opName, value]) => {
237
238
  // column name, operators cannot be parametrised
238
239
  // these fields are thus not escaped by Prisma and can be dangerous if user input is used
240
+ const opNameKey = opName;
239
241
  const colRaw = this.Prisma.raw(`"${key}"`);
240
- const opRaw = this.Prisma.raw(OpMap[opName]);
241
- return this.Prisma.sql `${colRaw} ${opRaw} ${value}`;
242
+ const opRaw = this.Prisma.raw(OpMap[opNameKey]);
243
+ switch (OpMap[opNameKey]) {
244
+ case OpMap.in: {
245
+ if (!Array.isArray(value) ||
246
+ !value.every((v) => typeof v === "string")) {
247
+ throw new Error(`Invalid filter: IN operator requires an array of strings. Received: ${JSON.stringify(value, null, 2)}`);
248
+ }
249
+ return this.Prisma.sql `${colRaw} ${opRaw} (${value.join(",")})`;
250
+ }
251
+ default:
252
+ return this.Prisma.sql `${colRaw} ${opRaw} ${value}`;
253
+ }
242
254
  })), " AND ", " WHERE ");
243
255
  }
244
256
  /**
@@ -32,6 +32,7 @@ type ModelColumns<TModel extends Record<string, unknown>> = {
32
32
  export type PrismaSqlFilter<TModel extends Record<string, unknown>> = {
33
33
  [K in keyof TModel]?: {
34
34
  equals?: TModel[K];
35
+ in?: TModel[K][];
35
36
  lt?: TModel[K];
36
37
  lte?: TModel[K];
37
38
  gt?: TModel[K];
@@ -4,6 +4,7 @@ const IdColumnSymbol = Symbol("id");
4
4
  const ContentColumnSymbol = Symbol("content");
5
5
  const OpMap = {
6
6
  equals: "=",
7
+ in: "IN",
7
8
  lt: "<",
8
9
  lte: "<=",
9
10
  gt: ">",
@@ -233,9 +234,20 @@ class PrismaVectorStore extends VectorStore {
233
234
  return this.Prisma.join(Object.entries(filter).flatMap(([key, ops]) => Object.entries(ops).map(([opName, value]) => {
234
235
  // column name, operators cannot be parametrised
235
236
  // these fields are thus not escaped by Prisma and can be dangerous if user input is used
237
+ const opNameKey = opName;
236
238
  const colRaw = this.Prisma.raw(`"${key}"`);
237
- const opRaw = this.Prisma.raw(OpMap[opName]);
238
- return this.Prisma.sql `${colRaw} ${opRaw} ${value}`;
239
+ const opRaw = this.Prisma.raw(OpMap[opNameKey]);
240
+ switch (OpMap[opNameKey]) {
241
+ case OpMap.in: {
242
+ if (!Array.isArray(value) ||
243
+ !value.every((v) => typeof v === "string")) {
244
+ throw new Error(`Invalid filter: IN operator requires an array of strings. Received: ${JSON.stringify(value, null, 2)}`);
245
+ }
246
+ return this.Prisma.sql `${colRaw} ${opRaw} (${value.join(",")})`;
247
+ }
248
+ default:
249
+ return this.Prisma.sql `${colRaw} ${opRaw} ${value}`;
250
+ }
239
251
  })), " AND ", " WHERE ");
240
252
  }
241
253
  /**