langchain 0.0.193 → 0.0.195
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/embeddings/hf.cjs +1 -2
- package/dist/embeddings/hf.js +1 -2
- package/dist/experimental/openai_assistant/index.cjs +2 -1
- package/dist/experimental/openai_assistant/index.d.ts +2 -1
- package/dist/experimental/openai_assistant/index.js +2 -1
- package/dist/experimental/openai_files/index.cjs +88 -0
- package/dist/experimental/openai_files/index.d.ts +79 -0
- package/dist/experimental/openai_files/index.js +84 -0
- package/dist/load/import_constants.cjs +1 -0
- package/dist/load/import_constants.js +1 -0
- package/dist/load/import_map.cjs +3 -2
- package/dist/load/import_map.d.ts +1 -0
- package/dist/load/import_map.js +1 -0
- package/dist/retrievers/chaindesk.cjs +9 -1
- package/dist/retrievers/chaindesk.d.ts +3 -1
- package/dist/retrievers/chaindesk.js +9 -1
- package/dist/retrievers/self_query/vectara.cjs +138 -0
- package/dist/retrievers/self_query/vectara.d.ts +41 -0
- package/dist/retrievers/self_query/vectara.js +134 -0
- package/dist/vectorstores/vectara.cjs +77 -7
- package/dist/vectorstores/vectara.d.ts +9 -3
- package/dist/vectorstores/vectara.js +54 -7
- package/experimental/openai_files.cjs +1 -0
- package/experimental/openai_files.d.ts +1 -0
- package/experimental/openai_files.js +1 -0
- package/package.json +18 -2
- package/retrievers/self_query/vectara.cjs +1 -0
- package/retrievers/self_query/vectara.d.ts +1 -0
- package/retrievers/self_query/vectara.js +1 -0
package/dist/embeddings/hf.cjs
CHANGED
@@ -36,8 +36,7 @@ class HuggingFaceInferenceEmbeddings extends base_js_1.Embeddings {
             writable: true,
             value: void 0
         });
-        this.model =
-            fields?.model ?? "sentence-transformers/distilbert-base-nli-mean-tokens";
+        this.model = fields?.model ?? "BAAI/bge-base-en-v1.5";
         this.apiKey =
             fields?.apiKey ?? (0, env_js_1.getEnvironmentVariable)("HUGGINGFACEHUB_API_KEY");
         this.endpointUrl = fields?.endpointUrl;
package/dist/embeddings/hf.js
CHANGED
@@ -33,8 +33,7 @@ export class HuggingFaceInferenceEmbeddings extends Embeddings {
             writable: true,
             value: void 0
         });
-        this.model =
-            fields?.model ?? "sentence-transformers/distilbert-base-nli-mean-tokens";
+        this.model = fields?.model ?? "BAAI/bge-base-en-v1.5";
         this.apiKey =
             fields?.apiKey ?? getEnvironmentVariable("HUGGINGFACEHUB_API_KEY");
         this.endpointUrl = fields?.endpointUrl;
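The only change in both embeddings builds is the default Hugging Face model, which moves from `sentence-transformers/distilbert-base-nli-mean-tokens` to `BAAI/bge-base-en-v1.5`. A minimal usage sketch, assuming the `langchain/embeddings/hf` entrypoint and a `HUGGINGFACEHUB_API_KEY` in the environment; pin the old model explicitly if you need the previous behaviour:

```typescript
import { HuggingFaceInferenceEmbeddings } from "langchain/embeddings/hf";

// Picks up the new default model "BAAI/bge-base-en-v1.5" and the
// HUGGINGFACEHUB_API_KEY environment variable.
const embeddings = new HuggingFaceInferenceEmbeddings();

// To keep the pre-0.0.195 behaviour, pin the old default explicitly.
const legacyEmbeddings = new HuggingFaceInferenceEmbeddings({
  model: "sentence-transformers/distilbert-base-nli-mean-tokens",
});

const vectors = await embeddings.embedDocuments(["hello world"]);
```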
package/dist/experimental/openai_assistant/index.cjs
CHANGED

@@ -43,7 +43,7 @@ class OpenAIAssistantRunnable extends base_js_1.Runnable {
         this.assistantId = fields.assistantId;
         this.asAgent = fields.asAgent ?? this.asAgent;
     }
-    static async createAssistant({ model, name, instructions, tools, client, clientOptions, asAgent, pollIntervalMs, }) {
+    static async createAssistant({ model, name, instructions, tools, client, clientOptions, asAgent, pollIntervalMs, fileIds, }) {
         const formattedTools = tools?.map((tool) => {
             // eslint-disable-next-line no-instanceof/no-instanceof
             if (tool instanceof base_js_2.StructuredTool) {
@@ -57,6 +57,7 @@ class OpenAIAssistantRunnable extends base_js_1.Runnable {
             instructions,
             tools: formattedTools,
             model,
+            file_ids: fileIds,
         });
         return new this({
             client: oaiClient,
package/dist/experimental/openai_assistant/index.d.ts
CHANGED

@@ -20,11 +20,12 @@ export declare class OpenAIAssistantRunnable<AsAgent extends boolean | undefined
     pollIntervalMs: number;
     asAgent?: AsAgent;
     constructor(fields: OpenAIAssistantRunnableInput<AsAgent>);
-    static createAssistant<AsAgent extends boolean>({ model, name, instructions, tools, client, clientOptions, asAgent, pollIntervalMs, }: Omit<OpenAIAssistantRunnableInput<AsAgent>, "assistantId"> & {
+    static createAssistant<AsAgent extends boolean>({ model, name, instructions, tools, client, clientOptions, asAgent, pollIntervalMs, fileIds, }: Omit<OpenAIAssistantRunnableInput<AsAgent>, "assistantId"> & {
         model: string;
         name?: string;
         instructions?: string;
         tools?: OpenAIToolType | Array<StructuredTool>;
+        fileIds?: string[];
     }): Promise<OpenAIAssistantRunnable<AsAgent, Record<string, any>>>;
     invoke(input: RunInput, _options?: RunnableConfig): Promise<ExtractRunOutput<AsAgent>>;
     private _parseStepsInput;
package/dist/experimental/openai_assistant/index.js
CHANGED

@@ -40,7 +40,7 @@ export class OpenAIAssistantRunnable extends Runnable {
         this.assistantId = fields.assistantId;
         this.asAgent = fields.asAgent ?? this.asAgent;
     }
-    static async createAssistant({ model, name, instructions, tools, client, clientOptions, asAgent, pollIntervalMs, }) {
+    static async createAssistant({ model, name, instructions, tools, client, clientOptions, asAgent, pollIntervalMs, fileIds, }) {
         const formattedTools = tools?.map((tool) => {
             // eslint-disable-next-line no-instanceof/no-instanceof
             if (tool instanceof StructuredTool) {
@@ -54,6 +54,7 @@ export class OpenAIAssistantRunnable extends Runnable {
             instructions,
             tools: formattedTools,
             model,
+            file_ids: fileIds,
         });
         return new this({
             client: oaiClient,
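`createAssistant` now accepts an optional `fileIds` array that is forwarded to the OpenAI Assistants API as `file_ids`. A hedged sketch of how this might be used, assuming a file has already been uploaded; the `file-abc123` ID and the `retrieval` tool are placeholders, not values from this diff:

```typescript
import { OpenAIAssistantRunnable } from "langchain/experimental/openai_assistant";

const assistant = await OpenAIAssistantRunnable.createAssistant({
  model: "gpt-4-1106-preview",
  name: "Report analyst",
  instructions: "Answer questions using the attached files.",
  tools: [{ type: "retrieval" }],
  // New in this release: forwarded to the Assistants API as `file_ids`.
  fileIds: ["file-abc123"],
});

const result = await assistant.invoke({
  content: "Summarize the attached report.",
});
```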
package/dist/experimental/openai_files/index.cjs
ADDED

@@ -0,0 +1,88 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.OpenAIFiles = void 0;
+const openai_1 = require("openai");
+const serializable_js_1 = require("../../load/serializable.cjs");
+class OpenAIFiles extends serializable_js_1.Serializable {
+    constructor(fields) {
+        super(fields);
+        Object.defineProperty(this, "lc_namespace", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: ["langchain", "experimental"]
+        });
+        Object.defineProperty(this, "oaiClient", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.oaiClient = fields?.client ?? new openai_1.OpenAI(fields?.clientOptions);
+    }
+    /**
+     * Upload file
+     * Upload a file that can be used across various endpoints. The size of all the files uploaded by one organization can be up to 100 GB.
+     *
+     * @note The size of individual files can be a maximum of 512 MB. See the Assistants Tools guide to learn more about the types of files supported. The Fine-tuning API only supports .jsonl files.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/create}
+     * @param {OpenAIClient.FileCreateParams['file']} file
+     * @param {OpenAIClient.FileCreateParams['purpose']} purpose
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileObject>}
+     */
+    async createFile({ file, purpose, options, }) {
+        return this.oaiClient.files.create({ file, purpose }, options);
+    }
+    /**
+     * Delete a file.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/delete}
+     * @param {string} fileId
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileDeleted>}
+     */
+    async deleteFile({ fileId, options, }) {
+        return this.oaiClient.files.del(fileId, options);
+    }
+    /**
+     * List files
+     * Returns a list of files that belong to the user's organization.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/list}
+     * @param {OpenAIClient.Files.FileListParams | undefined} query
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileObjectsPage>}
+     */
+    async listFiles(props) {
+        return this.oaiClient.files.list(props?.query, props?.options);
+    }
+    /**
+     * Retrieve file
+     * Returns information about a specific file.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/retrieve}
+     * @param {string} fileId
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileObject>}
+     */
+    async retrieveFile({ fileId, options, }) {
+        return this.oaiClient.files.retrieve(fileId, options);
+    }
+    /**
+     * Retrieve file content
+     * Returns the contents of the specified file.
+     *
+     * @note You can't retrieve the contents of a file that was uploaded with the "purpose": "assistants" API.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/retrieve-contents}
+     * @param {string} fileId
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<string>}
+     */
+    async retrieveFileContent({ fileId, options, }) {
+        return this.oaiClient.files.retrieveContent(fileId, options);
+    }
+}
+exports.OpenAIFiles = OpenAIFiles;
package/dist/experimental/openai_files/index.d.ts
ADDED

@@ -0,0 +1,79 @@
+import { ClientOptions, OpenAI as OpenAIClient } from "openai";
+import { Serializable } from "../../load/serializable.js";
+export type OpenAIFilesInput = {
+    client?: OpenAIClient;
+    clientOptions?: ClientOptions;
+};
+export declare class OpenAIFiles extends Serializable {
+    lc_namespace: string[];
+    private oaiClient;
+    constructor(fields?: OpenAIFilesInput);
+    /**
+     * Upload file
+     * Upload a file that can be used across various endpoints. The size of all the files uploaded by one organization can be up to 100 GB.
+     *
+     * @note The size of individual files can be a maximum of 512 MB. See the Assistants Tools guide to learn more about the types of files supported. The Fine-tuning API only supports .jsonl files.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/create}
+     * @param {OpenAIClient.FileCreateParams['file']} file
+     * @param {OpenAIClient.FileCreateParams['purpose']} purpose
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileObject>}
+     */
+    createFile({ file, purpose, options, }: OpenAIClient.FileCreateParams & {
+        options?: OpenAIClient.RequestOptions;
+    }): Promise<OpenAIClient.Files.FileObject>;
+    /**
+     * Delete a file.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/delete}
+     * @param {string} fileId
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileDeleted>}
+     */
+    deleteFile({ fileId, options, }: {
+        fileId: string;
+        options?: OpenAIClient.RequestOptions;
+    }): Promise<OpenAIClient.Files.FileDeleted>;
+    /**
+     * List files
+     * Returns a list of files that belong to the user's organization.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/list}
+     * @param {OpenAIClient.Files.FileListParams | undefined} query
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileObjectsPage>}
+     */
+    listFiles(props?: {
+        query?: OpenAIClient.Files.FileListParams;
+        options?: OpenAIClient.RequestOptions;
+    }): Promise<OpenAIClient.Files.FileObjectsPage>;
+    /**
+     * Retrieve file
+     * Returns information about a specific file.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/retrieve}
+     * @param {string} fileId
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileObject>}
+     */
+    retrieveFile({ fileId, options, }: {
+        fileId: string;
+        options?: OpenAIClient.RequestOptions;
+    }): Promise<OpenAIClient.Files.FileObject>;
+    /**
+     * Retrieve file content
+     * Returns the contents of the specified file.
+     *
+     * @note You can't retrieve the contents of a file that was uploaded with the "purpose": "assistants" API.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/retrieve-contents}
+     * @param {string} fileId
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<string>}
+     */
+    retrieveFileContent({ fileId, options, }: {
+        fileId: string;
+        options?: OpenAIClient.RequestOptions;
+    }): Promise<string>;
+}
package/dist/experimental/openai_files/index.js
ADDED

@@ -0,0 +1,84 @@
+import { OpenAI as OpenAIClient } from "openai";
+import { Serializable } from "../../load/serializable.js";
+export class OpenAIFiles extends Serializable {
+    constructor(fields) {
+        super(fields);
+        Object.defineProperty(this, "lc_namespace", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: ["langchain", "experimental"]
+        });
+        Object.defineProperty(this, "oaiClient", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        this.oaiClient = fields?.client ?? new OpenAIClient(fields?.clientOptions);
+    }
+    /**
+     * Upload file
+     * Upload a file that can be used across various endpoints. The size of all the files uploaded by one organization can be up to 100 GB.
+     *
+     * @note The size of individual files can be a maximum of 512 MB. See the Assistants Tools guide to learn more about the types of files supported. The Fine-tuning API only supports .jsonl files.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/create}
+     * @param {OpenAIClient.FileCreateParams['file']} file
+     * @param {OpenAIClient.FileCreateParams['purpose']} purpose
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileObject>}
+     */
+    async createFile({ file, purpose, options, }) {
+        return this.oaiClient.files.create({ file, purpose }, options);
+    }
+    /**
+     * Delete a file.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/delete}
+     * @param {string} fileId
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileDeleted>}
+     */
+    async deleteFile({ fileId, options, }) {
+        return this.oaiClient.files.del(fileId, options);
+    }
+    /**
+     * List files
+     * Returns a list of files that belong to the user's organization.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/list}
+     * @param {OpenAIClient.Files.FileListParams | undefined} query
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileObjectsPage>}
+     */
+    async listFiles(props) {
+        return this.oaiClient.files.list(props?.query, props?.options);
+    }
+    /**
+     * Retrieve file
+     * Returns information about a specific file.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/retrieve}
+     * @param {string} fileId
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<OpenAIClient.Files.FileObject>}
+     */
+    async retrieveFile({ fileId, options, }) {
+        return this.oaiClient.files.retrieve(fileId, options);
+    }
+    /**
+     * Retrieve file content
+     * Returns the contents of the specified file.
+     *
+     * @note You can't retrieve the contents of a file that was uploaded with the "purpose": "assistants" API.
+     *
+     * @link {https://platform.openai.com/docs/api-reference/files/retrieve-contents}
+     * @param {string} fileId
+     * @param {OpenAIClient.RequestOptions | undefined} options
+     * @returns {Promise<string>}
+     */
+    async retrieveFileContent({ fileId, options, }) {
+        return this.oaiClient.files.retrieveContent(fileId, options);
+    }
+}
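The new `OpenAIFiles` class is a thin Serializable wrapper over the OpenAI Files API. A minimal usage sketch, assuming the `langchain/experimental/openai_files` entrypoint added in this release and an `OPENAI_API_KEY` in the environment; the file name is a placeholder:

```typescript
import * as fs from "node:fs";
import { OpenAIFiles } from "langchain/experimental/openai_files";

const files = new OpenAIFiles();

// Upload a local file for use with the Assistants API.
const uploaded = await files.createFile({
  file: fs.createReadStream("report.pdf"),
  purpose: "assistants",
});

// The returned ID can be passed to createAssistant({ fileIds: [...] }).
console.log(uploaded.id);

// Remove the file once it is no longer needed.
await files.deleteFile({ fileId: uploaded.id });
```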
package/dist/load/import_constants.cjs
CHANGED

@@ -133,6 +133,7 @@ exports.optionalImportEntrypoints = [
     "langchain/retrievers/self_query/pinecone",
     "langchain/retrievers/self_query/supabase",
     "langchain/retrievers/self_query/weaviate",
+    "langchain/retrievers/self_query/vectara",
     "langchain/cache/cloudflare_kv",
     "langchain/cache/momento",
     "langchain/cache/redis",
package/dist/load/import_constants.js
CHANGED

@@ -130,6 +130,7 @@ export const optionalImportEntrypoints = [
     "langchain/retrievers/self_query/pinecone",
     "langchain/retrievers/self_query/supabase",
     "langchain/retrievers/self_query/weaviate",
+    "langchain/retrievers/self_query/vectara",
     "langchain/cache/cloudflare_kv",
     "langchain/cache/momento",
     "langchain/cache/redis",
package/dist/load/import_map.cjs
CHANGED
@@ -25,8 +25,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__web__searchapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__fake = exports.llms__yandex = exports.llms__fireworks = exports.llms__ollama = exports.llms__cloudflare_workersai = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__voyage = exports.embeddings__minimax = exports.embeddings__openai = exports.embeddings__ollama = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains__combine_documents__reduce = exports.chains = exports.tools__render = exports.tools = exports.base_language = exports.agents__openai__output_parser = exports.agents__xml__output_parser = exports.agents__react__output_parser = exports.agents__format_scratchpad__log_to_message = exports.agents__format_scratchpad__xml = exports.agents__format_scratchpad__log = exports.agents__format_scratchpad__openai_tools = exports.agents__format_scratchpad = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
-exports.
-exports.runnables__remote = void 0;
+exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__ollama_functions = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__openai_files = exports.experimental__openai_assistant = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__tavily_search_api = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__chaindesk = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__prompt_template = exports.schema__output_parser = exports.schema__document = exports.schema = exports.chat_models__fake = exports.chat_models__yandex = exports.chat_models__minimax = exports.chat_models__ollama = exports.chat_models__baiduwenxin = exports.chat_models__fireworks = exports.chat_models__cloudflare_workersai = void 0;
+exports.runnables__remote = exports.evaluation = void 0;
 exports.load__serializable = __importStar(require("../load/serializable.cjs"));
 exports.agents = __importStar(require("../agents/index.cjs"));
 exports.agents__toolkits = __importStar(require("../agents/toolkits/index.cjs"));
@@ -120,6 +120,7 @@ exports.util__math = __importStar(require("../util/math.cjs"));
 exports.util__time = __importStar(require("../util/time.cjs"));
 exports.experimental__autogpt = __importStar(require("../experimental/autogpt/index.cjs"));
 exports.experimental__openai_assistant = __importStar(require("../experimental/openai_assistant/index.cjs"));
+exports.experimental__openai_files = __importStar(require("../experimental/openai_files/index.cjs"));
 exports.experimental__babyagi = __importStar(require("../experimental/babyagi/index.cjs"));
 exports.experimental__generative_agents = __importStar(require("../experimental/generative_agents/index.cjs"));
 exports.experimental__plan_and_execute = __importStar(require("../experimental/plan_and_execute/index.cjs"));
package/dist/load/import_map.d.ts
CHANGED

@@ -91,6 +91,7 @@ export * as util__math from "../util/math.js";
 export * as util__time from "../util/time.js";
 export * as experimental__autogpt from "../experimental/autogpt/index.js";
 export * as experimental__openai_assistant from "../experimental/openai_assistant/index.js";
+export * as experimental__openai_files from "../experimental/openai_files/index.js";
 export * as experimental__babyagi from "../experimental/babyagi/index.js";
 export * as experimental__generative_agents from "../experimental/generative_agents/index.js";
 export * as experimental__plan_and_execute from "../experimental/plan_and_execute/index.js";
package/dist/load/import_map.js
CHANGED
@@ -92,6 +92,7 @@ export * as util__math from "../util/math.js";
 export * as util__time from "../util/time.js";
 export * as experimental__autogpt from "../experimental/autogpt/index.js";
 export * as experimental__openai_assistant from "../experimental/openai_assistant/index.js";
+export * as experimental__openai_files from "../experimental/openai_files/index.js";
 export * as experimental__babyagi from "../experimental/babyagi/index.js";
 export * as experimental__generative_agents from "../experimental/generative_agents/index.js";
 export * as experimental__plan_and_execute from "../experimental/plan_and_execute/index.js";
package/dist/retrievers/chaindesk.cjs
CHANGED

@@ -19,7 +19,7 @@ class ChaindeskRetriever extends retriever_js_1.BaseRetriever {
     static lc_name() {
         return "ChaindeskRetriever";
     }
-    constructor({ datastoreId, apiKey, topK, ...rest }) {
+    constructor({ datastoreId, apiKey, topK, filter, ...rest }) {
         super();
         Object.defineProperty(this, "lc_namespace", {
             enumerable: true,
@@ -45,6 +45,12 @@ class ChaindeskRetriever extends retriever_js_1.BaseRetriever {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "filter", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
         Object.defineProperty(this, "apiKey", {
             enumerable: true,
             configurable: true,
@@ -55,6 +61,7 @@ class ChaindeskRetriever extends retriever_js_1.BaseRetriever {
         this.datastoreId = datastoreId;
         this.apiKey = apiKey;
         this.topK = topK;
+        this.filter = filter;
     }
     async getRelevantDocuments(query) {
         const r = await this.caller.call(fetch, `https://app.chaindesk.ai/api/datastores/${this.datastoreId}/query`, {
@@ -62,6 +69,7 @@ class ChaindeskRetriever extends retriever_js_1.BaseRetriever {
             body: JSON.stringify({
                 query,
                 ...(this.topK ? { topK: this.topK } : {}),
+                ...(this.filter ? { filters: this.filter } : {}),
             }),
             headers: {
                 "Content-Type": "application/json",
package/dist/retrievers/chaindesk.d.ts
CHANGED

@@ -4,6 +4,7 @@ import { AsyncCaller, type AsyncCallerParams } from "../util/async_caller.js";
 export interface ChaindeskRetrieverArgs extends AsyncCallerParams, BaseRetrieverInput {
     datastoreId: string;
     topK?: number;
+    filter?: Record<string, unknown>;
     apiKey?: string;
 }
 /**
@@ -23,7 +24,8 @@ export declare class ChaindeskRetriever extends BaseRetriever {
     caller: AsyncCaller;
     datastoreId: string;
     topK?: number;
+    filter?: Record<string, unknown>;
     apiKey?: string;
-    constructor({ datastoreId, apiKey, topK, ...rest }: ChaindeskRetrieverArgs);
+    constructor({ datastoreId, apiKey, topK, filter, ...rest }: ChaindeskRetrieverArgs);
     getRelevantDocuments(query: string): Promise<Document[]>;
 }
package/dist/retrievers/chaindesk.js
CHANGED

@@ -16,7 +16,7 @@ export class ChaindeskRetriever extends BaseRetriever {
     static lc_name() {
         return "ChaindeskRetriever";
     }
-    constructor({ datastoreId, apiKey, topK, ...rest }) {
+    constructor({ datastoreId, apiKey, topK, filter, ...rest }) {
         super();
         Object.defineProperty(this, "lc_namespace", {
             enumerable: true,
@@ -42,6 +42,12 @@ export class ChaindeskRetriever extends BaseRetriever {
             writable: true,
             value: void 0
         });
+        Object.defineProperty(this, "filter", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
         Object.defineProperty(this, "apiKey", {
             enumerable: true,
             configurable: true,
@@ -52,6 +58,7 @@ export class ChaindeskRetriever extends BaseRetriever {
         this.datastoreId = datastoreId;
         this.apiKey = apiKey;
         this.topK = topK;
+        this.filter = filter;
     }
     async getRelevantDocuments(query) {
         const r = await this.caller.call(fetch, `https://app.chaindesk.ai/api/datastores/${this.datastoreId}/query`, {
@@ -59,6 +66,7 @@ export class ChaindeskRetriever extends BaseRetriever {
             body: JSON.stringify({
                 query,
                 ...(this.topK ? { topK: this.topK } : {}),
+                ...(this.filter ? { filters: this.filter } : {}),
             }),
             headers: {
                 "Content-Type": "application/json",
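`ChaindeskRetriever` now accepts an optional `filter` object that is sent verbatim in the query request body as `filters`. A hedged sketch with placeholder credentials; the exact filter keys Chaindesk accepts are defined by the Chaindesk API, and `custom_ids` below is illustrative only:

```typescript
import { ChaindeskRetriever } from "langchain/retrievers/chaindesk";

const retriever = new ChaindeskRetriever({
  datastoreId: "YOUR_DATASTORE_ID",
  apiKey: "YOUR_CHAINDESK_API_KEY",
  topK: 5,
  // New in this release: forwarded as `filters` in the query request body.
  filter: { custom_ids: ["doc-42"] },
});

const docs = await retriever.getRelevantDocuments("pricing plans");
```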
package/dist/retrievers/self_query/vectara.cjs
ADDED

@@ -0,0 +1,138 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.VectaraTranslator = void 0;
+const ir_js_1 = require("../../chains/query_constructor/ir.cjs");
+const base_js_1 = require("./base.cjs");
+const utils_js_1 = require("./utils.cjs");
+function processValue(value) {
+    /** Convert a value to a string and add single quotes if it is a string. */
+    if (typeof value === "string") {
+        return `'${value}'`;
+    }
+    else {
+        return String(value);
+    }
+}
+class VectaraTranslator extends base_js_1.BaseTranslator {
+    constructor() {
+        super(...arguments);
+        Object.defineProperty(this, "allowedOperators", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: [ir_js_1.Operators.and, ir_js_1.Operators.or]
+        });
+        Object.defineProperty(this, "allowedComparators", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: [
+                ir_js_1.Comparators.eq,
+                ir_js_1.Comparators.ne,
+                ir_js_1.Comparators.lt,
+                ir_js_1.Comparators.lte,
+                ir_js_1.Comparators.gt,
+                ir_js_1.Comparators.gte,
+            ]
+        });
+    }
+    formatFunction(func) {
+        if (func in ir_js_1.Comparators) {
+            if (this.allowedComparators.length > 0 &&
+                this.allowedComparators.indexOf(func) === -1) {
+                throw new Error(`Comparator ${func} not allowed. Allowed operators: ${this.allowedComparators.join(", ")}`);
+            }
+        }
+        else if (func in ir_js_1.Operators) {
+            if (this.allowedOperators.length > 0 &&
+                this.allowedOperators.indexOf(func) === -1) {
+                throw new Error(`Operator ${func} not allowed. Allowed operators: ${this.allowedOperators.join(", ")}`);
+            }
+        }
+        else {
+            throw new Error("Unknown comparator or operator");
+        }
+        const mapDict = {
+            and: " and ",
+            or: " or ",
+            eq: "=",
+            ne: "!=",
+            lt: "<",
+            lte: "<=",
+            gt: ">",
+            gte: ">=",
+        };
+        return mapDict[func];
+    }
+    /**
+     * Visits an operation and returns a VectaraOperationResult. The
+     * operation's arguments are visited and the operator is formatted.
+     * @param operation The operation to visit.
+     * @returns A VectaraOperationResult.
+     */
+    visitOperation(operation) {
+        const args = operation.args?.map((arg) => arg.accept(this));
+        const operator = this.formatFunction(operation.operator);
+        return `( ${args.join(operator)} )`;
+    }
+    /**
+     * Visits a comparison and returns a VectaraComparisonResult. The
+     * comparison's value is checked for type and the comparator is formatted.
+     * Throws an error if the value type is not supported.
+     * @param comparison The comparison to visit.
+     * @returns A VectaraComparisonResult.
+     */
+    visitComparison(comparison) {
+        const comparator = this.formatFunction(comparison.comparator);
+        return `( doc.${comparison.attribute} ${comparator} ${processValue(comparison.value)} )`;
+    }
+    /**
+     * Visits a structured query and returns a VectaraStructuredQueryResult.
+     * If the query has a filter, it is visited.
+     * @param query The structured query to visit.
+     * @returns A VectaraStructuredQueryResult.
+     */
+    visitStructuredQuery(query) {
+        let nextArg = {};
+        if (query.filter) {
+            nextArg = {
+                filter: { filter: query.filter.accept(this) },
+            };
+        }
+        return nextArg;
+    }
+    mergeFilters(defaultFilter, generatedFilter, mergeType = "and", forceDefaultFilter = false) {
+        if ((0, utils_js_1.isFilterEmpty)(defaultFilter) && (0, utils_js_1.isFilterEmpty)(generatedFilter)) {
+            return undefined;
+        }
+        if ((0, utils_js_1.isFilterEmpty)(defaultFilter) || mergeType === "replace") {
+            if ((0, utils_js_1.isFilterEmpty)(generatedFilter)) {
+                return undefined;
+            }
+            return generatedFilter;
+        }
+        if ((0, utils_js_1.isFilterEmpty)(generatedFilter)) {
+            if (forceDefaultFilter) {
+                return defaultFilter;
+            }
+            if (mergeType === "and") {
+                return undefined;
+            }
+            return defaultFilter;
+        }
+        if (mergeType === "and") {
+            return {
+                filter: `${defaultFilter} and ${generatedFilter}`,
+            };
+        }
+        else if (mergeType === "or") {
+            return {
+                filter: `${defaultFilter} or ${generatedFilter}`,
+            };
+        }
+        else {
+            throw new Error("Unknown merge type");
+        }
+    }
+}
+exports.VectaraTranslator = VectaraTranslator;
package/dist/retrievers/self_query/vectara.d.ts
ADDED

@@ -0,0 +1,41 @@
+import { Comparator, Comparison, Operation, Operator, StructuredQuery } from "../../chains/query_constructor/ir.js";
+import { VectaraFilter, VectaraStore } from "../../vectorstores/vectara.js";
+import { BaseTranslator } from "./base.js";
+export type VectaraVisitorResult = VectaraOperationResult | VectaraComparisonResult | VectaraVisitorStructuredQueryResult;
+export type VectaraOperationResult = String;
+export type VectaraComparisonResult = String;
+export type VectaraVisitorStructuredQueryResult = {
+    filter?: {
+        filter?: VectaraOperationResult | VectaraComparisonResult;
+    };
+};
+export declare class VectaraTranslator<T extends VectaraStore> extends BaseTranslator<T> {
+    VisitOperationOutput: VectaraOperationResult;
+    VisitComparisonOutput: VectaraComparisonResult;
+    allowedOperators: Operator[];
+    allowedComparators: Comparator[];
+    formatFunction(func: Operator | Comparator): string;
+    /**
+     * Visits an operation and returns a VectaraOperationResult. The
+     * operation's arguments are visited and the operator is formatted.
+     * @param operation The operation to visit.
+     * @returns A VectaraOperationResult.
+     */
+    visitOperation(operation: Operation): this["VisitOperationOutput"];
+    /**
+     * Visits a comparison and returns a VectaraComparisonResult. The
+     * comparison's value is checked for type and the comparator is formatted.
+     * Throws an error if the value type is not supported.
+     * @param comparison The comparison to visit.
+     * @returns A VectaraComparisonResult.
+     */
+    visitComparison(comparison: Comparison): this["VisitComparisonOutput"];
+    /**
+     * Visits a structured query and returns a VectaraStructuredQueryResult.
+     * If the query has a filter, it is visited.
+     * @param query The structured query to visit.
+     * @returns A VectaraStructuredQueryResult.
+     */
+    visitStructuredQuery(query: StructuredQuery): this["VisitStructuredQueryOutput"];
+    mergeFilters(defaultFilter: VectaraFilter | undefined, generatedFilter: VectaraFilter | undefined, mergeType?: string, forceDefaultFilter?: boolean): VectaraFilter | undefined;
+}
package/dist/retrievers/self_query/vectara.js
ADDED

@@ -0,0 +1,134 @@
+import { Comparators, Operators, } from "../../chains/query_constructor/ir.js";
+import { BaseTranslator } from "./base.js";
+import { isFilterEmpty } from "./utils.js";
+function processValue(value) {
+    /** Convert a value to a string and add single quotes if it is a string. */
+    if (typeof value === "string") {
+        return `'${value}'`;
+    }
+    else {
+        return String(value);
+    }
+}
+export class VectaraTranslator extends BaseTranslator {
+    constructor() {
+        super(...arguments);
+        Object.defineProperty(this, "allowedOperators", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: [Operators.and, Operators.or]
+        });
+        Object.defineProperty(this, "allowedComparators", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: [
+                Comparators.eq,
+                Comparators.ne,
+                Comparators.lt,
+                Comparators.lte,
+                Comparators.gt,
+                Comparators.gte,
+            ]
+        });
+    }
+    formatFunction(func) {
+        if (func in Comparators) {
+            if (this.allowedComparators.length > 0 &&
+                this.allowedComparators.indexOf(func) === -1) {
+                throw new Error(`Comparator ${func} not allowed. Allowed operators: ${this.allowedComparators.join(", ")}`);
+            }
+        }
+        else if (func in Operators) {
+            if (this.allowedOperators.length > 0 &&
+                this.allowedOperators.indexOf(func) === -1) {
+                throw new Error(`Operator ${func} not allowed. Allowed operators: ${this.allowedOperators.join(", ")}`);
+            }
+        }
+        else {
+            throw new Error("Unknown comparator or operator");
+        }
+        const mapDict = {
+            and: " and ",
+            or: " or ",
+            eq: "=",
+            ne: "!=",
+            lt: "<",
+            lte: "<=",
+            gt: ">",
+            gte: ">=",
+        };
+        return mapDict[func];
+    }
+    /**
+     * Visits an operation and returns a VectaraOperationResult. The
+     * operation's arguments are visited and the operator is formatted.
+     * @param operation The operation to visit.
+     * @returns A VectaraOperationResult.
+     */
+    visitOperation(operation) {
+        const args = operation.args?.map((arg) => arg.accept(this));
+        const operator = this.formatFunction(operation.operator);
+        return `( ${args.join(operator)} )`;
+    }
+    /**
+     * Visits a comparison and returns a VectaraComparisonResult. The
+     * comparison's value is checked for type and the comparator is formatted.
+     * Throws an error if the value type is not supported.
+     * @param comparison The comparison to visit.
+     * @returns A VectaraComparisonResult.
+     */
+    visitComparison(comparison) {
+        const comparator = this.formatFunction(comparison.comparator);
+        return `( doc.${comparison.attribute} ${comparator} ${processValue(comparison.value)} )`;
+    }
+    /**
+     * Visits a structured query and returns a VectaraStructuredQueryResult.
+     * If the query has a filter, it is visited.
+     * @param query The structured query to visit.
+     * @returns A VectaraStructuredQueryResult.
+     */
+    visitStructuredQuery(query) {
+        let nextArg = {};
+        if (query.filter) {
+            nextArg = {
+                filter: { filter: query.filter.accept(this) },
+            };
+        }
+        return nextArg;
+    }
+    mergeFilters(defaultFilter, generatedFilter, mergeType = "and", forceDefaultFilter = false) {
+        if (isFilterEmpty(defaultFilter) && isFilterEmpty(generatedFilter)) {
+            return undefined;
+        }
+        if (isFilterEmpty(defaultFilter) || mergeType === "replace") {
+            if (isFilterEmpty(generatedFilter)) {
+                return undefined;
+            }
+            return generatedFilter;
+        }
+        if (isFilterEmpty(generatedFilter)) {
+            if (forceDefaultFilter) {
+                return defaultFilter;
+            }
+            if (mergeType === "and") {
+                return undefined;
+            }
+            return defaultFilter;
+        }
+        if (mergeType === "and") {
+            return {
+                filter: `${defaultFilter} and ${generatedFilter}`,
+            };
+        }
+        else if (mergeType === "or") {
+            return {
+                filter: `${defaultFilter} or ${generatedFilter}`,
+            };
+        }
+        else {
+            throw new Error("Unknown merge type");
+        }
+    }
+}
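The new translator plugs Vectara into the self-query retriever: an LLM-generated structured query is converted into Vectara's metadata-filter syntax (for example `( doc.year > 2010 )`). A rough sketch of how it might be wired up, assuming the documented `SelfQueryRetriever.fromLLM` API, illustrative attribute metadata, and placeholder Vectara credentials; this is not taken from the diff itself:

```typescript
import { OpenAI } from "langchain/llms/openai";
import { AttributeInfo } from "langchain/schema/query_constructor";
import { SelfQueryRetriever } from "langchain/retrievers/self_query";
import { VectaraTranslator } from "langchain/retrievers/self_query/vectara";
import { VectaraStore } from "langchain/vectorstores/vectara";

// Placeholder credentials; Vectara performs embedding server-side.
const vectorStore = new VectaraStore({
  customerId: Number(process.env.VECTARA_CUSTOMER_ID),
  corpusId: Number(process.env.VECTARA_CORPUS_ID),
  apiKey: process.env.VECTARA_API_KEY ?? "",
});

const retriever = SelfQueryRetriever.fromLLM({
  llm: new OpenAI(),
  vectorStore,
  documentContents: "Brief summary of a movie",
  attributeInfo: [
    new AttributeInfo("genre", "string", "The genre of the movie"),
    new AttributeInfo("year", "number", "The year the movie was released"),
  ],
  // New in this release: translates structured queries into Vectara filters.
  structuredQueryTranslator: new VectaraTranslator<VectaraStore>(),
});

const docs = await retriever.getRelevantDocuments("Which sci-fi movies came out after 2010?");
```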
package/dist/vectorstores/vectara.cjs
CHANGED

@@ -1,6 +1,30 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+    __setModuleDefault(result, mod);
+    return result;
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.VectaraStore = void 0;
+const uuid = __importStar(require("uuid"));
 const document_js_1 = require("../document.cjs");
 const fake_js_1 = require("../embeddings/fake.cjs");
 const env_js_1 = require("../util/env.cjs");
@@ -130,22 +154,64 @@ class VectaraStore extends base_js_1.VectorStore {
     async addVectors(_vectors, _documents) {
         throw new Error("Method not implemented. Please call addDocuments instead.");
     }
+    /**
+     * Method to delete data from the Vectara corpus.
+     * @param params an array of document IDs to be deleted
+     * @returns Promise that resolves when the deletion is complete.
+     */
+    async deleteDocuments(ids) {
+        if (ids && ids.length > 0) {
+            const headers = await this.getJsonHeader();
+            for (const id of ids) {
+                const data = {
+                    customer_id: this.customerId,
+                    corpus_id: this.corpusId[0],
+                    document_id: id,
+                };
+                try {
+                    const controller = new AbortController();
+                    const timeout = setTimeout(() => controller.abort(), this.vectaraApiTimeoutSeconds * 1000);
+                    const response = await fetch(`https://${this.apiEndpoint}/v1/delete-doc`, {
+                        method: "POST",
+                        headers: headers?.headers,
+                        body: JSON.stringify(data),
+                        signal: controller.signal,
+                    });
+                    clearTimeout(timeout);
+                    if (response.status !== 200) {
+                        throw new Error(`Vectara API returned status code ${response.status} when deleting document ${id}`);
+                    }
+                }
+                catch (e) {
+                    const error = new Error(`Error ${e.message}`);
+                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                    error.code = 500;
+                    throw error;
+                }
+            }
+        }
+        else {
+            throw new Error(`no "ids" specified for deletion`);
+        }
+    }
     /**
      * Adds documents to the Vectara store.
      * @param documents An array of Document objects to add to the Vectara store.
-     * @returns A Promise that resolves
+     * @returns A Promise that resolves to an array of document IDs indexed in Vectara.
      */
     async addDocuments(documents) {
         if (this.corpusId.length > 1)
             throw new Error("addDocuments does not support multiple corpus ids");
         const headers = await this.getJsonHeader();
+        const doc_ids = [];
         let countAdded = 0;
-        for (const
+        for (const document of documents) {
+            const doc_id = document.metadata?.document_id ?? uuid.v4();
             const data = {
                 customer_id: this.customerId,
                 corpus_id: this.corpusId[0],
                 document: {
-                    document_id:
+                    document_id: doc_id,
                     title: document.metadata?.title ?? "",
                     metadata_json: JSON.stringify(document.metadata ?? {}),
                     section: [
@@ -175,6 +241,7 @@ class VectaraStore extends base_js_1.VectorStore {
             }
             else {
                 countAdded += 1;
+                doc_ids.push(doc_id);
             }
         }
         catch (e) {
@@ -187,6 +254,7 @@ class VectaraStore extends base_js_1.VectorStore {
         if (this.verbose) {
             console.log(`Added ${countAdded} documents to Vectara`);
         }
+        return doc_ids;
     }
     /**
      * Vectara provides a way to add documents directly via their API. This API handles
@@ -200,13 +268,13 @@ class VectaraStore extends base_js_1.VectorStore {
     async addFiles(files, metadatas = undefined) {
         if (this.corpusId.length > 1)
             throw new Error("addFiles does not support multiple corpus ids");
-
+        const doc_ids = [];
         for (const [index, file] of files.entries()) {
             const md = metadatas ? metadatas[index] : {};
             const data = new FormData();
             data.append("file", file.blob, file.fileName);
             data.append("doc-metadata", JSON.stringify(md));
-            const response = await fetch(`https://api.vectara.io/v1/upload?c=${this.customerId}&o=${this.corpusId[0]}`, {
+            const response = await fetch(`https://api.vectara.io/v1/upload?c=${this.customerId}&o=${this.corpusId[0]}&d=true`, {
                 method: "POST",
                 headers: {
                     "x-api-key": this.apiKey,
@@ -222,13 +290,15 @@ class VectaraStore extends base_js_1.VectorStore {
                 throw new Error(`Vectara API returned status code ${status}`);
             }
             else {
-
+                const result = await response.json();
+                const doc_id = result.document.documentId;
+                doc_ids.push(doc_id);
             }
         }
         if (this.verbose) {
             console.log(`Uploaded ${files.length} files to Vectara`);
         }
-        return
+        return doc_ids;
     }
     /**
      * Performs a similarity search and returns documents along with their
package/dist/vectorstores/vectara.d.ts
CHANGED

@@ -79,12 +79,18 @@ export declare class VectaraStore extends VectorStore {
      * @returns Does not return a value.
      */
     addVectors(_vectors: number[][], _documents: Document[]): Promise<void>;
+    /**
+     * Method to delete data from the Vectara corpus.
+     * @param params an array of document IDs to be deleted
+     * @returns Promise that resolves when the deletion is complete.
+     */
+    deleteDocuments(ids: string[]): Promise<void>;
     /**
      * Adds documents to the Vectara store.
      * @param documents An array of Document objects to add to the Vectara store.
-     * @returns A Promise that resolves
+     * @returns A Promise that resolves to an array of document IDs indexed in Vectara.
      */
-    addDocuments(documents: Document[]): Promise<
+    addDocuments(documents: Document[]): Promise<string[]>;
     /**
      * Vectara provides a way to add documents directly via their API. This API handles
      * pre-processing and chunking internally in an optimal manner. This method is a wrapper
@@ -94,7 +100,7 @@ export declare class VectaraStore extends VectorStore {
      * @param metadata Optional. An array of metadata objects corresponding to each file in the `filePaths` array.
      * @returns A Promise that resolves to the number of successfully uploaded files.
      */
-    addFiles(files: VectaraFile[], metadatas?: Record<string, unknown> | undefined): Promise<
+    addFiles(files: VectaraFile[], metadatas?: Record<string, unknown> | undefined): Promise<string[]>;
     /**
      * Performs a similarity search and returns documents along with their
      * scores.
package/dist/vectorstores/vectara.js
CHANGED

@@ -1,3 +1,4 @@
+import * as uuid from "uuid";
 import { Document } from "../document.js";
 import { FakeEmbeddings } from "../embeddings/fake.js";
 import { getEnvironmentVariable } from "../util/env.js";
@@ -127,22 +128,64 @@ export class VectaraStore extends VectorStore {
     async addVectors(_vectors, _documents) {
         throw new Error("Method not implemented. Please call addDocuments instead.");
     }
+    /**
+     * Method to delete data from the Vectara corpus.
+     * @param params an array of document IDs to be deleted
+     * @returns Promise that resolves when the deletion is complete.
+     */
+    async deleteDocuments(ids) {
+        if (ids && ids.length > 0) {
+            const headers = await this.getJsonHeader();
+            for (const id of ids) {
+                const data = {
+                    customer_id: this.customerId,
+                    corpus_id: this.corpusId[0],
+                    document_id: id,
+                };
+                try {
+                    const controller = new AbortController();
+                    const timeout = setTimeout(() => controller.abort(), this.vectaraApiTimeoutSeconds * 1000);
+                    const response = await fetch(`https://${this.apiEndpoint}/v1/delete-doc`, {
+                        method: "POST",
+                        headers: headers?.headers,
+                        body: JSON.stringify(data),
+                        signal: controller.signal,
+                    });
+                    clearTimeout(timeout);
+                    if (response.status !== 200) {
+                        throw new Error(`Vectara API returned status code ${response.status} when deleting document ${id}`);
+                    }
+                }
+                catch (e) {
+                    const error = new Error(`Error ${e.message}`);
+                    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+                    error.code = 500;
+                    throw error;
+                }
+            }
+        }
+        else {
+            throw new Error(`no "ids" specified for deletion`);
+        }
+    }
     /**
      * Adds documents to the Vectara store.
      * @param documents An array of Document objects to add to the Vectara store.
-     * @returns A Promise that resolves
+     * @returns A Promise that resolves to an array of document IDs indexed in Vectara.
     */
     async addDocuments(documents) {
         if (this.corpusId.length > 1)
             throw new Error("addDocuments does not support multiple corpus ids");
         const headers = await this.getJsonHeader();
+        const doc_ids = [];
         let countAdded = 0;
-        for (const
+        for (const document of documents) {
+            const doc_id = document.metadata?.document_id ?? uuid.v4();
             const data = {
                 customer_id: this.customerId,
                 corpus_id: this.corpusId[0],
                 document: {
-                    document_id:
+                    document_id: doc_id,
                     title: document.metadata?.title ?? "",
                     metadata_json: JSON.stringify(document.metadata ?? {}),
                     section: [
@@ -172,6 +215,7 @@ export class VectaraStore extends VectorStore {
             }
             else {
                 countAdded += 1;
+                doc_ids.push(doc_id);
             }
         }
         catch (e) {
@@ -184,6 +228,7 @@ export class VectaraStore extends VectorStore {
         if (this.verbose) {
             console.log(`Added ${countAdded} documents to Vectara`);
         }
+        return doc_ids;
     }
     /**
      * Vectara provides a way to add documents directly via their API. This API handles
@@ -197,13 +242,13 @@ export class VectaraStore extends VectorStore {
     async addFiles(files, metadatas = undefined) {
         if (this.corpusId.length > 1)
             throw new Error("addFiles does not support multiple corpus ids");
-
+        const doc_ids = [];
         for (const [index, file] of files.entries()) {
             const md = metadatas ? metadatas[index] : {};
             const data = new FormData();
             data.append("file", file.blob, file.fileName);
             data.append("doc-metadata", JSON.stringify(md));
-            const response = await fetch(`https://api.vectara.io/v1/upload?c=${this.customerId}&o=${this.corpusId[0]}`, {
+            const response = await fetch(`https://api.vectara.io/v1/upload?c=${this.customerId}&o=${this.corpusId[0]}&d=true`, {
                 method: "POST",
                 headers: {
                     "x-api-key": this.apiKey,
@@ -219,13 +264,15 @@ export class VectaraStore extends VectorStore {
                 throw new Error(`Vectara API returned status code ${status}`);
             }
             else {
-
+                const result = await response.json();
+                const doc_id = result.document.documentId;
+                doc_ids.push(doc_id);
             }
         }
         if (this.verbose) {
             console.log(`Uploaded ${files.length} files to Vectara`);
         }
-        return
+        return doc_ids;
     }
     /**
      * Performs a similarity search and returns documents along with their
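Taken together, the Vectara store changes make indexing round-trippable: `addDocuments` and `addFiles` now resolve to the Vectara document IDs they created (file uploads request the ID back via the `d=true` query parameter), and a new `deleteDocuments` method removes documents by ID. A minimal sketch, assuming the store is constructed from placeholder environment credentials:

```typescript
import { Document } from "langchain/document";
import { VectaraStore } from "langchain/vectorstores/vectara";

const store = new VectaraStore({
  customerId: Number(process.env.VECTARA_CUSTOMER_ID),
  corpusId: Number(process.env.VECTARA_CORPUS_ID),
  apiKey: process.env.VECTARA_API_KEY ?? "",
});

// New in this release: addDocuments resolves to the indexed document IDs.
const ids = await store.addDocuments([
  new Document({ pageContent: "Vectara chunks and embeds documents server-side." }),
]);

// ...which can later be handed to the new deleteDocuments method.
await store.deleteDocuments(ids);
```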
package/experimental/openai_files.cjs
ADDED

@@ -0,0 +1 @@
+module.exports = require('../dist/experimental/openai_files/index.cjs');

package/experimental/openai_files.d.ts
ADDED

@@ -0,0 +1 @@
+export * from '../dist/experimental/openai_files/index.js'

package/experimental/openai_files.js
ADDED

@@ -0,0 +1 @@
+export * from '../dist/experimental/openai_files/index.js'
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "langchain",
-  "version": "0.0.
+  "version": "0.0.195",
   "description": "Typescript bindings for langchain",
   "type": "module",
   "engines": {
@@ -646,6 +646,9 @@
     "retrievers/self_query/weaviate.cjs",
     "retrievers/self_query/weaviate.js",
     "retrievers/self_query/weaviate.d.ts",
+    "retrievers/self_query/vectara.cjs",
+    "retrievers/self_query/vectara.js",
+    "retrievers/self_query/vectara.d.ts",
     "retrievers/vespa.cjs",
     "retrievers/vespa.js",
     "retrievers/vespa.d.ts",
@@ -766,6 +769,9 @@
     "experimental/openai_assistant.cjs",
     "experimental/openai_assistant.js",
     "experimental/openai_assistant.d.ts",
+    "experimental/openai_files.cjs",
+    "experimental/openai_files.js",
+    "experimental/openai_files.d.ts",
     "experimental/babyagi.cjs",
     "experimental/babyagi.js",
     "experimental/babyagi.d.ts",
@@ -1388,7 +1394,7 @@
     "uuid": "^9.0.0",
     "yaml": "^2.2.1",
     "zod": "^3.22.3",
-    "zod-to-json-schema": "
+    "zod-to-json-schema": "3.20.3"
   },
   "publishConfig": {
     "access": "public"
@@ -2468,6 +2474,11 @@
       "import": "./retrievers/self_query/weaviate.js",
       "require": "./retrievers/self_query/weaviate.cjs"
     },
+    "./retrievers/self_query/vectara": {
+      "types": "./retrievers/self_query/vectara.d.ts",
+      "import": "./retrievers/self_query/vectara.js",
+      "require": "./retrievers/self_query/vectara.cjs"
+    },
     "./retrievers/vespa": {
       "types": "./retrievers/vespa.d.ts",
       "import": "./retrievers/vespa.js",
@@ -2668,6 +2679,11 @@
       "import": "./experimental/openai_assistant.js",
       "require": "./experimental/openai_assistant.cjs"
     },
+    "./experimental/openai_files": {
+      "types": "./experimental/openai_files.d.ts",
+      "import": "./experimental/openai_files.js",
+      "require": "./experimental/openai_files.cjs"
+    },
     "./experimental/babyagi": {
       "types": "./experimental/babyagi.d.ts",
       "import": "./experimental/babyagi.js",
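With the `files` entries and subpath exports above in place, the new modules resolve like any other optional entrypoint; for example (specifiers taken from the exports map added in this diff):

```typescript
import { OpenAIFiles } from "langchain/experimental/openai_files";
import { VectaraTranslator } from "langchain/retrievers/self_query/vectara";
```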
package/retrievers/self_query/vectara.cjs
ADDED

@@ -0,0 +1 @@
+module.exports = require('../../dist/retrievers/self_query/vectara.cjs');

package/retrievers/self_query/vectara.d.ts
ADDED

@@ -0,0 +1 @@
+export * from '../../dist/retrievers/self_query/vectara.js'

package/retrievers/self_query/vectara.js
ADDED

@@ -0,0 +1 @@
+export * from '../../dist/retrievers/self_query/vectara.js'