langchain 0.0.198 → 0.0.200

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/dist/agents/index.cjs +3 -1
  2. package/dist/agents/index.d.ts +2 -2
  3. package/dist/agents/index.js +1 -1
  4. package/dist/agents/toolkits/conversational_retrieval/tool.cjs +1 -1
  5. package/dist/agents/toolkits/conversational_retrieval/tool.js +1 -1
  6. package/dist/chains/conversational_retrieval_chain.cjs +16 -2
  7. package/dist/chains/conversational_retrieval_chain.d.ts +2 -0
  8. package/dist/chains/conversational_retrieval_chain.js +16 -2
  9. package/dist/chat_models/fake.cjs +2 -114
  10. package/dist/chat_models/fake.d.ts +1 -52
  11. package/dist/chat_models/fake.js +1 -113
  12. package/dist/chat_models/llama_cpp.cjs +43 -21
  13. package/dist/chat_models/llama_cpp.d.ts +2 -1
  14. package/dist/chat_models/llama_cpp.js +44 -22
  15. package/dist/chat_models/minimax.d.ts +1 -1
  16. package/dist/document_loaders/fs/chatgpt.cjs +85 -0
  17. package/dist/document_loaders/fs/chatgpt.d.ts +8 -0
  18. package/dist/document_loaders/fs/chatgpt.js +81 -0
  19. package/dist/document_loaders/web/confluence.cjs +31 -7
  20. package/dist/document_loaders/web/confluence.d.ts +12 -5
  21. package/dist/document_loaders/web/confluence.js +31 -7
  22. package/dist/embeddings/gradient_ai.cjs +102 -0
  23. package/dist/embeddings/gradient_ai.d.ts +48 -0
  24. package/dist/embeddings/gradient_ai.js +98 -0
  25. package/dist/llms/gradient_ai.cjs +112 -0
  26. package/dist/llms/gradient_ai.d.ts +55 -0
  27. package/dist/llms/gradient_ai.js +108 -0
  28. package/dist/llms/llama_cpp.cjs +2 -1
  29. package/dist/llms/llama_cpp.d.ts +1 -1
  30. package/dist/llms/llama_cpp.js +2 -1
  31. package/dist/llms/watsonx_ai.cjs +154 -0
  32. package/dist/llms/watsonx_ai.d.ts +72 -0
  33. package/dist/llms/watsonx_ai.js +150 -0
  34. package/dist/load/import_constants.cjs +4 -0
  35. package/dist/load/import_constants.js +4 -0
  36. package/dist/load/import_map.cjs +4 -3
  37. package/dist/load/import_map.d.ts +1 -0
  38. package/dist/load/import_map.js +1 -0
  39. package/dist/memory/vector_store.cjs +1 -1
  40. package/dist/memory/vector_store.js +1 -1
  41. package/dist/tools/google_places.cjs +81 -0
  42. package/dist/tools/google_places.d.ts +21 -0
  43. package/dist/tools/google_places.js +77 -0
  44. package/dist/tools/webbrowser.cjs +1 -1
  45. package/dist/tools/webbrowser.js +1 -1
  46. package/dist/util/document.cjs +1 -1
  47. package/dist/util/document.d.ts +1 -1
  48. package/dist/util/document.js +1 -1
  49. package/dist/util/tiktoken.cjs +15 -24
  50. package/dist/util/tiktoken.d.ts +1 -9
  51. package/dist/util/tiktoken.js +1 -21
  52. package/dist/vectorstores/elasticsearch.cjs +16 -3
  53. package/dist/vectorstores/elasticsearch.d.ts +6 -2
  54. package/dist/vectorstores/elasticsearch.js +16 -3
  55. package/dist/vectorstores/prisma.cjs +1 -1
  56. package/dist/vectorstores/prisma.js +1 -1
  57. package/dist/vectorstores/weaviate.d.ts +1 -1
  58. package/dist/vectorstores/xata.cjs +3 -2
  59. package/dist/vectorstores/xata.js +3 -2
  60. package/document_loaders/fs/chatgpt.cjs +1 -0
  61. package/document_loaders/fs/chatgpt.d.ts +1 -0
  62. package/document_loaders/fs/chatgpt.js +1 -0
  63. package/embeddings/gradient_ai.cjs +1 -0
  64. package/embeddings/gradient_ai.d.ts +1 -0
  65. package/embeddings/gradient_ai.js +1 -0
  66. package/llms/gradient_ai.cjs +1 -0
  67. package/llms/gradient_ai.d.ts +1 -0
  68. package/llms/gradient_ai.js +1 -0
  69. package/llms/watsonx_ai.cjs +1 -0
  70. package/llms/watsonx_ai.d.ts +1 -0
  71. package/llms/watsonx_ai.js +1 -0
  72. package/package.json +58 -11
  73. package/tools/google_places.cjs +1 -0
  74. package/tools/google_places.d.ts +1 -0
  75. package/tools/google_places.js +1 -0
@@ -0,0 +1,85 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.ChatGPTLoader = void 0;
4
+ const text_js_1 = require("./text.cjs");
5
+ const document_js_1 = require("../../document.cjs");
6
/**
 * Combine message information in a readable format ready to be used.
 * @param {ChatGPTMessage} message - Message to be concatenated
 * @param {string} title - Title of the conversation
 *
 * @returns {string} Concatenated message, or "" when the message is absent
 */
function concatenateRows(message, title) {
    if (!message) {
        return "";
    }
    // Exports may omit the author; fall back to a placeholder role.
    const sender = message.author ? message.author.role : "unknown";
    // Defensive: some export nodes may lack content/parts (the function
    // already anticipates absent messages above). The original
    // `message.content.parts[0]` threw a TypeError on such nodes; treat
    // them as empty text instead.
    const text = message.content?.parts?.[0] ?? "";
    // create_time is a Unix timestamp in seconds; render as "YYYY-MM-DD HH:MM:SS" UTC.
    const date = new Date(message.create_time * 1000)
        .toISOString()
        .slice(0, 19)
        .replace("T", " ");
    return `${title} - ${sender} on ${date}: ${text}\n\n`;
}
25
/**
 * Document loader for ChatGPT conversation exports (conversations.json).
 * Each conversation in the export becomes one Document whose page content
 * is the transcript produced by concatenateRows.
 */
class ChatGPTLoader extends text_js_1.TextLoader {
    /**
     * @param {string | Blob} filePathOrBlob - Path to the export file, or a Blob with its contents.
     * @param {number} [numLogs=0] - Maximum number of conversations to load; 0 loads all of them.
     */
    constructor(filePathOrBlob, numLogs = 0) {
        super(filePathOrBlob);
        Object.defineProperty(this, "numLogs", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        this.numLogs = numLogs;
    }
    /**
     * Parses the raw JSON export into one transcript string per conversation.
     * @param {string} raw - Raw file contents.
     * @returns {Promise<string[]>} One concatenated transcript per conversation.
     * @throws {Error} If the contents are not valid JSON.
     */
    async parse(raw) {
        let data;
        try {
            data = JSON.parse(raw);
        }
        catch (e) {
            console.error(e);
            throw new Error("Failed to parse JSON");
        }
        const truncatedData = this.numLogs > 0 ? data.slice(0, this.numLogs) : data;
        return truncatedData.map((d) => Object.values(d.mapping)
            // Drop a leading system message. Optional chaining is the fix:
            // a mapping entry's message can be null (concatenateRows already
            // guards for absent messages), and the original unguarded
            // `msg.message.author.role` threw a TypeError on such entries.
            .filter((msg, idx) => !(idx === 0 && msg.message?.author?.role === "system"))
            .map((msg) => concatenateRows(msg.message, d.title))
            .join(""));
    }
    /**
     * Loads the export from the path or Blob given at construction and
     * returns one Document per conversation.
     * @returns {Promise<Document[]>} Documents with `logIndex` metadata (1-based).
     * @throws {Error} If the file or blob cannot be read.
     */
    async load() {
        let text;
        let metadata;
        if (typeof this.filePathOrBlob === "string") {
            const { readFile } = await text_js_1.TextLoader.imports();
            try {
                text = await readFile(this.filePathOrBlob, "utf8");
            }
            catch (e) {
                console.error(e);
                throw new Error("Failed to read file");
            }
            metadata = { source: this.filePathOrBlob };
        }
        else {
            try {
                text = await this.filePathOrBlob.text();
            }
            catch (e) {
                console.error(e);
                throw new Error("Failed to read blob");
            }
            metadata = { source: "blob", blobType: this.filePathOrBlob.type };
        }
        const parsed = await this.parse(text);
        // logIndex is 1-based, matching the conversation order in the export.
        return parsed.map((pageContent, i) => new document_js_1.Document({
            pageContent,
            metadata: {
                ...metadata,
                logIndex: i + 1,
            },
        }));
    }
}
exports.ChatGPTLoader = ChatGPTLoader;
@@ -0,0 +1,8 @@
1
+ import { TextLoader } from "./text.js";
2
+ import { Document } from "../../document.js";
3
/**
 * Document loader for ChatGPT conversation exports (conversations.json).
 * Each conversation in the export is turned into a single Document.
 */
export declare class ChatGPTLoader extends TextLoader {
    /** Maximum number of conversations to load; 0 loads all of them. */
    numLogs: number;
    constructor(filePathOrBlob: string | Blob, numLogs?: number);
    /** Parses the raw JSON export into one transcript string per conversation. */
    protected parse(raw: string): Promise<string[]>;
    /** Reads the file or Blob and returns one Document per conversation. */
    load(): Promise<Document[]>;
}
@@ -0,0 +1,81 @@
1
+ import { TextLoader } from "./text.js";
2
+ import { Document } from "../../document.js";
3
/**
 * Combine message information in a readable format ready to be used.
 * @param {ChatGPTMessage} message - Message to be concatenated
 * @param {string} title - Title of the conversation
 *
 * @returns {string} Concatenated message, or "" when the message is absent
 */
function concatenateRows(message, title) {
    if (!message) {
        return "";
    }
    // Exports may omit the author; fall back to a placeholder role.
    const sender = message.author ? message.author.role : "unknown";
    // Defensive: some export nodes may lack content/parts (the function
    // already anticipates absent messages above). The original
    // `message.content.parts[0]` threw a TypeError on such nodes; treat
    // them as empty text instead.
    const text = message.content?.parts?.[0] ?? "";
    // create_time is a Unix timestamp in seconds; render as "YYYY-MM-DD HH:MM:SS" UTC.
    const date = new Date(message.create_time * 1000)
        .toISOString()
        .slice(0, 19)
        .replace("T", " ");
    return `${title} - ${sender} on ${date}: ${text}\n\n`;
}
22
/**
 * Document loader for ChatGPT conversation exports (conversations.json).
 * Each conversation in the export becomes one Document whose page content
 * is the transcript produced by concatenateRows.
 */
export class ChatGPTLoader extends TextLoader {
    /**
     * @param {string | Blob} filePathOrBlob - Path to the export file, or a Blob with its contents.
     * @param {number} [numLogs=0] - Maximum number of conversations to load; 0 loads all of them.
     */
    constructor(filePathOrBlob, numLogs = 0) {
        super(filePathOrBlob);
        Object.defineProperty(this, "numLogs", {
            enumerable: true,
            configurable: true,
            writable: true,
            value: void 0
        });
        this.numLogs = numLogs;
    }
    /**
     * Parses the raw JSON export into one transcript string per conversation.
     * @param {string} raw - Raw file contents.
     * @returns {Promise<string[]>} One concatenated transcript per conversation.
     * @throws {Error} If the contents are not valid JSON.
     */
    async parse(raw) {
        let data;
        try {
            data = JSON.parse(raw);
        }
        catch (e) {
            console.error(e);
            throw new Error("Failed to parse JSON");
        }
        const truncatedData = this.numLogs > 0 ? data.slice(0, this.numLogs) : data;
        return truncatedData.map((d) => Object.values(d.mapping)
            // Drop a leading system message. Optional chaining is the fix:
            // a mapping entry's message can be null (concatenateRows already
            // guards for absent messages), and the original unguarded
            // `msg.message.author.role` threw a TypeError on such entries.
            .filter((msg, idx) => !(idx === 0 && msg.message?.author?.role === "system"))
            .map((msg) => concatenateRows(msg.message, d.title))
            .join(""));
    }
    /**
     * Loads the export from the path or Blob given at construction and
     * returns one Document per conversation.
     * @returns {Promise<Document[]>} Documents with `logIndex` metadata (1-based).
     * @throws {Error} If the file or blob cannot be read.
     */
    async load() {
        let text;
        let metadata;
        if (typeof this.filePathOrBlob === "string") {
            const { readFile } = await TextLoader.imports();
            try {
                text = await readFile(this.filePathOrBlob, "utf8");
            }
            catch (e) {
                console.error(e);
                throw new Error("Failed to read file");
            }
            metadata = { source: this.filePathOrBlob };
        }
        else {
            try {
                text = await this.filePathOrBlob.text();
            }
            catch (e) {
                console.error(e);
                throw new Error("Failed to read blob");
            }
            metadata = { source: "blob", blobType: this.filePathOrBlob.type };
        }
        const parsed = await this.parse(text);
        // logIndex is 1-based, matching the conversation order in the export.
        return parsed.map((pageContent, i) => new Document({
            pageContent,
            metadata: {
                ...metadata,
                logIndex: i + 1,
            },
        }));
    }
}
@@ -19,7 +19,7 @@ const base_js_1 = require("../base.cjs");
19
19
  * ```
20
20
  */
21
21
  class ConfluencePagesLoader extends base_js_1.BaseDocumentLoader {
22
- constructor({ baseUrl, spaceKey, username, accessToken, limit = 25, }) {
22
+ constructor({ baseUrl, spaceKey, username, accessToken, limit = 25, personalAccessToken, }) {
23
23
  super();
24
24
  Object.defineProperty(this, "baseUrl", {
25
25
  enumerable: true,
@@ -51,11 +51,32 @@ class ConfluencePagesLoader extends base_js_1.BaseDocumentLoader {
51
51
  writable: true,
52
52
  value: void 0
53
53
  });
54
+ Object.defineProperty(this, "personalAccessToken", {
55
+ enumerable: true,
56
+ configurable: true,
57
+ writable: true,
58
+ value: void 0
59
+ });
54
60
  this.baseUrl = baseUrl;
55
61
  this.spaceKey = spaceKey;
56
62
  this.username = username;
57
63
  this.accessToken = accessToken;
58
64
  this.limit = limit;
65
+ this.personalAccessToken = personalAccessToken;
66
+ }
67
+ /**
68
+ * Returns the authorization header for the request.
69
+ * @returns The authorization header as a string, or undefined if no credentials were provided.
70
+ */
71
+ get authorizationHeader() {
72
+ if (this.personalAccessToken) {
73
+ return `Bearer ${this.personalAccessToken}`;
74
+ }
75
+ else if (this.username && this.accessToken) {
76
+ const authToken = Buffer.from(`${this.username}:${this.accessToken}`).toString("base64");
77
+ return `Basic ${authToken}`;
78
+ }
79
+ return undefined;
59
80
  }
60
81
  /**
61
82
  * Fetches all the pages in the specified space and converts each page to
@@ -79,13 +100,16 @@ class ConfluencePagesLoader extends base_js_1.BaseDocumentLoader {
79
100
  */
80
101
  async fetchConfluenceData(url) {
81
102
  try {
82
- const authToken = Buffer.from(`${this.username}:${this.accessToken}`).toString("base64");
103
+ const initialHeaders = {
104
+ "Content-Type": "application/json",
105
+ Accept: "application/json",
106
+ };
107
+ const authHeader = this.authorizationHeader;
108
+ if (authHeader) {
109
+ initialHeaders.Authorization = authHeader;
110
+ }
83
111
  const response = await fetch(url, {
84
- headers: {
85
- Authorization: `Basic ${authToken}`,
86
- "Content-Type": "application/json",
87
- Accept: "application/json",
88
- },
112
+ headers: initialHeaders,
89
113
  });
90
114
  if (!response.ok) {
91
115
  throw new Error(`Failed to fetch ${url} from Confluence: ${response.status}`);
@@ -7,8 +7,9 @@ import { BaseDocumentLoader } from "../base.js";
7
7
  export interface ConfluencePagesLoaderParams {
8
8
  baseUrl: string;
9
9
  spaceKey: string;
10
- username: string;
11
- accessToken: string;
10
+ username?: string;
11
+ accessToken?: string;
12
+ personalAccessToken?: string;
12
13
  limit?: number;
13
14
  }
14
15
  /**
@@ -47,10 +48,16 @@ export interface ConfluenceAPIResponse {
47
48
  export declare class ConfluencePagesLoader extends BaseDocumentLoader {
48
49
  readonly baseUrl: string;
49
50
  readonly spaceKey: string;
50
- readonly username: string;
51
- readonly accessToken: string;
51
+ readonly username?: string;
52
+ readonly accessToken?: string;
52
53
  readonly limit: number;
53
- constructor({ baseUrl, spaceKey, username, accessToken, limit, }: ConfluencePagesLoaderParams);
54
+ readonly personalAccessToken?: string;
55
+ constructor({ baseUrl, spaceKey, username, accessToken, limit, personalAccessToken, }: ConfluencePagesLoaderParams);
56
+ /**
57
+ * Returns the authorization header for the request.
58
+ * @returns The authorization header as a string, or undefined if no credentials were provided.
59
+ */
60
+ private get authorizationHeader();
54
61
  /**
55
62
  * Fetches all the pages in the specified space and converts each page to
56
63
  * a Document instance.
@@ -16,7 +16,7 @@ import { BaseDocumentLoader } from "../base.js";
16
16
  * ```
17
17
  */
18
18
  export class ConfluencePagesLoader extends BaseDocumentLoader {
19
- constructor({ baseUrl, spaceKey, username, accessToken, limit = 25, }) {
19
+ constructor({ baseUrl, spaceKey, username, accessToken, limit = 25, personalAccessToken, }) {
20
20
  super();
21
21
  Object.defineProperty(this, "baseUrl", {
22
22
  enumerable: true,
@@ -48,11 +48,32 @@ export class ConfluencePagesLoader extends BaseDocumentLoader {
48
48
  writable: true,
49
49
  value: void 0
50
50
  });
51
+ Object.defineProperty(this, "personalAccessToken", {
52
+ enumerable: true,
53
+ configurable: true,
54
+ writable: true,
55
+ value: void 0
56
+ });
51
57
  this.baseUrl = baseUrl;
52
58
  this.spaceKey = spaceKey;
53
59
  this.username = username;
54
60
  this.accessToken = accessToken;
55
61
  this.limit = limit;
62
+ this.personalAccessToken = personalAccessToken;
63
+ }
64
+ /**
65
+ * Returns the authorization header for the request.
66
+ * @returns The authorization header as a string, or undefined if no credentials were provided.
67
+ */
68
+ get authorizationHeader() {
69
+ if (this.personalAccessToken) {
70
+ return `Bearer ${this.personalAccessToken}`;
71
+ }
72
+ else if (this.username && this.accessToken) {
73
+ const authToken = Buffer.from(`${this.username}:${this.accessToken}`).toString("base64");
74
+ return `Basic ${authToken}`;
75
+ }
76
+ return undefined;
56
77
  }
57
78
  /**
58
79
  * Fetches all the pages in the specified space and converts each page to
@@ -76,13 +97,16 @@ export class ConfluencePagesLoader extends BaseDocumentLoader {
76
97
  */
77
98
  async fetchConfluenceData(url) {
78
99
  try {
79
- const authToken = Buffer.from(`${this.username}:${this.accessToken}`).toString("base64");
100
+ const initialHeaders = {
101
+ "Content-Type": "application/json",
102
+ Accept: "application/json",
103
+ };
104
+ const authHeader = this.authorizationHeader;
105
+ if (authHeader) {
106
+ initialHeaders.Authorization = authHeader;
107
+ }
80
108
  const response = await fetch(url, {
81
- headers: {
82
- Authorization: `Basic ${authToken}`,
83
- "Content-Type": "application/json",
84
- Accept: "application/json",
85
- },
109
+ headers: initialHeaders,
86
110
  });
87
111
  if (!response.ok) {
88
112
  throw new Error(`Failed to fetch ${url} from Confluence: ${response.status}`);
@@ -0,0 +1,102 @@
1
+ "use strict";
2
+ Object.defineProperty(exports, "__esModule", { value: true });
3
+ exports.GradientEmbeddings = void 0;
4
+ const nodejs_sdk_1 = require("@gradientai/nodejs-sdk");
5
+ const env_js_1 = require("../util/env.cjs");
6
+ const chunk_js_1 = require("../util/chunk.cjs");
7
+ const base_js_1 = require("./base.cjs");
8
+ /**
9
+ * Class for generating embeddings using the Gradient AI's API. Extends the
10
+ * Embeddings class and implements GradientEmbeddingsParams and
11
+ */
12
/**
 * Embeddings client backed by the Gradient AI API. Credentials default to
 * the GRADIENT_ACCESS_TOKEN and GRADIENT_WORKSPACE_ID environment variables
 * when not passed explicitly; both are required.
 */
class GradientEmbeddings extends base_js_1.Embeddings {
    constructor(fields) {
        super(fields);
        // Resolve credentials from the constructor fields, falling back to env.
        this.gradientAccessKey =
            fields?.gradientAccessKey ??
                (0, env_js_1.getEnvironmentVariable)("GRADIENT_ACCESS_TOKEN");
        this.workspaceId =
            fields?.workspaceId ??
                (0, env_js_1.getEnvironmentVariable)("GRADIENT_WORKSPACE_ID");
        // Number of inputs sent to the API in a single request.
        this.batchSize = 128;
        // Embeddings model handle; lazily fetched by setModel() on first use.
        this.model = undefined;
        if (!this.gradientAccessKey) {
            throw new Error("Missing Gradient AI Access Token");
        }
        if (!this.workspaceId) {
            throw new Error("Missing Gradient AI Workspace ID");
        }
    }
    /**
     * Generates embeddings for an array of documents, splitting them into
     * batches and issuing one Gradient API request per batch.
     * @param texts Documents to embed.
     * @returns One embedding vector per input document, in input order.
     */
    async embedDocuments(texts) {
        await this.setModel();
        const inputs = texts.map((input) => ({ input }));
        const batches = (0, chunk_js_1.chunkArray)(inputs, this.batchSize);
        const responses = await Promise.all(batches.map((batch) => this.caller.call(async () => this.model.generateEmbeddings({
            inputs: batch,
        }))));
        const vectors = [];
        responses.forEach((response, batchIndex) => {
            // Collect exactly one vector per input in this batch.
            for (let j = 0; j < batches[batchIndex].length; j += 1) {
                vectors.push(response.embeddings[j].embedding);
            }
        });
        return vectors;
    }
    /**
     * Generates an embedding for a single document by delegating to
     * embedDocuments.
     * @param text Document to embed.
     * @returns The embedding vector for the document.
     */
    async embedQuery(text) {
        const [embedding] = await this.embedDocuments([text]);
        return embedding;
    }
    /**
     * Fetches and caches the "bge-large" embeddings model; a no-op once the
     * model has been set.
     */
    async setModel() {
        if (this.model)
            return;
        const gradient = new nodejs_sdk_1.Gradient({
            accessToken: this.gradientAccessKey,
            workspaceId: this.workspaceId,
        });
        this.model = await gradient.getEmbeddingsModel({
            slug: "bge-large",
        });
    }
}
exports.GradientEmbeddings = GradientEmbeddings;
@@ -0,0 +1,48 @@
1
+ import { Embeddings, EmbeddingsParams } from "./base.js";
2
/**
 * Interface for GradientEmbeddings parameters. Extends EmbeddingsParams and
 * defines additional parameters specific to the GradientEmbeddings class.
 */
export interface GradientEmbeddingsParams extends EmbeddingsParams {
    /**
     * Gradient AI Access Token.
     * Provide Access Token if you do not wish to automatically pull from env.
     */
    gradientAccessKey?: string;
    /**
     * Gradient Workspace Id.
     * Provide workspace id if you do not wish to automatically pull from env.
     */
    workspaceId?: string;
}
/**
 * Class for generating embeddings using the Gradient AI API. Extends the
 * Embeddings class and implements GradientEmbeddingsParams.
 */
export declare class GradientEmbeddings extends Embeddings implements GradientEmbeddingsParams {
    gradientAccessKey?: string;
    workspaceId?: string;
    /** Number of documents sent to the API in a single request. */
    batchSize: number;
    /** Cached embeddings model; lazily initialized by setModel(). */
    model: any;
    constructor(fields: GradientEmbeddingsParams);
    /**
     * Method to generate embeddings for an array of documents. Splits the
     * documents into batches and makes requests to the Gradient API to generate
     * embeddings.
     * @param texts Array of documents to generate embeddings for.
     * @returns Promise that resolves to a 2D array of embeddings for each document.
     */
    embedDocuments(texts: string[]): Promise<number[][]>;
    /**
     * Method to generate an embedding for a single document. Calls the
     * embedDocuments method with the document as the input.
     * @param text Document to generate an embedding for.
     * @returns Promise that resolves to an embedding for the document.
     */
    embedQuery(text: string): Promise<number[]>;
    /**
     * Method to set the model to use for generating embeddings.
     * @sets the class' `model` value to that of the retrieved Embeddings Model.
     */
    setModel(): Promise<void>;
}
@@ -0,0 +1,98 @@
1
+ import { Gradient } from "@gradientai/nodejs-sdk";
2
+ import { getEnvironmentVariable } from "../util/env.js";
3
+ import { chunkArray } from "../util/chunk.js";
4
+ import { Embeddings } from "./base.js";
5
+ /**
6
+ * Class for generating embeddings using the Gradient AI's API. Extends the
7
+ * Embeddings class and implements GradientEmbeddingsParams.
8
+ */
9
/**
 * Embeddings client backed by the Gradient AI API. Credentials default to
 * the GRADIENT_ACCESS_TOKEN and GRADIENT_WORKSPACE_ID environment variables
 * when not passed explicitly; both are required.
 */
export class GradientEmbeddings extends Embeddings {
    constructor(fields) {
        super(fields);
        // Resolve credentials from the constructor fields, falling back to env.
        this.gradientAccessKey =
            fields?.gradientAccessKey ??
                getEnvironmentVariable("GRADIENT_ACCESS_TOKEN");
        this.workspaceId =
            fields?.workspaceId ?? getEnvironmentVariable("GRADIENT_WORKSPACE_ID");
        // Number of inputs sent to the API in a single request.
        this.batchSize = 128;
        // Embeddings model handle; lazily fetched by setModel() on first use.
        this.model = undefined;
        if (!this.gradientAccessKey) {
            throw new Error("Missing Gradient AI Access Token");
        }
        if (!this.workspaceId) {
            throw new Error("Missing Gradient AI Workspace ID");
        }
    }
    /**
     * Generates embeddings for an array of documents, splitting them into
     * batches and issuing one Gradient API request per batch.
     * @param texts Documents to embed.
     * @returns One embedding vector per input document, in input order.
     */
    async embedDocuments(texts) {
        await this.setModel();
        const inputs = texts.map((input) => ({ input }));
        const batches = chunkArray(inputs, this.batchSize);
        const responses = await Promise.all(batches.map((batch) => this.caller.call(async () => this.model.generateEmbeddings({
            inputs: batch,
        }))));
        const vectors = [];
        responses.forEach((response, batchIndex) => {
            // Collect exactly one vector per input in this batch.
            for (let j = 0; j < batches[batchIndex].length; j += 1) {
                vectors.push(response.embeddings[j].embedding);
            }
        });
        return vectors;
    }
    /**
     * Generates an embedding for a single document by delegating to
     * embedDocuments.
     * @param text Document to embed.
     * @returns The embedding vector for the document.
     */
    async embedQuery(text) {
        const [embedding] = await this.embedDocuments([text]);
        return embedding;
    }
    /**
     * Fetches and caches the "bge-large" embeddings model; a no-op once the
     * model has been set.
     */
    async setModel() {
        if (this.model)
            return;
        const gradient = new Gradient({
            accessToken: this.gradientAccessKey,
            workspaceId: this.workspaceId,
        });
        this.model = await gradient.getEmbeddingsModel({
            slug: "bge-large",
        });
    }
}