langchain 0.0.198 → 0.0.200

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (75)
  1. package/dist/agents/index.cjs +3 -1
  2. package/dist/agents/index.d.ts +2 -2
  3. package/dist/agents/index.js +1 -1
  4. package/dist/agents/toolkits/conversational_retrieval/tool.cjs +1 -1
  5. package/dist/agents/toolkits/conversational_retrieval/tool.js +1 -1
  6. package/dist/chains/conversational_retrieval_chain.cjs +16 -2
  7. package/dist/chains/conversational_retrieval_chain.d.ts +2 -0
  8. package/dist/chains/conversational_retrieval_chain.js +16 -2
  9. package/dist/chat_models/fake.cjs +2 -114
  10. package/dist/chat_models/fake.d.ts +1 -52
  11. package/dist/chat_models/fake.js +1 -113
  12. package/dist/chat_models/llama_cpp.cjs +43 -21
  13. package/dist/chat_models/llama_cpp.d.ts +2 -1
  14. package/dist/chat_models/llama_cpp.js +44 -22
  15. package/dist/chat_models/minimax.d.ts +1 -1
  16. package/dist/document_loaders/fs/chatgpt.cjs +85 -0
  17. package/dist/document_loaders/fs/chatgpt.d.ts +8 -0
  18. package/dist/document_loaders/fs/chatgpt.js +81 -0
  19. package/dist/document_loaders/web/confluence.cjs +31 -7
  20. package/dist/document_loaders/web/confluence.d.ts +12 -5
  21. package/dist/document_loaders/web/confluence.js +31 -7
  22. package/dist/embeddings/gradient_ai.cjs +102 -0
  23. package/dist/embeddings/gradient_ai.d.ts +48 -0
  24. package/dist/embeddings/gradient_ai.js +98 -0
  25. package/dist/llms/gradient_ai.cjs +112 -0
  26. package/dist/llms/gradient_ai.d.ts +55 -0
  27. package/dist/llms/gradient_ai.js +108 -0
  28. package/dist/llms/llama_cpp.cjs +2 -1
  29. package/dist/llms/llama_cpp.d.ts +1 -1
  30. package/dist/llms/llama_cpp.js +2 -1
  31. package/dist/llms/watsonx_ai.cjs +154 -0
  32. package/dist/llms/watsonx_ai.d.ts +72 -0
  33. package/dist/llms/watsonx_ai.js +150 -0
  34. package/dist/load/import_constants.cjs +4 -0
  35. package/dist/load/import_constants.js +4 -0
  36. package/dist/load/import_map.cjs +4 -3
  37. package/dist/load/import_map.d.ts +1 -0
  38. package/dist/load/import_map.js +1 -0
  39. package/dist/memory/vector_store.cjs +1 -1
  40. package/dist/memory/vector_store.js +1 -1
  41. package/dist/tools/google_places.cjs +81 -0
  42. package/dist/tools/google_places.d.ts +21 -0
  43. package/dist/tools/google_places.js +77 -0
  44. package/dist/tools/webbrowser.cjs +1 -1
  45. package/dist/tools/webbrowser.js +1 -1
  46. package/dist/util/document.cjs +1 -1
  47. package/dist/util/document.d.ts +1 -1
  48. package/dist/util/document.js +1 -1
  49. package/dist/util/tiktoken.cjs +15 -24
  50. package/dist/util/tiktoken.d.ts +1 -9
  51. package/dist/util/tiktoken.js +1 -21
  52. package/dist/vectorstores/elasticsearch.cjs +16 -3
  53. package/dist/vectorstores/elasticsearch.d.ts +6 -2
  54. package/dist/vectorstores/elasticsearch.js +16 -3
  55. package/dist/vectorstores/prisma.cjs +1 -1
  56. package/dist/vectorstores/prisma.js +1 -1
  57. package/dist/vectorstores/weaviate.d.ts +1 -1
  58. package/dist/vectorstores/xata.cjs +3 -2
  59. package/dist/vectorstores/xata.js +3 -2
  60. package/document_loaders/fs/chatgpt.cjs +1 -0
  61. package/document_loaders/fs/chatgpt.d.ts +1 -0
  62. package/document_loaders/fs/chatgpt.js +1 -0
  63. package/embeddings/gradient_ai.cjs +1 -0
  64. package/embeddings/gradient_ai.d.ts +1 -0
  65. package/embeddings/gradient_ai.js +1 -0
  66. package/llms/gradient_ai.cjs +1 -0
  67. package/llms/gradient_ai.d.ts +1 -0
  68. package/llms/gradient_ai.js +1 -0
  69. package/llms/watsonx_ai.cjs +1 -0
  70. package/llms/watsonx_ai.d.ts +1 -0
  71. package/llms/watsonx_ai.js +1 -0
  72. package/package.json +58 -11
  73. package/tools/google_places.cjs +1 -0
  74. package/tools/google_places.d.ts +1 -0
  75. package/tools/google_places.js +1 -0
package/dist/llms/gradient_ai.cjs
@@ -0,0 +1,112 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.GradientLLM = void 0;
+ const nodejs_sdk_1 = require("@gradientai/nodejs-sdk");
+ const base_js_1 = require("./base.cjs");
+ const env_js_1 = require("../util/env.cjs");
+ /**
+ * The GradientLLM class is used to interact with Gradient AI inference Endpoint models.
+ * This requires your Gradient AI Access Token which is autoloaded if not specified.
+ */
+ class GradientLLM extends base_js_1.LLM {
+ static lc_name() {
+ return "GradientLLM";
+ }
+ get lc_secrets() {
+ return {
+ gradientAccessKey: "GRADIENT_ACCESS_TOKEN",
+ workspaceId: "GRADIENT_WORKSPACE_ID",
+ };
+ }
+ constructor(fields) {
+ super(fields);
+ Object.defineProperty(this, "modelSlug", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "llama2-7b-chat"
+ });
+ Object.defineProperty(this, "adapterId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "gradientAccessKey", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "workspaceId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "inferenceParameters", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ // Gradient AI does not export the BaseModel type. Once it does, we can use it here.
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ Object.defineProperty(this, "model", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.modelSlug = fields?.modelSlug ?? this.modelSlug;
+ this.adapterId = fields?.adapterId;
+ this.gradientAccessKey =
+ fields?.gradientAccessKey ??
+ (0, env_js_1.getEnvironmentVariable)("GRADIENT_ACCESS_TOKEN");
+ this.workspaceId =
+ fields?.workspaceId ?? (0, env_js_1.getEnvironmentVariable)("GRADIENT_WORKSPACE_ID");
+ this.inferenceParameters = fields.inferenceParameters;
+ if (!this.gradientAccessKey) {
+ throw new Error("Missing Gradient AI Access Token");
+ }
+ if (!this.workspaceId) {
+ throw new Error("Missing Gradient AI Workspace ID");
+ }
+ }
+ _llmType() {
+ return "gradient_ai";
+ }
+ /**
+ * Calls the Gradient AI endpoint and retrieves the result.
+ * @param {string} prompt The input prompt.
+ * @returns {Promise<string>} A promise that resolves to the generated string.
+ */
+ /** @ignore */
+ async _call(prompt, _options) {
+ await this.setModel();
+ const response = (await this.caller.call(async () => this.model.complete({
+ query: prompt,
+ ...this.inferenceParameters,
+ })));
+ return response.generatedOutput;
+ }
+ async setModel() {
+ if (this.model)
+ return;
+ const gradient = new nodejs_sdk_1.Gradient({
+ accessToken: this.gradientAccessKey,
+ workspaceId: this.workspaceId,
+ });
+ if (this.adapterId) {
+ this.model = await gradient.getModelAdapter({
+ modelAdapterId: this.adapterId,
+ });
+ }
+ else {
+ this.model = await gradient.getBaseModel({
+ baseModelSlug: this.modelSlug,
+ });
+ }
+ }
+ }
+ exports.GradientLLM = GradientLLM;
package/dist/llms/gradient_ai.d.ts
@@ -0,0 +1,55 @@
+ import { BaseLLMCallOptions, BaseLLMParams, LLM } from "./base.js";
+ /**
+ * The GradientLLMParams interface defines the input parameters for
+ * the GradientLLM class.
+ */
+ export interface GradientLLMParams extends BaseLLMParams {
+ /**
+ * Gradient AI Access Token.
+ * Provide Access Token if you do not wish to automatically pull from env.
+ */
+ gradientAccessKey?: string;
+ /**
+ * Gradient Workspace Id.
+ * Provide workspace id if you do not wish to automatically pull from env.
+ */
+ workspaceId?: string;
+ /**
+ * Parameters accepted by the Gradient npm package.
+ */
+ inferenceParameters?: Record<string, unknown>;
+ /**
+ * Gradient AI Model Slug.
+ */
+ modelSlug?: string;
+ /**
+ * Gradient Adapter ID for custom fine tuned models.
+ */
+ adapterId?: string;
+ }
+ /**
+ * The GradientLLM class is used to interact with Gradient AI inference Endpoint models.
+ * This requires your Gradient AI Access Token which is autoloaded if not specified.
+ */
+ export declare class GradientLLM extends LLM<BaseLLMCallOptions> {
+ static lc_name(): string;
+ get lc_secrets(): {
+ [key: string]: string;
+ } | undefined;
+ modelSlug: string;
+ adapterId?: string;
+ gradientAccessKey?: string;
+ workspaceId?: string;
+ inferenceParameters?: Record<string, unknown>;
+ model: any;
+ constructor(fields: GradientLLMParams);
+ _llmType(): string;
+ /**
+ * Calls the Gradient AI endpoint and retrieves the result.
+ * @param {string} prompt The input prompt.
+ * @returns {Promise<string>} A promise that resolves to the generated string.
+ */
+ /** @ignore */
+ _call(prompt: string, _options: this["ParsedCallOptions"]): Promise<string>;
+ setModel(): Promise<void>;
+ }
package/dist/llms/gradient_ai.js
@@ -0,0 +1,108 @@
+ import { Gradient } from "@gradientai/nodejs-sdk";
+ import { LLM } from "./base.js";
+ import { getEnvironmentVariable } from "../util/env.js";
+ /**
+ * The GradientLLM class is used to interact with Gradient AI inference Endpoint models.
+ * This requires your Gradient AI Access Token which is autoloaded if not specified.
+ */
+ export class GradientLLM extends LLM {
+ static lc_name() {
+ return "GradientLLM";
+ }
+ get lc_secrets() {
+ return {
+ gradientAccessKey: "GRADIENT_ACCESS_TOKEN",
+ workspaceId: "GRADIENT_WORKSPACE_ID",
+ };
+ }
+ constructor(fields) {
+ super(fields);
+ Object.defineProperty(this, "modelSlug", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "llama2-7b-chat"
+ });
+ Object.defineProperty(this, "adapterId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "gradientAccessKey", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "workspaceId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "inferenceParameters", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ // Gradient AI does not export the BaseModel type. Once it does, we can use it here.
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ Object.defineProperty(this, "model", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.modelSlug = fields?.modelSlug ?? this.modelSlug;
+ this.adapterId = fields?.adapterId;
+ this.gradientAccessKey =
+ fields?.gradientAccessKey ??
+ getEnvironmentVariable("GRADIENT_ACCESS_TOKEN");
+ this.workspaceId =
+ fields?.workspaceId ?? getEnvironmentVariable("GRADIENT_WORKSPACE_ID");
+ this.inferenceParameters = fields.inferenceParameters;
+ if (!this.gradientAccessKey) {
+ throw new Error("Missing Gradient AI Access Token");
+ }
+ if (!this.workspaceId) {
+ throw new Error("Missing Gradient AI Workspace ID");
+ }
+ }
+ _llmType() {
+ return "gradient_ai";
+ }
+ /**
+ * Calls the Gradient AI endpoint and retrieves the result.
+ * @param {string} prompt The input prompt.
+ * @returns {Promise<string>} A promise that resolves to the generated string.
+ */
+ /** @ignore */
+ async _call(prompt, _options) {
+ await this.setModel();
+ const response = (await this.caller.call(async () => this.model.complete({
+ query: prompt,
+ ...this.inferenceParameters,
+ })));
+ return response.generatedOutput;
+ }
+ async setModel() {
+ if (this.model)
+ return;
+ const gradient = new Gradient({
+ accessToken: this.gradientAccessKey,
+ workspaceId: this.workspaceId,
+ });
+ if (this.adapterId) {
+ this.model = await gradient.getModelAdapter({
+ modelAdapterId: this.adapterId,
+ });
+ }
+ else {
+ this.model = await gradient.getBaseModel({
+ baseModelSlug: this.modelSlug,
+ });
+ }
+ }
+ }
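
The three gradient_ai files above add a new GradientLLM integration, exposed through the new package/llms/gradient_ai entrypoint listed in the file table. A minimal usage sketch, assuming the @gradientai/nodejs-sdk peer dependency is installed and GRADIENT_ACCESS_TOKEN / GRADIENT_WORKSPACE_ID are set in the environment; the inferenceParameters key shown is a Gradient SDK assumption, not part of this diff:

// Usage sketch (not part of the diff): the new GradientLLM entrypoint.
import { GradientLLM } from "langchain/llms/gradient_ai";

const llm = new GradientLLM({
  // Defaults to the "llama2-7b-chat" base model; pass adapterId instead to target a fine-tuned adapter.
  modelSlug: "llama2-7b-chat",
  // gradientAccessKey / workspaceId fall back to GRADIENT_ACCESS_TOKEN / GRADIENT_WORKSPACE_ID.
  // Forwarded untouched into the SDK's complete() call (key name assumed).
  inferenceParameters: { maxGeneratedTokenCount: 20 },
});

const output = await llm.call("What is the capital of France?");
console.log(output);
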
package/dist/llms/llama_cpp.cjs
@@ -77,9 +77,10 @@ class LlamaCpp extends base_js_1.LLM {
  return "llama2_cpp";
  }
  /** @ignore */
- async _call(prompt, _options) {
+ async _call(prompt, options) {
  try {
  const promptOptions = {
+ onToken: options?.onToken,
  maxTokens: this?.maxTokens,
  temperature: this?.temperature,
  topK: this?.topK,
package/dist/llms/llama_cpp.d.ts
@@ -36,6 +36,6 @@ export declare class LlamaCpp extends LLM<LlamaCppCallOptions> {
  constructor(inputs: LlamaCppInputs);
  _llmType(): string;
  /** @ignore */
- _call(prompt: string, _options?: this["ParsedCallOptions"]): Promise<string>;
+ _call(prompt: string, options?: this["ParsedCallOptions"]): Promise<string>;
  _streamResponseChunks(prompt: string, _options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;
  }
package/dist/llms/llama_cpp.js
@@ -74,9 +74,10 @@ export class LlamaCpp extends LLM {
  return "llama2_cpp";
  }
  /** @ignore */
- async _call(prompt, _options) {
+ async _call(prompt, options) {
  try {
  const promptOptions = {
+ onToken: options?.onToken,
  maxTokens: this?.maxTokens,
  temperature: this?.temperature,
  topK: this?.topK,
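
The llama_cpp change above threads the onToken callback from the call options into node-llama-cpp's prompt options, so token-level callbacks now reach the bindings during a plain call. A minimal sketch, assuming a local GGUF model path and that onToken is declared on LlamaCppCallOptions; its payload is whatever node-llama-cpp emits (typically token ids):

// Usage sketch (not part of the diff): passing onToken through the call options.
import { LlamaCpp } from "langchain/llms/llama_cpp";

const model = new LlamaCpp({ modelPath: "/path/to/llama-2-7b.gguf" });

const text = await model.call("Tell me a short joke.", {
  // Forwarded into promptOptions by the patched _call; fires as tokens are generated.
  onToken: (tokens) => process.stdout.write(`${tokens} `),
});
console.log(text);
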
package/dist/llms/watsonx_ai.cjs
@@ -0,0 +1,154 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.WatsonxAI = void 0;
+ const base_js_1 = require("./base.cjs");
+ const env_js_1 = require("../util/env.cjs");
+ const endpointConstructor = (region, version) => `https://${region}.ml.cloud.ibm.com/ml/v1-beta/generation/text?version=${version}`;
+ /**
+ * The WatsonxAI class is used to interact with Watsonx AI
+ * Inference Endpoint models. It uses IBM Cloud for authentication.
+ * This requires your IBM Cloud API Key which is autoloaded if not specified.
+ */
+ class WatsonxAI extends base_js_1.LLM {
+ static lc_name() {
+ return "WatsonxAI";
+ }
+ get lc_secrets() {
+ return {
+ ibmCloudApiKey: "IBM_CLOUD_API_KEY",
+ projectId: "WATSONX_PROJECT_ID",
+ };
+ }
+ constructor(fields) {
+ super(fields);
+ Object.defineProperty(this, "endpoint", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "region", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "us-south"
+ });
+ Object.defineProperty(this, "version", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "2023-05-29"
+ });
+ Object.defineProperty(this, "modelId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "meta-llama/llama-2-70b-chat"
+ });
+ Object.defineProperty(this, "modelKwargs", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "ibmCloudApiKey", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "ibmCloudToken", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "ibmCloudTokenExpiresAt", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "projectId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "modelParameters", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.region = fields?.region ?? this.region;
+ this.version = fields?.version ?? this.version;
+ this.modelId = fields?.modelId ?? this.modelId;
+ this.ibmCloudApiKey =
+ fields?.ibmCloudApiKey ?? (0, env_js_1.getEnvironmentVariable)("IBM_CLOUD_API_KEY");
+ this.projectId =
+ fields?.projectId ?? (0, env_js_1.getEnvironmentVariable)("WATSONX_PROJECT_ID");
+ this.endpoint =
+ fields?.endpoint ?? endpointConstructor(this.region, this.version);
+ this.modelParameters = fields.modelParameters;
+ if (!this.ibmCloudApiKey) {
+ throw new Error("Missing IBM Cloud API Key");
+ }
+ if (!this.projectId) {
+ throw new Error("Missing WatsonX AI Project ID");
+ }
+ }
+ _llmType() {
+ return "watsonx_ai";
+ }
+ /**
+ * Calls the WatsonX AI endpoint and retrieves the result.
+ * @param {string} prompt The input prompt.
+ * @returns {Promise<string>} A promise that resolves to the generated string.
+ */
+ /** @ignore */
+ async _call(prompt, _options) {
+ const response = (await this.caller.call(async () => fetch(this.endpoint, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Accept: "application/json",
+ Authorization: `Bearer ${await this.generateToken()}`,
+ },
+ body: JSON.stringify({
+ project_id: this.projectId,
+ model_id: this.modelId,
+ input: prompt,
+ parameters: this.modelParameters,
+ }),
+ }).then((res) => res.json())));
+ /**
+ * Handle Errors for invalid requests.
+ */
+ if (response.errors) {
+ throw new Error(response.errors[0].message);
+ }
+ return response.results[0].generated_text;
+ }
+ async generateToken() {
+ if (this.ibmCloudToken && this.ibmCloudTokenExpiresAt) {
+ if (this.ibmCloudTokenExpiresAt > Date.now()) {
+ return this.ibmCloudToken;
+ }
+ }
+ const urlTokenParams = new URLSearchParams();
+ urlTokenParams.append("grant_type", "urn:ibm:params:oauth:grant-type:apikey");
+ urlTokenParams.append("apikey", this.ibmCloudApiKey);
+ const data = (await fetch("https://iam.cloud.ibm.com/identity/token", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/x-www-form-urlencoded",
+ },
+ body: urlTokenParams,
+ }).then((res) => res.json()));
+ this.ibmCloudTokenExpiresAt = data.expiration * 1000;
+ this.ibmCloudToken = data.access_token;
+ return this.ibmCloudToken;
+ }
+ }
+ exports.WatsonxAI = WatsonxAI;
package/dist/llms/watsonx_ai.d.ts
@@ -0,0 +1,72 @@
+ import { BaseLLMCallOptions, BaseLLMParams, LLM } from "./base.js";
+ /**
+ * The WatsonxAIParams interface defines the input parameters for
+ * the WatsonxAI class.
+ */
+ export interface WatsonxAIParams extends BaseLLMParams {
+ /**
+ * WatsonX AI Complete Endpoint.
+ * Can be used if you want a fully custom endpoint.
+ */
+ endpoint?: string;
+ /**
+ * IBM Cloud Compute Region.
+ * eg. us-south, us-east, etc.
+ */
+ region?: string;
+ /**
+ * WatsonX AI Version.
+ * Date representing the WatsonX AI Version.
+ * eg. 2023-05-29
+ */
+ version?: string;
+ /**
+ * WatsonX AI Key.
+ * Provide API Key if you do not wish to automatically pull from env.
+ */
+ ibmCloudApiKey?: string;
+ /**
+ * WatsonX AI Key.
+ * Provide API Key if you do not wish to automatically pull from env.
+ */
+ projectId?: string;
+ /**
+ * Parameters accepted by the WatsonX AI Endpoint.
+ */
+ modelParameters?: Record<string, unknown>;
+ /**
+ * WatsonX AI Model ID.
+ */
+ modelId?: string;
+ }
+ /**
+ * The WatsonxAI class is used to interact with Watsonx AI
+ * Inference Endpoint models. It uses IBM Cloud for authentication.
+ * This requires your IBM Cloud API Key which is autoloaded if not specified.
+ */
+ export declare class WatsonxAI extends LLM<BaseLLMCallOptions> {
+ static lc_name(): string;
+ get lc_secrets(): {
+ [key: string]: string;
+ } | undefined;
+ endpoint: string;
+ region: string;
+ version: string;
+ modelId: string;
+ modelKwargs?: Record<string, unknown>;
+ ibmCloudApiKey?: string;
+ ibmCloudToken?: string;
+ ibmCloudTokenExpiresAt?: number;
+ projectId?: string;
+ modelParameters?: Record<string, unknown>;
+ constructor(fields: WatsonxAIParams);
+ _llmType(): string;
+ /**
+ * Calls the WatsonX AI endpoint and retrieves the result.
+ * @param {string} prompt The input prompt.
+ * @returns {Promise<string>} A promise that resolves to the generated string.
+ */
+ /** @ignore */
+ _call(prompt: string, _options: this["ParsedCallOptions"]): Promise<string>;
+ generateToken(): Promise<string>;
+ }
package/dist/llms/watsonx_ai.js
@@ -0,0 +1,150 @@
+ import { LLM } from "./base.js";
+ import { getEnvironmentVariable } from "../util/env.js";
+ const endpointConstructor = (region, version) => `https://${region}.ml.cloud.ibm.com/ml/v1-beta/generation/text?version=${version}`;
+ /**
+ * The WatsonxAI class is used to interact with Watsonx AI
+ * Inference Endpoint models. It uses IBM Cloud for authentication.
+ * This requires your IBM Cloud API Key which is autoloaded if not specified.
+ */
+ export class WatsonxAI extends LLM {
+ static lc_name() {
+ return "WatsonxAI";
+ }
+ get lc_secrets() {
+ return {
+ ibmCloudApiKey: "IBM_CLOUD_API_KEY",
+ projectId: "WATSONX_PROJECT_ID",
+ };
+ }
+ constructor(fields) {
+ super(fields);
+ Object.defineProperty(this, "endpoint", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "region", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "us-south"
+ });
+ Object.defineProperty(this, "version", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "2023-05-29"
+ });
+ Object.defineProperty(this, "modelId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "meta-llama/llama-2-70b-chat"
+ });
+ Object.defineProperty(this, "modelKwargs", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "ibmCloudApiKey", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "ibmCloudToken", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "ibmCloudTokenExpiresAt", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "projectId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "modelParameters", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.region = fields?.region ?? this.region;
+ this.version = fields?.version ?? this.version;
+ this.modelId = fields?.modelId ?? this.modelId;
+ this.ibmCloudApiKey =
+ fields?.ibmCloudApiKey ?? getEnvironmentVariable("IBM_CLOUD_API_KEY");
+ this.projectId =
+ fields?.projectId ?? getEnvironmentVariable("WATSONX_PROJECT_ID");
+ this.endpoint =
+ fields?.endpoint ?? endpointConstructor(this.region, this.version);
+ this.modelParameters = fields.modelParameters;
+ if (!this.ibmCloudApiKey) {
+ throw new Error("Missing IBM Cloud API Key");
+ }
+ if (!this.projectId) {
+ throw new Error("Missing WatsonX AI Project ID");
+ }
+ }
+ _llmType() {
+ return "watsonx_ai";
+ }
+ /**
+ * Calls the WatsonX AI endpoint and retrieves the result.
+ * @param {string} prompt The input prompt.
+ * @returns {Promise<string>} A promise that resolves to the generated string.
+ */
+ /** @ignore */
+ async _call(prompt, _options) {
+ const response = (await this.caller.call(async () => fetch(this.endpoint, {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json",
+ Accept: "application/json",
+ Authorization: `Bearer ${await this.generateToken()}`,
+ },
+ body: JSON.stringify({
+ project_id: this.projectId,
+ model_id: this.modelId,
+ input: prompt,
+ parameters: this.modelParameters,
+ }),
+ }).then((res) => res.json())));
+ /**
+ * Handle Errors for invalid requests.
+ */
+ if (response.errors) {
+ throw new Error(response.errors[0].message);
+ }
+ return response.results[0].generated_text;
+ }
+ async generateToken() {
+ if (this.ibmCloudToken && this.ibmCloudTokenExpiresAt) {
+ if (this.ibmCloudTokenExpiresAt > Date.now()) {
+ return this.ibmCloudToken;
+ }
+ }
+ const urlTokenParams = new URLSearchParams();
+ urlTokenParams.append("grant_type", "urn:ibm:params:oauth:grant-type:apikey");
+ urlTokenParams.append("apikey", this.ibmCloudApiKey);
+ const data = (await fetch("https://iam.cloud.ibm.com/identity/token", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/x-www-form-urlencoded",
+ },
+ body: urlTokenParams,
+ }).then((res) => res.json()));
+ this.ibmCloudTokenExpiresAt = data.expiration * 1000;
+ this.ibmCloudToken = data.access_token;
+ return this.ibmCloudToken;
+ }
+ }
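
The watsonx_ai files add a WatsonxAI LLM that authenticates against IBM Cloud IAM and posts prompts to the v1-beta text generation endpoint, exposed through the new package/llms/watsonx_ai entrypoint. A minimal usage sketch, assuming IBM_CLOUD_API_KEY and WATSONX_PROJECT_ID are set; the modelParameters keys are watsonx.ai request parameters, not defined by this diff:

// Usage sketch (not part of the diff): the new WatsonxAI entrypoint.
import { WatsonxAI } from "langchain/llms/watsonx_ai";

const watsonx = new WatsonxAI({
  // region defaults to "us-south", modelId to "meta-llama/llama-2-70b-chat".
  modelId: "meta-llama/llama-2-70b-chat",
  // Sent verbatim as the "parameters" field of the request body (key name assumed).
  modelParameters: { max_new_tokens: 50 },
});

const completion = await watsonx.call("Summarize watsonx.ai in one sentence.");
console.log(completion);
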