langchain 0.0.154 → 0.0.156

This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (78)
  1. package/chat_models/bedrock.cjs +1 -0
  2. package/chat_models/bedrock.d.ts +1 -0
  3. package/chat_models/bedrock.js +1 -0
  4. package/dist/callbacks/base.d.ts +42 -28
  5. package/dist/callbacks/handlers/log_stream.cjs +283 -0
  6. package/dist/callbacks/handlers/log_stream.d.ts +99 -0
  7. package/dist/callbacks/handlers/log_stream.js +277 -0
  8. package/dist/callbacks/handlers/tracer.cjs +34 -18
  9. package/dist/callbacks/handlers/tracer.d.ts +18 -16
  10. package/dist/callbacks/handlers/tracer.js +34 -18
  11. package/dist/chat_models/bedrock.cjs +260 -0
  12. package/dist/chat_models/bedrock.d.ts +58 -0
  13. package/dist/chat_models/bedrock.js +254 -0
  14. package/dist/document_loaders/web/notionapi.cjs +8 -4
  15. package/dist/document_loaders/web/notionapi.js +8 -4
  16. package/dist/document_loaders/web/searchapi.cjs +134 -0
  17. package/dist/document_loaders/web/searchapi.d.ts +65 -0
  18. package/dist/document_loaders/web/searchapi.js +130 -0
  19. package/dist/embeddings/cloudflare_workersai.cjs +69 -0
  20. package/dist/embeddings/cloudflare_workersai.d.ts +28 -0
  21. package/dist/embeddings/cloudflare_workersai.js +65 -0
  22. package/dist/llms/bedrock.cjs +57 -67
  23. package/dist/llms/bedrock.d.ts +8 -35
  24. package/dist/llms/bedrock.js +57 -67
  25. package/dist/load/import_constants.cjs +4 -0
  26. package/dist/load/import_constants.js +4 -0
  27. package/dist/load/import_map.cjs +3 -2
  28. package/dist/load/import_map.d.ts +1 -0
  29. package/dist/load/import_map.js +1 -0
  30. package/dist/schema/runnable/base.cjs +64 -5
  31. package/dist/schema/runnable/base.d.ts +13 -0
  32. package/dist/schema/runnable/base.js +64 -5
  33. package/dist/tools/index.cjs +3 -1
  34. package/dist/tools/index.d.ts +1 -0
  35. package/dist/tools/index.js +1 -0
  36. package/dist/tools/searchapi.cjs +139 -0
  37. package/dist/tools/searchapi.d.ts +64 -0
  38. package/dist/tools/searchapi.js +135 -0
  39. package/dist/util/bedrock.cjs +54 -0
  40. package/dist/util/bedrock.d.ts +59 -0
  41. package/dist/util/bedrock.js +50 -0
  42. package/dist/util/fast-json-patch/index.cjs +48 -0
  43. package/dist/util/fast-json-patch/index.d.ts +21 -0
  44. package/dist/util/fast-json-patch/index.js +15 -0
  45. package/dist/util/fast-json-patch/src/core.cjs +469 -0
  46. package/dist/util/fast-json-patch/src/core.d.ts +111 -0
  47. package/dist/util/fast-json-patch/src/core.js +459 -0
  48. package/dist/util/fast-json-patch/src/helpers.cjs +194 -0
  49. package/dist/util/fast-json-patch/src/helpers.d.ts +36 -0
  50. package/dist/util/fast-json-patch/src/helpers.js +181 -0
  51. package/dist/util/googlevertexai-webauth.cjs +6 -2
  52. package/dist/util/googlevertexai-webauth.d.ts +1 -0
  53. package/dist/util/googlevertexai-webauth.js +6 -2
  54. package/dist/util/stream.cjs +2 -40
  55. package/dist/util/stream.d.ts +1 -2
  56. package/dist/util/stream.js +1 -38
  57. package/dist/vectorstores/cloudflare_vectorize.cjs +200 -0
  58. package/dist/vectorstores/cloudflare_vectorize.d.ts +90 -0
  59. package/dist/vectorstores/cloudflare_vectorize.js +173 -0
  60. package/dist/vectorstores/pgvector.cjs +1 -1
  61. package/dist/vectorstores/pgvector.js +1 -1
  62. package/dist/vectorstores/supabase.d.ts +1 -1
  63. package/dist/vectorstores/vercel_postgres.cjs +300 -0
  64. package/dist/vectorstores/vercel_postgres.d.ts +145 -0
  65. package/dist/vectorstores/vercel_postgres.js +296 -0
  66. package/document_loaders/web/searchapi.cjs +1 -0
  67. package/document_loaders/web/searchapi.d.ts +1 -0
  68. package/document_loaders/web/searchapi.js +1 -0
  69. package/embeddings/cloudflare_workersai.cjs +1 -0
  70. package/embeddings/cloudflare_workersai.d.ts +1 -0
  71. package/embeddings/cloudflare_workersai.js +1 -0
  72. package/package.json +60 -14
  73. package/vectorstores/cloudflare_vectorize.cjs +1 -0
  74. package/vectorstores/cloudflare_vectorize.d.ts +1 -0
  75. package/vectorstores/cloudflare_vectorize.js +1 -0
  76. package/vectorstores/vercel_postgres.cjs +1 -0
  77. package/vectorstores/vercel_postgres.d.ts +1 -0
  78. package/vectorstores/vercel_postgres.js +1 -0
package/dist/callbacks/handlers/tracer.js
@@ -21,7 +21,7 @@ export class BaseTracer extends BaseCallbackHandler {
     _addChildRun(parentRun, childRun) {
         parentRun.child_runs.push(childRun);
     }
-    _startTrace(run) {
+    async _startTrace(run) {
         if (run.parent_run_id !== undefined) {
             const parentRun = this.runMap.get(run.parent_run_id);
             if (parentRun) {
@@ -30,6 +30,7 @@ export class BaseTracer extends BaseCallbackHandler {
             }
         }
         this.runMap.set(run.id, run);
+        await this.onRunCreate?.(run);
     }
     async _endTrace(run) {
         const parentRun = run.parent_run_id !== undefined && this.runMap.get(run.parent_run_id);
@@ -40,6 +41,7 @@ export class BaseTracer extends BaseCallbackHandler {
             await this.persistRun(run);
         }
         this.runMap.delete(run.id);
+        await this.onRunUpdate?.(run);
     }
     _getExecutionOrder(parentRunId) {
         const parentRun = parentRunId !== undefined && this.runMap.get(parentRunId);
@@ -49,7 +51,7 @@ export class BaseTracer extends BaseCallbackHandler {
         }
         return parentRun.child_execution_order + 1;
     }
-    async handleLLMStart(llm, prompts, runId, parentRunId, extraParams, tags, metadata) {
+    async handleLLMStart(llm, prompts, runId, parentRunId, extraParams, tags, metadata, name) {
         const execution_order = this._getExecutionOrder(parentRunId);
         const start_time = Date.now();
         const finalExtraParams = metadata
@@ -57,7 +59,7 @@ export class BaseTracer extends BaseCallbackHandler {
             : extraParams;
         const run = {
             id: runId,
-            name: llm.id[llm.id.length - 1],
+            name: name ?? llm.id[llm.id.length - 1],
             parent_run_id: parentRunId,
             start_time,
             serialized: llm,
@@ -75,10 +77,11 @@ export class BaseTracer extends BaseCallbackHandler {
             extra: finalExtraParams ?? {},
             tags: tags || [],
         };
-        this._startTrace(run);
+        await this._startTrace(run);
         await this.onLLMStart?.(run);
+        return run;
     }
-    async handleChatModelStart(llm, messages, runId, parentRunId, extraParams, tags, metadata) {
+    async handleChatModelStart(llm, messages, runId, parentRunId, extraParams, tags, metadata, name) {
         const execution_order = this._getExecutionOrder(parentRunId);
         const start_time = Date.now();
         const finalExtraParams = metadata
@@ -86,7 +89,7 @@ export class BaseTracer extends BaseCallbackHandler {
             : extraParams;
         const run = {
             id: runId,
-            name: llm.id[llm.id.length - 1],
+            name: name ?? llm.id[llm.id.length - 1],
             parent_run_id: parentRunId,
             start_time,
             serialized: llm,
@@ -104,8 +107,9 @@ export class BaseTracer extends BaseCallbackHandler {
             extra: finalExtraParams ?? {},
             tags: tags || [],
         };
-        this._startTrace(run);
+        await this._startTrace(run);
         await this.onLLMStart?.(run);
+        return run;
     }
     async handleLLMEnd(output, runId) {
         const run = this.runMap.get(runId);
@@ -120,6 +124,7 @@ export class BaseTracer extends BaseCallbackHandler {
         });
         await this.onLLMEnd?.(run);
         await this._endTrace(run);
+        return run;
     }
     async handleLLMError(error, runId) {
         const run = this.runMap.get(runId);
@@ -134,13 +139,14 @@ export class BaseTracer extends BaseCallbackHandler {
         });
         await this.onLLMError?.(run);
         await this._endTrace(run);
+        return run;
     }
-    async handleChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType) {
+    async handleChainStart(chain, inputs, runId, parentRunId, tags, metadata, runType, name) {
         const execution_order = this._getExecutionOrder(parentRunId);
         const start_time = Date.now();
         const run = {
             id: runId,
-            name: chain.id[chain.id.length - 1],
+            name: name ?? chain.id[chain.id.length - 1],
             parent_run_id: parentRunId,
             start_time,
             serialized: chain,
@@ -158,8 +164,9 @@ export class BaseTracer extends BaseCallbackHandler {
             extra: metadata ? { metadata } : {},
             tags: tags || [],
         };
-        this._startTrace(run);
+        await this._startTrace(run);
         await this.onChainStart?.(run);
+        return run;
     }
     async handleChainEnd(outputs, runId, _parentRunId, _tags, kwargs) {
         const run = this.runMap.get(runId);
@@ -177,6 +184,7 @@ export class BaseTracer extends BaseCallbackHandler {
         }
         await this.onChainEnd?.(run);
         await this._endTrace(run);
+        return run;
     }
     async handleChainError(error, runId, _parentRunId, _tags, kwargs) {
         const run = this.runMap.get(runId);
@@ -194,13 +202,14 @@ export class BaseTracer extends BaseCallbackHandler {
         }
         await this.onChainError?.(run);
         await this._endTrace(run);
+        return run;
     }
-    async handleToolStart(tool, input, runId, parentRunId, tags, metadata) {
+    async handleToolStart(tool, input, runId, parentRunId, tags, metadata, name) {
         const execution_order = this._getExecutionOrder(parentRunId);
         const start_time = Date.now();
         const run = {
             id: runId,
-            name: tool.id[tool.id.length - 1],
+            name: name ?? tool.id[tool.id.length - 1],
             parent_run_id: parentRunId,
             start_time,
             serialized: tool,
@@ -218,8 +227,9 @@ export class BaseTracer extends BaseCallbackHandler {
             extra: metadata ? { metadata } : {},
             tags: tags || [],
         };
-        this._startTrace(run);
+        await this._startTrace(run);
         await this.onToolStart?.(run);
+        return run;
     }
     async handleToolEnd(output, runId) {
         const run = this.runMap.get(runId);
@@ -234,6 +244,7 @@ export class BaseTracer extends BaseCallbackHandler {
         });
         await this.onToolEnd?.(run);
         await this._endTrace(run);
+        return run;
     }
     async handleToolError(error, runId) {
         const run = this.runMap.get(runId);
@@ -248,6 +259,7 @@ export class BaseTracer extends BaseCallbackHandler {
         });
         await this.onToolError?.(run);
         await this._endTrace(run);
+        return run;
     }
     async handleAgentAction(action, runId) {
         const run = this.runMap.get(runId);
@@ -276,12 +288,12 @@ export class BaseTracer extends BaseCallbackHandler {
         });
         await this.onAgentEnd?.(run);
     }
-    async handleRetrieverStart(retriever, query, runId, parentRunId, tags, metadata) {
+    async handleRetrieverStart(retriever, query, runId, parentRunId, tags, metadata, name) {
         const execution_order = this._getExecutionOrder(parentRunId);
         const start_time = Date.now();
         const run = {
             id: runId,
-            name: retriever.id[retriever.id.length - 1],
+            name: name ?? retriever.id[retriever.id.length - 1],
             parent_run_id: parentRunId,
             start_time,
             serialized: retriever,
@@ -299,8 +311,9 @@ export class BaseTracer extends BaseCallbackHandler {
             extra: metadata ? { metadata } : {},
             tags: tags || [],
         };
-        this._startTrace(run);
+        await this._startTrace(run);
         await this.onRetrieverStart?.(run);
+        return run;
     }
     async handleRetrieverEnd(documents, runId) {
         const run = this.runMap.get(runId);
@@ -315,6 +328,7 @@ export class BaseTracer extends BaseCallbackHandler {
         });
         await this.onRetrieverEnd?.(run);
         await this._endTrace(run);
+        return run;
     }
     async handleRetrieverError(error, runId) {
         const run = this.runMap.get(runId);
@@ -329,6 +343,7 @@ export class BaseTracer extends BaseCallbackHandler {
         });
         await this.onRetrieverError?.(run);
         await this._endTrace(run);
+        return run;
     }
     async handleText(text, runId) {
         const run = this.runMap.get(runId);
@@ -345,13 +360,14 @@ export class BaseTracer extends BaseCallbackHandler {
     async handleLLMNewToken(token, idx, runId, _parentRunId, _tags, fields) {
         const run = this.runMap.get(runId);
         if (!run || run?.run_type !== "llm") {
-            return;
+            throw new Error(`Invalid "runId" provided to "handleLLMNewToken" callback.`);
         }
         run.events.push({
             name: "new_token",
             time: new Date().toISOString(),
             kwargs: { token, idx, chunk: fields?.chunk },
         });
-        await this.onLLMNewToken?.(run);
+        await this.onLLMNewToken?.(run, token);
+        return run;
     }
 }
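
The tracer changes above make _startTrace awaitable and add two optional lifecycle hooks: onRunCreate, awaited when a run is first registered in runMap, and onRunUpdate, awaited when _endTrace finalizes it (the new log_stream handler in this release builds on these). The handle*Start callbacks also accept an optional name override, every handler now returns its run, and handleLLMNewToken throws on an unknown runId instead of returning silently. Below is a minimal TypeScript sketch of a subclass driven by the new hooks; the import path and the location of the Run type are assumptions, not taken from this diff:

import { BaseTracer, Run } from "langchain/callbacks"; // assumed export path

class ConsoleRunLogger extends BaseTracer {
  name = "console_run_logger";

  // BaseTracer's one abstract method; a no-op for this logger.
  protected async persistRun(_run: Run): Promise<void> {}

  // Awaited by _startTrace right after the run enters runMap.
  async onRunCreate(run: Run): Promise<void> {
    console.log(`run created: ${run.name} (${run.run_type})`);
  }

  // Awaited by _endTrace after the run is removed from runMap.
  async onRunUpdate(run: Run): Promise<void> {
    console.log(`run finished: ${run.name}, ${run.events.length} events`);
  }
}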
package/dist/chat_models/bedrock.cjs
@@ -0,0 +1,260 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.ChatBedrock = exports.convertMessagesToPrompt = exports.convertMessagesToPromptAnthropic = void 0;
+const signature_v4_1 = require("@smithy/signature-v4");
+const credential_provider_node_1 = require("@aws-sdk/credential-provider-node");
+const protocol_http_1 = require("@smithy/protocol-http");
+const eventstream_codec_1 = require("@smithy/eventstream-codec");
+const util_utf8_1 = require("@smithy/util-utf8");
+const sha256_js_1 = require("@aws-crypto/sha256-js");
+const bedrock_js_1 = require("../util/bedrock.cjs");
+const env_js_1 = require("../util/env.cjs");
+const base_js_1 = require("./base.cjs");
+const index_js_1 = require("../schema/index.cjs");
+function convertOneMessageToText(message, humanPrompt, aiPrompt) {
+    if (message._getType() === "human") {
+        return `${humanPrompt} ${message.content}`;
+    }
+    else if (message._getType() === "ai") {
+        return `${aiPrompt} ${message.content}`;
+    }
+    else if (message._getType() === "system") {
+        return `${humanPrompt} <admin>${message.content}</admin>`;
+    }
+    else if (index_js_1.ChatMessage.isInstance(message)) {
+        return `\n\n${message.role[0].toUpperCase() + message.role.slice(1)}: ${message.content}`;
+    }
+    throw new Error(`Unknown role: ${message._getType()}`);
+}
+function convertMessagesToPromptAnthropic(messages, humanPrompt = "\n\nHuman:", aiPrompt = "\n\nAssistant:") {
+    const messagesCopy = [...messages];
+    if (messagesCopy.length === 0 ||
+        messagesCopy[messagesCopy.length - 1]._getType() !== "ai") {
+        messagesCopy.push(new index_js_1.AIMessage({ content: "" }));
+    }
+    return messagesCopy
+        .map((message) => convertOneMessageToText(message, humanPrompt, aiPrompt))
+        .join("");
+}
+exports.convertMessagesToPromptAnthropic = convertMessagesToPromptAnthropic;
+/**
+ * Function that converts an array of messages into a single string prompt
+ * that can be used as input for a chat model. It delegates the conversion
+ * logic to the appropriate provider-specific function.
+ * @param messages Array of messages to be converted.
+ * @param provider Name of the provider whose prompt format should be used.
+ * @returns A string prompt that can be used as input for a chat model.
+ */
+function convertMessagesToPrompt(messages, provider) {
+    if (provider === "anthropic") {
+        return convertMessagesToPromptAnthropic(messages);
+    }
+    throw new Error(`Provider ${provider} does not support chat.`);
+}
+exports.convertMessagesToPrompt = convertMessagesToPrompt;
+/**
+ * A chat model that interacts with the Bedrock service. It extends the
+ * `SimpleChatModel` class and implements the `BaseBedrockInput` interface.
+ * The class is designed to authenticate and interact with the Bedrock
+ * service, which is a part of Amazon Web Services (AWS). It uses AWS
+ * credentials for authentication and can be configured with various
+ * parameters such as the model to use, the AWS region, and the maximum
+ * number of tokens to generate.
+ */
+class ChatBedrock extends base_js_1.SimpleChatModel {
+    get lc_secrets() {
+        return {};
+    }
+    _llmType() {
+        return "bedrock";
+    }
+    static lc_name() {
+        return "ChatBedrock";
+    }
+    constructor(fields) {
+        super(fields ?? {});
+        Object.defineProperty(this, "model", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: "amazon.titan-tg1-large"
+        });
+        Object.defineProperty(this, "region", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "credentials", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "temperature", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "maxTokens", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: undefined
+        });
+        Object.defineProperty(this, "fetchFn", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "endpointHost", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "stopSequences", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "modelKwargs", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "codec", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: new eventstream_codec_1.EventStreamCodec(util_utf8_1.toUtf8, util_utf8_1.fromUtf8)
+        });
+        this.model = fields?.model ?? this.model;
+        const allowedModels = ["ai21", "anthropic", "amazon"];
+        if (!allowedModels.includes(this.model.split(".")[0])) {
+            throw new Error(`Unknown model: '${this.model}', only these are supported: ${allowedModels}`);
+        }
+        const region = fields?.region ?? (0, env_js_1.getEnvironmentVariable)("AWS_DEFAULT_REGION");
+        if (!region) {
+            throw new Error("Please set the AWS_DEFAULT_REGION environment variable or pass it to the constructor as the region field.");
+        }
+        this.region = region;
+        this.credentials = fields?.credentials ?? (0, credential_provider_node_1.defaultProvider)();
+        this.temperature = fields?.temperature ?? this.temperature;
+        this.maxTokens = fields?.maxTokens ?? this.maxTokens;
+        this.fetchFn = fields?.fetchFn ?? fetch;
+        this.endpointHost = fields?.endpointHost ?? fields?.endpointUrl;
+        this.stopSequences = fields?.stopSequences;
+        this.modelKwargs = fields?.modelKwargs;
+    }
+    /** Call out to Bedrock service model.
+        Arguments:
+            messages: The messages to pass into the model.
+
+        Returns:
+            The string generated by the model.
+
+        Example:
+            response = model.call("Tell me a joke.")
+     */
+    async _call(messages, options, runManager) {
+        const chunks = [];
+        for await (const chunk of this._streamResponseChunks(messages, options, runManager)) {
+            chunks.push(chunk);
+        }
+        return chunks.map((chunk) => chunk.text).join("");
+    }
+    async *_streamResponseChunks(messages, options, runManager) {
+        const provider = this.model.split(".")[0];
+        const service = "bedrock-runtime";
+        const inputBody = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareInput(provider, convertMessagesToPromptAnthropic(messages), this.maxTokens, this.temperature, this.stopSequences, this.modelKwargs);
+        const endpointHost = this.endpointHost ?? `${service}.${this.region}.amazonaws.com`;
+        const amazonMethod = provider === "anthropic" ? "invoke-with-response-stream" : "invoke";
+        const url = new URL(`https://${endpointHost}/model/${this.model}/${amazonMethod}`);
+        const request = new protocol_http_1.HttpRequest({
+            hostname: url.hostname,
+            path: url.pathname,
+            protocol: url.protocol,
+            method: "POST",
+            body: JSON.stringify(inputBody),
+            query: Object.fromEntries(url.searchParams.entries()),
+            headers: {
+                // host is required by AWS Signature V4: https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
+                host: url.host,
+                accept: "application/json",
+                "content-type": "application/json",
+            },
+        });
+        const signer = new signature_v4_1.SignatureV4({
+            credentials: this.credentials,
+            service: "bedrock",
+            region: this.region,
+            sha256: sha256_js_1.Sha256,
+        });
+        const signedRequest = await signer.sign(request);
+        // Send request to AWS using the low-level fetch API
+        const response = await this.caller.callWithOptions({ signal: options.signal }, async () => this.fetchFn(url, {
+            headers: signedRequest.headers,
+            body: signedRequest.body,
+            method: signedRequest.method,
+        }));
+        if (response.status < 200 || response.status >= 300) {
+            throw Error(`Failed to access underlying url '${url}': got ${response.status} ${response.statusText}: ${await response.text()}`);
+        }
+        if (provider === "anthropic") {
+            const reader = response.body?.getReader();
+            const decoder = new TextDecoder();
+            for await (const chunk of this._readChunks(reader)) {
+                const event = this.codec.decode(chunk);
+                if ((event.headers[":event-type"] !== undefined &&
+                    event.headers[":event-type"].value !== "chunk") ||
+                    event.headers[":content-type"].value !== "application/json") {
+                    throw Error(`Failed to get event chunk: got ${chunk}`);
+                }
+                // console.log(decoder.decode(event.body));
+                const body = JSON.parse(decoder.decode(event.body));
+                if (body.message) {
+                    throw new Error(body.message);
+                }
+                if (body.bytes !== undefined) {
+                    const chunkResult = JSON.parse(Buffer.from(body.bytes, "base64").toString());
+                    const text = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareOutput(provider, chunkResult);
+                    yield new index_js_1.ChatGenerationChunk({
+                        text,
+                        message: new index_js_1.AIMessageChunk({ content: text }),
+                    });
+                    await runManager?.handleLLMNewToken(text);
+                }
+            }
+        }
+        else {
+            const json = await response.json();
+            const text = bedrock_js_1.BedrockLLMInputOutputAdapter.prepareOutput(provider, json);
+            yield new index_js_1.ChatGenerationChunk({
+                text,
+                message: new index_js_1.AIMessageChunk({ content: text }),
+            });
+            await runManager?.handleLLMNewToken(text);
+        }
+    }
+    // eslint-disable-next-line @typescript-eslint/no-explicit-any
+    _readChunks(reader) {
+        return {
+            async *[Symbol.asyncIterator]() {
+                let readResult = await reader.read();
+                while (!readResult.done) {
+                    yield readResult.value;
+                    readResult = await reader.read();
+                }
+            },
+        };
+    }
+    _combineLLMOutput() {
+        return {};
+    }
+}
+exports.ChatBedrock = ChatBedrock;
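
For orientation, a minimal usage sketch of the new chat model. The entry point is inferred from the package/chat_models/bedrock.* stubs in the file list above; the model id is illustrative, and credentials are assumed to resolve through the default AWS provider chain:

import { ChatBedrock } from "langchain/chat_models/bedrock";
import { HumanMessage } from "langchain/schema";

const chat = new ChatBedrock({
  model: "anthropic.claude-v2", // prefix must be one of: ai21, anthropic, amazon
  region: "us-east-1", // falls back to AWS_DEFAULT_REGION when omitted
  temperature: 0.2,
  maxTokens: 256,
});

// Anthropic models stream via invoke-with-response-stream under the hood;
// other providers use the plain invoke endpoint.
const res = await chat.call([new HumanMessage("Tell me a joke.")]);
console.log(res.content);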
package/dist/chat_models/bedrock.d.ts
@@ -0,0 +1,58 @@
+import { EventStreamCodec } from "@smithy/eventstream-codec";
+import { BaseBedrockInput, type CredentialType } from "../util/bedrock.js";
+import { SimpleChatModel, BaseChatModelParams } from "./base.js";
+import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
+import { BaseMessage, ChatGenerationChunk } from "../schema/index.js";
+export declare function convertMessagesToPromptAnthropic(messages: BaseMessage[], humanPrompt?: string, aiPrompt?: string): string;
+/**
+ * Function that converts an array of messages into a single string prompt
+ * that can be used as input for a chat model. It delegates the conversion
+ * logic to the appropriate provider-specific function.
+ * @param messages Array of messages to be converted.
+ * @param provider Name of the provider whose prompt format should be used.
+ * @returns A string prompt that can be used as input for a chat model.
+ */
+export declare function convertMessagesToPrompt(messages: BaseMessage[], provider: string): string;
+/**
+ * A chat model that interacts with the Bedrock service. It extends the
+ * `SimpleChatModel` class and implements the `BaseBedrockInput` interface.
+ * The class is designed to authenticate and interact with the Bedrock
+ * service, which is a part of Amazon Web Services (AWS). It uses AWS
+ * credentials for authentication and can be configured with various
+ * parameters such as the model to use, the AWS region, and the maximum
+ * number of tokens to generate.
+ */
+export declare class ChatBedrock extends SimpleChatModel implements BaseBedrockInput {
+    model: string;
+    region: string;
+    credentials: CredentialType;
+    temperature?: number | undefined;
+    maxTokens?: number | undefined;
+    fetchFn: typeof fetch;
+    endpointHost?: string;
+    stopSequences?: string[];
+    modelKwargs?: Record<string, unknown>;
+    codec: EventStreamCodec;
+    get lc_secrets(): {
+        [key: string]: string;
+    } | undefined;
+    _llmType(): string;
+    static lc_name(): string;
+    constructor(fields?: Partial<BaseBedrockInput> & BaseChatModelParams);
+    /** Call out to Bedrock service model.
+        Arguments:
+            messages: The messages to pass into the model.
+
+        Returns:
+            The string generated by the model.
+
+        Example:
+            response = model.call("Tell me a joke.")
+     */
+    _call(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<string>;
+    _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
+    _readChunks(reader: any): {
+        [Symbol.asyncIterator](): AsyncGenerator<any, void, unknown>;
+    };
+    _combineLLMOutput(): {};
+}
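
To make the Anthropic prompt format concrete, here is what the conversion declared above produces (same assumed entry point as the previous sketch). Because the last message is not from the assistant, an empty AIMessage is appended, which yields the closing "\n\nAssistant:" turn:

import { convertMessagesToPrompt } from "langchain/chat_models/bedrock";
import { HumanMessage, SystemMessage } from "langchain/schema";

const prompt = convertMessagesToPrompt(
  [new SystemMessage("Be terse."), new HumanMessage("Hi!")],
  "anthropic"
);
// prompt === "\n\nHuman: <admin>Be terse.</admin>\n\nHuman: Hi!\n\nAssistant: "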