langchain 0.0.133 → 0.0.135

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. package/dist/agents/chat_convo/outputParser.cjs +13 -10
  2. package/dist/agents/chat_convo/outputParser.js +13 -10
  3. package/dist/callbacks/base.d.ts +6 -3
  4. package/dist/callbacks/handlers/tracer.cjs +2 -2
  5. package/dist/callbacks/handlers/tracer.d.ts +2 -2
  6. package/dist/callbacks/handlers/tracer.js +2 -2
  7. package/dist/callbacks/index.cjs +2 -1
  8. package/dist/callbacks/index.d.ts +1 -1
  9. package/dist/callbacks/index.js +1 -1
  10. package/dist/callbacks/manager.cjs +2 -2
  11. package/dist/callbacks/manager.d.ts +2 -2
  12. package/dist/callbacks/manager.js +2 -2
  13. package/dist/chains/sql_db/sql_db_chain.d.ts +1 -1
  14. package/dist/chains/sql_db/sql_db_prompt.d.ts +6 -6
  15. package/dist/chat_models/openai.cjs +10 -5
  16. package/dist/chat_models/openai.js +10 -5
  17. package/dist/document_loaders/web/recursive_url.cjs +177 -0
  18. package/dist/document_loaders/web/recursive_url.d.ts +27 -0
  19. package/dist/document_loaders/web/recursive_url.js +173 -0
  20. package/dist/hub.cjs +16 -0
  21. package/dist/hub.d.ts +4 -0
  22. package/dist/hub.js +11 -0
  23. package/dist/llms/bedrock.cjs +63 -19
  24. package/dist/llms/bedrock.d.ts +9 -1
  25. package/dist/llms/bedrock.js +63 -19
  26. package/dist/llms/writer.cjs +167 -0
  27. package/dist/llms/writer.d.ts +60 -0
  28. package/dist/llms/writer.js +163 -0
  29. package/dist/load/import_constants.cjs +4 -0
  30. package/dist/load/import_constants.js +4 -0
  31. package/dist/load/import_map.cjs +2 -1
  32. package/dist/load/import_map.d.ts +1 -0
  33. package/dist/load/import_map.js +1 -0
  34. package/dist/memory/summary_buffer.d.ts +1 -1
  35. package/dist/retrievers/score_threshold.cjs +45 -0
  36. package/dist/retrievers/score_threshold.d.ts +15 -0
  37. package/dist/retrievers/score_threshold.js +41 -0
  38. package/dist/sql_db.cjs +8 -1
  39. package/dist/sql_db.d.ts +1 -0
  40. package/dist/sql_db.js +8 -1
  41. package/dist/stores/message/mongodb.cjs +48 -0
  42. package/dist/stores/message/mongodb.d.ts +15 -0
  43. package/dist/stores/message/mongodb.js +44 -0
  44. package/dist/tools/sql.cjs +9 -3
  45. package/dist/tools/sql.d.ts +0 -1
  46. package/dist/tools/sql.js +9 -3
  47. package/dist/util/sql_utils.cjs +8 -2
  48. package/dist/util/sql_utils.d.ts +2 -1
  49. package/dist/util/sql_utils.js +8 -2
  50. package/dist/vectorstores/googlevertexai.cjs +2 -1
  51. package/dist/vectorstores/googlevertexai.js +2 -1
  52. package/dist/vectorstores/myscale.cjs +2 -2
  53. package/dist/vectorstores/myscale.d.ts +1 -1
  54. package/dist/vectorstores/myscale.js +2 -2
  55. package/document_loaders/web/recursive_url.cjs +1 -0
  56. package/document_loaders/web/recursive_url.d.ts +1 -0
  57. package/document_loaders/web/recursive_url.js +1 -0
  58. package/hub.cjs +1 -0
  59. package/hub.d.ts +1 -0
  60. package/hub.js +1 -0
  61. package/llms/writer.cjs +1 -0
  62. package/llms/writer.d.ts +1 -0
  63. package/llms/writer.js +1 -0
  64. package/package.json +61 -1
  65. package/retrievers/score_threshold.cjs +1 -0
  66. package/retrievers/score_threshold.d.ts +1 -0
  67. package/retrievers/score_threshold.js +1 -0
  68. package/stores/message/mongodb.cjs +1 -0
  69. package/stores/message/mongodb.d.ts +1 -0
  70. package/stores/message/mongodb.js +1 -0
package/dist/document_loaders/web/recursive_url.js ADDED
@@ -0,0 +1,173 @@
+ import { JSDOM } from "jsdom";
+ import { AsyncCaller } from "../../util/async_caller.js";
+ import { BaseDocumentLoader } from "../base.js";
+ export class RecursiveUrlLoader extends BaseDocumentLoader {
+ constructor(url, options) {
+ super();
+ Object.defineProperty(this, "caller", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "url", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "excludeDirs", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "extractor", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "maxDepth", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "timeout", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "preventOutside", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ this.caller = new AsyncCaller({
+ maxConcurrency: 64,
+ maxRetries: 0,
+ ...options.callerOptions,
+ });
+ this.url = url;
+ this.excludeDirs = options.excludeDirs ?? [];
+ this.extractor = options.extractor ?? ((s) => s);
+ this.maxDepth = options.maxDepth ?? 2;
+ this.timeout = options.timeout ?? 10000;
+ this.preventOutside = options.preventOutside ?? true;
+ }
+ async fetchWithTimeout(resource, options) {
+ const { timeout, ...rest } = options;
+ return this.caller.call(() => fetch(resource, { ...rest, signal: AbortSignal.timeout(timeout) }));
+ }
+ getChildLinks(html, baseUrl) {
+ const allLinks = Array.from(new JSDOM(html).window.document.querySelectorAll("a")).map((a) => a.href);
+ const absolutePaths = [];
+ // eslint-disable-next-line no-script-url
+ const invalidPrefixes = ["javascript:", "mailto:", "#"];
+ const invalidSuffixes = [
+ ".css",
+ ".js",
+ ".ico",
+ ".png",
+ ".jpg",
+ ".jpeg",
+ ".gif",
+ ".svg",
+ ];
+ for (const link of allLinks) {
+ if (invalidPrefixes.some((prefix) => link.startsWith(prefix)) ||
+ invalidSuffixes.some((suffix) => link.endsWith(suffix)))
+ continue;
+ if (link.startsWith("http")) {
+ const isAllowed = !this.preventOutside || link.startsWith(baseUrl);
+ if (isAllowed)
+ absolutePaths.push(link);
+ }
+ else if (link.startsWith("//")) {
+ const base = new URL(baseUrl);
+ absolutePaths.push(base.protocol + link);
+ }
+ else {
+ const newLink = new URL(link, baseUrl).href;
+ absolutePaths.push(newLink);
+ }
+ }
+ return Array.from(new Set(absolutePaths));
+ }
+ extractMetadata(rawHtml, url) {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ const metadata = { source: url };
+ const { document } = new JSDOM(rawHtml).window;
+ const title = document.getElementsByTagName("title")[0];
+ if (title) {
+ metadata.title = title.textContent;
+ }
+ const description = document.querySelector("meta[name=description]");
+ if (description) {
+ metadata.description = description.getAttribute("content");
+ }
+ const html = document.getElementsByTagName("html")[0];
+ if (html) {
+ metadata.language = html.getAttribute("lang");
+ }
+ return metadata;
+ }
+ async getUrlAsDoc(url) {
+ let res;
+ try {
+ res = await this.fetchWithTimeout(url, { timeout: this.timeout });
+ res = await res.text();
+ }
+ catch (e) {
+ return null;
+ }
+ return {
+ pageContent: this.extractor(res),
+ metadata: this.extractMetadata(res, url),
+ };
+ }
+ async getChildUrlsRecursive(inputUrl, visited = new Set(), depth = 0) {
+ if (depth > this.maxDepth)
+ return [];
+ let url = inputUrl;
+ if (!inputUrl.endsWith("/"))
+ url += "/";
+ const isExcluded = this.excludeDirs.some((exDir) => url.startsWith(exDir));
+ if (isExcluded)
+ return [];
+ let res;
+ try {
+ res = await this.fetchWithTimeout(url, { timeout: this.timeout });
+ res = await res.text();
+ }
+ catch (e) {
+ return [];
+ }
+ const childUrls = this.getChildLinks(res, url);
+ const results = await Promise.all(childUrls.map((childUrl) => (async () => {
+ if (visited.has(childUrl))
+ return null;
+ visited.add(childUrl);
+ const childDoc = await this.getUrlAsDoc(childUrl);
+ if (!childDoc)
+ return null;
+ if (childUrl.endsWith("/")) {
+ const childUrlResponses = await this.getChildUrlsRecursive(childUrl, visited, depth + 1);
+ return [childDoc, ...childUrlResponses];
+ }
+ return [childDoc];
+ })()));
+ return results.flat().filter((docs) => docs !== null);
+ }
+ async load() {
+ const rootDoc = await this.getUrlAsDoc(this.url);
+ if (!rootDoc)
+ return [];
+ const docs = [rootDoc];
+ docs.push(...(await this.getChildUrlsRecursive(this.url, new Set([this.url]))));
+ return docs;
+ }
+ }
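
A minimal usage sketch of the new loader, assuming the `langchain/document_loaders/web/recursive_url` entrypoint added in this release (the URL and extractor below are illustrative, not part of the package):

import { RecursiveUrlLoader } from "langchain/document_loaders/web/recursive_url";

// Crawl up to two levels below the root URL, staying on the same site,
// and strip tags with a deliberately naive extractor.
const loader = new RecursiveUrlLoader("https://js.langchain.com/docs/", {
  maxDepth: 2,
  timeout: 10000,
  preventOutside: true,
  excludeDirs: ["https://js.langchain.com/docs/api/"],
  extractor: (html) => html.replace(/<[^>]*>/g, " "),
});
const docs = await loader.load(); // Documents with source/title/description/language metadata
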
package/dist/hub.cjs ADDED
@@ -0,0 +1,16 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.pull = exports.push = void 0;
+ const langchainhub_1 = require("langchainhub");
+ const index_js_1 = require("./load/index.cjs");
+ async function push(repoFullName, runnable, options) {
+ const client = new langchainhub_1.Client(options);
+ return client.push(repoFullName, JSON.stringify(runnable), options);
+ }
+ exports.push = push;
+ async function pull(ownerRepoCommit, options) {
+ const client = new langchainhub_1.Client(options);
+ const result = await client.pull(ownerRepoCommit);
+ return (0, index_js_1.load)(result);
+ }
+ exports.pull = pull;
package/dist/hub.d.ts ADDED
@@ -0,0 +1,4 @@
+ import { ClientConfiguration, HubPushOptions } from "langchainhub";
+ import { Runnable } from "./schema/runnable.js";
+ export declare function push(repoFullName: string, runnable: Runnable, options?: HubPushOptions & ClientConfiguration): Promise<any>;
+ export declare function pull<T extends Runnable>(ownerRepoCommit: string, options?: ClientConfiguration): Promise<T>;
package/dist/hub.js ADDED
@@ -0,0 +1,11 @@
+ import { Client } from "langchainhub";
+ import { load } from "./load/index.js";
+ export async function push(repoFullName, runnable, options) {
+ const client = new Client(options);
+ return client.push(repoFullName, JSON.stringify(runnable), options);
+ }
+ export async function pull(ownerRepoCommit, options) {
+ const client = new Client(options);
+ const result = await client.pull(ownerRepoCommit);
+ return load(result);
+ }
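
A hedged sketch of the new hub entrypoint, assuming the pushed object is a serializable Runnable such as a prompt template (the repo handle below is illustrative):

import * as hub from "langchain/hub";
import { PromptTemplate } from "langchain/prompts";

// push serializes the runnable and uploads it; pull fetches it and revives it via load().
const prompt = PromptTemplate.fromTemplate("Tell me a joke about {topic}");
await hub.push("my-handle/joke-prompt", prompt);
const pulled = await hub.pull<PromptTemplate>("my-handle/joke-prompt");
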
package/dist/llms/bedrock.cjs CHANGED
@@ -4,9 +4,12 @@ exports.Bedrock = void 0;
  const signature_v4_1 = require("@aws-sdk/signature-v4");
  const credential_provider_node_1 = require("@aws-sdk/credential-provider-node");
  const protocol_http_1 = require("@aws-sdk/protocol-http");
+ const eventstream_codec_1 = require("@smithy/eventstream-codec");
+ const util_utf8_1 = require("@smithy/util-utf8");
  const sha256_js_1 = require("@aws-crypto/sha256-js");
  const env_js_1 = require("../util/env.cjs");
  const base_js_1 = require("./base.cjs");
+ const index_js_1 = require("../schema/index.cjs");
  /**
  * A helper class used within the `Bedrock` class. It is responsible for
  * preparing the input and output for the Bedrock service. It formats the
@@ -17,20 +20,24 @@ class BedrockLLMInputOutputAdapter {
  /** Adapter class to prepare the inputs from Langchain to a format
  that LLM model expects. Also, provides a helper function to extract
  the generated text from the model response. */
- static prepareInput(provider, prompt) {
+ static prepareInput(provider, prompt, maxTokens = 50, temperature = 0) {
  const inputBody = {};
- if (provider === "anthropic" || provider === "ai21") {
+ if (provider === "anthropic") {
  inputBody.prompt = prompt;
+ inputBody.max_tokens_to_sample = maxTokens;
+ inputBody.temperature = temperature;
  }
- else if (provider === "amazon") {
- inputBody.inputText = prompt;
- inputBody.textGenerationConfig = {};
+ else if (provider === "ai21") {
+ inputBody.prompt = prompt;
+ inputBody.maxTokens = maxTokens;
+ inputBody.temperature = temperature;
  }
- else {
+ else if (provider === "amazon") {
  inputBody.inputText = prompt;
- }
- if (provider === "anthropic" && !("max_tokens_to_sample" in inputBody)) {
- inputBody.max_tokens_to_sample = 50;
+ inputBody.textGenerationConfig = {
+ maxTokenCount: maxTokens,
+ temperature,
+ };
  }
  return inputBody;
  }
@@ -46,9 +53,9 @@ class BedrockLLMInputOutputAdapter {
  return responseBody.completion;
  }
  else if (provider === "ai21") {
- return responseBody.completions[0].data.text;
+ return responseBody.data.text;
  }
- return responseBody.results[0].outputText;
+ return responseBody.outputText;
  }
  }
  /**
@@ -105,6 +112,12 @@ class Bedrock extends base_js_1.LLM {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "codec", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new eventstream_codec_1.EventStreamCodec(util_utf8_1.toUtf8, util_utf8_1.fromUtf8)
+ });
  this.model = fields?.model ?? this.model;
  const allowedModels = ["ai21", "anthropic", "amazon"];
  if (!allowedModels.includes(this.model.split(".")[0])) {
@@ -130,11 +143,18 @@ class Bedrock extends base_js_1.LLM {
  Example:
  response = model.call("Tell me a joke.")
  */
- async _call(prompt) {
+ async _call(prompt, options, runManager) {
+ const chunks = [];
+ for await (const chunk of this._streamResponseChunks(prompt, options, runManager)) {
+ chunks.push(chunk);
+ }
+ return chunks.map((chunk) => chunk.text).join("");
+ }
+ async *_streamResponseChunks(prompt, options, runManager) {
  const provider = this.model.split(".")[0];
  const service = "bedrock";
- const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt);
- const url = new URL(`https://${service}.${this.region}.amazonaws.com/model/${this.model}/invoke`);
+ const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature);
+ const url = new URL(`https://${service}.${this.region}.amazonaws.com/model/${this.model}/invoke-with-response-stream`);
  const request = new protocol_http_1.HttpRequest({
  hostname: url.hostname,
  path: url.pathname,
@@ -157,17 +177,41 @@ class Bedrock extends base_js_1.LLM {
  });
  const signedRequest = await signer.sign(request);
  // Send request to AWS using the low-level fetch API
- const response = await this.fetchFn(url, {
+ const response = await this.caller.callWithOptions({ signal: options.signal }, async () => this.fetchFn(url, {
  headers: signedRequest.headers,
  body: signedRequest.body,
  method: signedRequest.method,
- });
+ }));
  if (response.status < 200 || response.status >= 300) {
  throw Error(`Failed to access underlying url '${url}': got ${response.status} ${response.statusText}: ${await response.text()}`);
  }
- const responseJson = await response.json();
- const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, responseJson);
- return text;
+ const reader = response.body?.getReader();
+ for await (const chunk of this._readChunks(reader)) {
+ const event = this.codec.decode(chunk);
+ if (event.headers[":event-type"].value !== "chunk" ||
+ event.headers[":content-type"].value !== "application/json") {
+ throw Error(`Failed to get event chunk: got ${chunk}`);
+ }
+ const body = JSON.parse(Buffer.from(JSON.parse(new TextDecoder("utf-8").decode(event.body)).bytes, "base64").toString());
+ const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, body);
+ yield new index_js_1.GenerationChunk({
+ text,
+ generationInfo: {},
+ });
+ await runManager?.handleLLMNewToken(text);
+ }
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ _readChunks(reader) {
+ return {
+ async *[Symbol.asyncIterator]() {
+ let readResult = await reader.read();
+ while (!readResult.done) {
+ yield readResult.value;
+ readResult = await reader.read();
+ }
+ },
+ };
  }
  }
  exports.Bedrock = Bedrock;
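
For reference, `prepareInput` now builds a provider-specific request body rather than a bare prompt. A sketch of the three shapes, assuming maxTokens = 256 and temperature = 0.7:

// anthropic: { prompt, max_tokens_to_sample: 256, temperature: 0.7 }
// ai21:      { prompt, maxTokens: 256, temperature: 0.7 }
// amazon:    { inputText: prompt, textGenerationConfig: { maxTokenCount: 256, temperature: 0.7 } }
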
package/dist/llms/bedrock.d.ts CHANGED
@@ -1,5 +1,8 @@
+ import { EventStreamCodec } from "@smithy/eventstream-codec";
  import type { AwsCredentialIdentity, Provider } from "@aws-sdk/types";
  import { LLM, BaseLLMParams } from "./base.js";
+ import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
+ import { GenerationChunk } from "../schema/index.js";
  type CredentialType = AwsCredentialIdentity | Provider<AwsCredentialIdentity>;
  /** Bedrock models.
  To authenticate, the AWS client uses the following methods to automatically load credentials:
@@ -43,6 +46,7 @@ export declare class Bedrock extends LLM implements BedrockInput {
  temperature?: number | undefined;
  maxTokens?: number | undefined;
  fetchFn: typeof fetch;
+ codec: EventStreamCodec;
  get lc_secrets(): {
  [key: string]: string;
  } | undefined;
@@ -58,6 +62,10 @@ export declare class Bedrock extends LLM implements BedrockInput {
  Example:
  response = model.call("Tell me a joke.")
  */
- _call(prompt: string): Promise<string>;
+ _call(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<string>;
+ _streamResponseChunks(prompt: string, options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<GenerationChunk>;
+ _readChunks(reader: any): {
+ [Symbol.asyncIterator](): AsyncGenerator<any, void, unknown>;
+ };
  }
  export {};
package/dist/llms/bedrock.js CHANGED
@@ -1,9 +1,12 @@
  import { SignatureV4 } from "@aws-sdk/signature-v4";
  import { defaultProvider } from "@aws-sdk/credential-provider-node";
  import { HttpRequest } from "@aws-sdk/protocol-http";
+ import { EventStreamCodec } from "@smithy/eventstream-codec";
+ import { fromUtf8, toUtf8 } from "@smithy/util-utf8";
  import { Sha256 } from "@aws-crypto/sha256-js";
  import { getEnvironmentVariable } from "../util/env.js";
  import { LLM } from "./base.js";
+ import { GenerationChunk } from "../schema/index.js";
  /**
  * A helper class used within the `Bedrock` class. It is responsible for
  * preparing the input and output for the Bedrock service. It formats the
@@ -14,20 +17,24 @@ class BedrockLLMInputOutputAdapter {
  /** Adapter class to prepare the inputs from Langchain to a format
  that LLM model expects. Also, provides a helper function to extract
  the generated text from the model response. */
- static prepareInput(provider, prompt) {
+ static prepareInput(provider, prompt, maxTokens = 50, temperature = 0) {
  const inputBody = {};
- if (provider === "anthropic" || provider === "ai21") {
+ if (provider === "anthropic") {
  inputBody.prompt = prompt;
+ inputBody.max_tokens_to_sample = maxTokens;
+ inputBody.temperature = temperature;
  }
- else if (provider === "amazon") {
- inputBody.inputText = prompt;
- inputBody.textGenerationConfig = {};
+ else if (provider === "ai21") {
+ inputBody.prompt = prompt;
+ inputBody.maxTokens = maxTokens;
+ inputBody.temperature = temperature;
  }
- else {
+ else if (provider === "amazon") {
  inputBody.inputText = prompt;
- }
- if (provider === "anthropic" && !("max_tokens_to_sample" in inputBody)) {
- inputBody.max_tokens_to_sample = 50;
+ inputBody.textGenerationConfig = {
+ maxTokenCount: maxTokens,
+ temperature,
+ };
  }
  return inputBody;
  }
@@ -43,9 +50,9 @@ class BedrockLLMInputOutputAdapter {
  return responseBody.completion;
  }
  else if (provider === "ai21") {
- return responseBody.completions[0].data.text;
+ return responseBody.data.text;
  }
- return responseBody.results[0].outputText;
+ return responseBody.outputText;
  }
  }
  /**
@@ -102,6 +109,12 @@ export class Bedrock extends LLM {
  writable: true,
  value: void 0
  });
+ Object.defineProperty(this, "codec", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: new EventStreamCodec(toUtf8, fromUtf8)
+ });
  this.model = fields?.model ?? this.model;
  const allowedModels = ["ai21", "anthropic", "amazon"];
  if (!allowedModels.includes(this.model.split(".")[0])) {
@@ -127,11 +140,18 @@ export class Bedrock extends LLM {
  Example:
  response = model.call("Tell me a joke.")
  */
- async _call(prompt) {
+ async _call(prompt, options, runManager) {
+ const chunks = [];
+ for await (const chunk of this._streamResponseChunks(prompt, options, runManager)) {
+ chunks.push(chunk);
+ }
+ return chunks.map((chunk) => chunk.text).join("");
+ }
+ async *_streamResponseChunks(prompt, options, runManager) {
  const provider = this.model.split(".")[0];
  const service = "bedrock";
- const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt);
- const url = new URL(`https://${service}.${this.region}.amazonaws.com/model/${this.model}/invoke`);
+ const inputBody = BedrockLLMInputOutputAdapter.prepareInput(provider, prompt, this.maxTokens, this.temperature);
+ const url = new URL(`https://${service}.${this.region}.amazonaws.com/model/${this.model}/invoke-with-response-stream`);
  const request = new HttpRequest({
  hostname: url.hostname,
  path: url.pathname,
@@ -154,16 +174,40 @@ export class Bedrock extends LLM {
  });
  const signedRequest = await signer.sign(request);
  // Send request to AWS using the low-level fetch API
- const response = await this.fetchFn(url, {
+ const response = await this.caller.callWithOptions({ signal: options.signal }, async () => this.fetchFn(url, {
  headers: signedRequest.headers,
  body: signedRequest.body,
  method: signedRequest.method,
- });
+ }));
  if (response.status < 200 || response.status >= 300) {
  throw Error(`Failed to access underlying url '${url}': got ${response.status} ${response.statusText}: ${await response.text()}`);
  }
- const responseJson = await response.json();
- const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, responseJson);
- return text;
+ const reader = response.body?.getReader();
+ for await (const chunk of this._readChunks(reader)) {
+ const event = this.codec.decode(chunk);
+ if (event.headers[":event-type"].value !== "chunk" ||
+ event.headers[":content-type"].value !== "application/json") {
+ throw Error(`Failed to get event chunk: got ${chunk}`);
+ }
+ const body = JSON.parse(Buffer.from(JSON.parse(new TextDecoder("utf-8").decode(event.body)).bytes, "base64").toString());
+ const text = BedrockLLMInputOutputAdapter.prepareOutput(provider, body);
+ yield new GenerationChunk({
+ text,
+ generationInfo: {},
+ });
+ await runManager?.handleLLMNewToken(text);
+ }
+ }
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ _readChunks(reader) {
+ return {
+ async *[Symbol.asyncIterator]() {
+ let readResult = await reader.read();
+ while (!readResult.done) {
+ yield readResult.value;
+ readResult = await reader.read();
+ }
+ },
+ };
  }
  }
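
Taken together, the Bedrock LLM now calls the `invoke-with-response-stream` endpoint, decodes the AWS event stream, and emits tokens through the callback manager. A minimal usage sketch, assuming an Anthropic model id, credentials resolved by the default AWS provider chain, and an illustrative token handler:

import { Bedrock } from "langchain/llms/bedrock";

// maxTokens and temperature are now forwarded into the provider-specific request body.
const model = new Bedrock({
  model: "anthropic.claude-v2",
  region: "us-east-1",
  maxTokens: 256,
  temperature: 0.7,
  // Streamed tokens surface through handleLLMNewToken as event-stream chunks are decoded.
  callbacks: [{ handleLLMNewToken: (token) => process.stdout.write(token) }],
});

const text = await model.call("Tell me a joke.");
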
package/dist/llms/writer.cjs ADDED
@@ -0,0 +1,167 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.Writer = void 0;
+ const writer_sdk_1 = require("@writerai/writer-sdk");
+ const base_js_1 = require("./base.cjs");
+ const env_js_1 = require("../util/env.cjs");
+ /**
+ * Class representing a Writer Large Language Model (LLM). It interacts
+ * with the Writer API to generate text completions.
+ */
+ class Writer extends base_js_1.LLM {
+ static lc_name() {
+ return "Writer";
+ }
+ get lc_secrets() {
+ return {
+ apiKey: "WRITER_API_KEY",
+ orgId: "WRITER_ORG_ID",
+ };
+ }
+ get lc_aliases() {
+ return {
+ apiKey: "writer_api_key",
+ orgId: "writer_org_id",
+ };
+ }
+ constructor(fields) {
+ super(fields ?? {});
+ Object.defineProperty(this, "lc_serializable", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: true
+ });
+ Object.defineProperty(this, "apiKey", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "orgId", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "model", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: "palmyra-instruct"
+ });
+ Object.defineProperty(this, "temperature", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "minTokens", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "maxTokens", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "bestOf", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "frequencyPenalty", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "logprobs", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "n", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "presencePenalty", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ Object.defineProperty(this, "topP", {
+ enumerable: true,
+ configurable: true,
+ writable: true,
+ value: void 0
+ });
+ const apiKey = fields?.apiKey ?? (0, env_js_1.getEnvironmentVariable)("WRITER_API_KEY");
+ const orgId = fields?.orgId ?? (0, env_js_1.getEnvironmentVariable)("WRITER_ORG_ID");
+ if (!apiKey) {
+ throw new Error("Please set the WRITER_API_KEY environment variable or pass it to the constructor as the apiKey field.");
+ }
+ if (!orgId) {
+ throw new Error("Please set the WRITER_ORG_ID environment variable or pass it to the constructor as the orgId field.");
+ }
+ this.apiKey = apiKey;
+ this.orgId = typeof orgId === "string" ? parseInt(orgId, 10) : orgId;
+ this.model = fields?.model ?? this.model;
+ this.temperature = fields?.temperature ?? this.temperature;
+ this.minTokens = fields?.minTokens ?? this.minTokens;
+ this.maxTokens = fields?.maxTokens ?? this.maxTokens;
+ this.bestOf = fields?.bestOf ?? this.bestOf;
+ this.frequencyPenalty = fields?.frequencyPenalty ?? this.frequencyPenalty;
+ this.logprobs = fields?.logprobs ?? this.logprobs;
+ this.n = fields?.n ?? this.n;
+ this.presencePenalty = fields?.presencePenalty ?? this.presencePenalty;
+ this.topP = fields?.topP ?? this.topP;
+ }
+ _llmType() {
+ return "writer";
+ }
+ /** @ignore */
+ async _call(prompt, options) {
+ const sdk = new writer_sdk_1.Writer({
+ security: {
+ apiKey: this.apiKey,
+ },
+ organizationId: this.orgId,
+ });
+ return this.caller.callWithOptions({ signal: options.signal }, async () => {
+ try {
+ const res = await sdk.completions.create({
+ completionRequest: {
+ prompt,
+ stop: options.stop,
+ temperature: this.temperature,
+ minTokens: this.minTokens,
+ maxTokens: this.maxTokens,
+ bestOf: this.bestOf,
+ n: this.n,
+ frequencyPenalty: this.frequencyPenalty,
+ logprobs: this.logprobs,
+ presencePenalty: this.presencePenalty,
+ topP: this.topP,
+ },
+ modelId: this.model,
+ });
+ return (res.completionResponse?.choices?.[0].text ?? "No completion found.");
+ }
+ catch (e) {
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ e.response = e.rawResponse;
+ throw e;
+ }
+ });
+ }
+ }
+ exports.Writer = Writer;
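
A minimal sketch of the new Writer integration (the prompt is illustrative; apiKey and orgId may instead come from the WRITER_API_KEY and WRITER_ORG_ID environment variables):

import { Writer } from "langchain/llms/writer";

// palmyra-instruct is the default model; the generation parameters are optional.
const model = new Writer({
  model: "palmyra-instruct",
  temperature: 0.7,
  maxTokens: 256,
});
const text = await model.call("Write a tagline for an ice cream shop.");
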