langchain 0.0.153 → 0.0.155
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/base_language/index.cjs +36 -0
- package/dist/base_language/index.d.ts +9 -1
- package/dist/base_language/index.js +36 -0
- package/dist/cache/base.cjs +24 -1
- package/dist/cache/base.d.ts +9 -0
- package/dist/cache/base.js +21 -0
- package/dist/cache/cloudflare_kv.cjs +2 -5
- package/dist/cache/cloudflare_kv.js +3 -6
- package/dist/cache/ioredis.cjs +16 -6
- package/dist/cache/ioredis.d.ts +5 -2
- package/dist/cache/ioredis.js +17 -7
- package/dist/cache/momento.cjs +6 -2
- package/dist/cache/momento.js +7 -3
- package/dist/cache/redis.cjs +3 -5
- package/dist/cache/redis.js +4 -6
- package/dist/cache/upstash_redis.cjs +2 -5
- package/dist/cache/upstash_redis.js +3 -6
- package/dist/callbacks/base.d.ts +42 -28
- package/dist/callbacks/handlers/log_stream.cjs +283 -0
- package/dist/callbacks/handlers/log_stream.d.ts +99 -0
- package/dist/callbacks/handlers/log_stream.js +277 -0
- package/dist/callbacks/handlers/tracer.cjs +34 -18
- package/dist/callbacks/handlers/tracer.d.ts +18 -16
- package/dist/callbacks/handlers/tracer.js +34 -18
- package/dist/chat_models/base.cjs +64 -20
- package/dist/chat_models/base.d.ts +7 -0
- package/dist/chat_models/base.js +64 -20
- package/dist/document_loaders/web/notionapi.cjs +8 -4
- package/dist/document_loaders/web/notionapi.js +8 -4
- package/dist/document_loaders/web/searchapi.cjs +134 -0
- package/dist/document_loaders/web/searchapi.d.ts +65 -0
- package/dist/document_loaders/web/searchapi.js +130 -0
- package/dist/llms/base.cjs +10 -26
- package/dist/llms/base.d.ts +4 -4
- package/dist/llms/base.js +4 -20
- package/dist/load/import_constants.cjs +1 -0
- package/dist/load/import_constants.js +1 -0
- package/dist/load/import_map.cjs +3 -2
- package/dist/load/import_map.d.ts +1 -0
- package/dist/load/import_map.js +1 -0
- package/dist/schema/index.cjs +50 -1
- package/dist/schema/index.d.ts +5 -0
- package/dist/schema/index.js +48 -0
- package/dist/schema/runnable/base.cjs +64 -5
- package/dist/schema/runnable/base.d.ts +13 -0
- package/dist/schema/runnable/base.js +64 -5
- package/dist/stores/message/utils.cjs +2 -50
- package/dist/stores/message/utils.d.ts +0 -14
- package/dist/stores/message/utils.js +2 -49
- package/dist/tools/index.cjs +3 -1
- package/dist/tools/index.d.ts +1 -0
- package/dist/tools/index.js +1 -0
- package/dist/tools/searchapi.cjs +139 -0
- package/dist/tools/searchapi.d.ts +64 -0
- package/dist/tools/searchapi.js +135 -0
- package/dist/util/fast-json-patch/index.cjs +48 -0
- package/dist/util/fast-json-patch/index.d.ts +21 -0
- package/dist/util/fast-json-patch/index.js +15 -0
- package/dist/util/fast-json-patch/src/core.cjs +469 -0
- package/dist/util/fast-json-patch/src/core.d.ts +111 -0
- package/dist/util/fast-json-patch/src/core.js +459 -0
- package/dist/util/fast-json-patch/src/helpers.cjs +194 -0
- package/dist/util/fast-json-patch/src/helpers.d.ts +36 -0
- package/dist/util/fast-json-patch/src/helpers.js +181 -0
- package/dist/util/googlevertexai-webauth.cjs +6 -2
- package/dist/util/googlevertexai-webauth.d.ts +1 -0
- package/dist/util/googlevertexai-webauth.js +6 -2
- package/dist/util/stream.cjs +2 -40
- package/dist/util/stream.d.ts +1 -2
- package/dist/util/stream.js +1 -38
- package/dist/vectorstores/pgvector.cjs +1 -1
- package/dist/vectorstores/pgvector.js +1 -1
- package/dist/vectorstores/vercel_postgres.cjs +300 -0
- package/dist/vectorstores/vercel_postgres.d.ts +145 -0
- package/dist/vectorstores/vercel_postgres.js +296 -0
- package/document_loaders/web/searchapi.cjs +1 -0
- package/document_loaders/web/searchapi.d.ts +1 -0
- package/document_loaders/web/searchapi.js +1 -0
- package/package.json +22 -1
- package/vectorstores/vercel_postgres.cjs +1 -0
- package/vectorstores/vercel_postgres.d.ts +1 -0
- package/vectorstores/vercel_postgres.js +1 -0
package/dist/document_loaders/web/searchapi.d.ts
ADDED
@@ -0,0 +1,65 @@
+import { Document } from "../../document.js";
+import { BaseDocumentLoader } from "../base.js";
+type JSONPrimitive = string | number | boolean | null;
+type JSONValue = JSONPrimitive | JSONObject | JSONArray;
+interface JSONObject {
+    [key: string]: JSONValue;
+}
+interface JSONArray extends Array<JSONValue> {
+}
+/**
+ * SearchApiParameters Type Definition.
+ *
+ * For more parameters and supported search engines, refer specific engine documentation:
+ * Google - https://www.searchapi.io/docs/google
+ * Google News - https://www.searchapi.io/docs/google-news
+ * Google Scholar - https://www.searchapi.io/docs/google-scholar
+ * YouTube Transcripts - https://www.searchapi.io/docs/youtube-transcripts
+ * and others.
+ *
+ */
+type SearchApiParameters = {
+    [key: string]: JSONValue;
+};
+/**
+ * Class representing a document loader for loading search results from
+ * the SearchApi. It extends the BaseDocumentLoader class.
+ */
+export declare class SearchApiLoader extends BaseDocumentLoader {
+    private apiKey;
+    private parameters;
+    constructor(params: SearchApiParameters);
+    /**
+     * Builds the URL for the SearchApi search request.
+     * @returns The URL for the search request.
+     */
+    buildUrl(): string;
+    /**
+     * Extracts documents from the provided output.
+     * @param output - The output to extract documents from.
+     * @param responseType - The type of the response to extract documents from.
+     * @returns An array of Documents.
+     */
+    private extractDocuments;
+    /**
+     * Processes the response data from the SearchApi search request and converts it into an array of Documents.
+     * @param data - The response data from the SearchApi search request.
+     * @returns An array of Documents.
+     */
+    processResponseData(data: Record<string, unknown>): Document[];
+    /**
+     * Fetches the data from the provided URL and returns it as a JSON object.
+     * If an error occurs during the fetch operation, an exception is thrown with the error message.
+     * @param url - The URL to fetch data from.
+     * @returns A promise that resolves to the fetched data as a JSON object.
+     * @throws An error if the fetch operation fails.
+     */
+    private fetchData;
+    /**
+     * Loads the search results from the SearchApi.
+     * @returns An array of Documents representing the search results.
+     * @throws An error if the search results could not be loaded.
+     */
+    load(): Promise<Document[]>;
+}
+export {};

package/dist/document_loaders/web/searchapi.js
ADDED
@@ -0,0 +1,130 @@
+import { getEnvironmentVariable } from "../../util/env.js";
+import { Document } from "../../document.js";
+import { BaseDocumentLoader } from "../base.js";
+/**
+ * Class representing a document loader for loading search results from
+ * the SearchApi. It extends the BaseDocumentLoader class.
+ */
+export class SearchApiLoader extends BaseDocumentLoader {
+    constructor(params) {
+        super();
+        Object.defineProperty(this, "apiKey", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        Object.defineProperty(this, "parameters", {
+            enumerable: true,
+            configurable: true,
+            writable: true,
+            value: void 0
+        });
+        const { apiKey = getEnvironmentVariable("SEARCHAPI_API_KEY") } = params;
+        if (typeof apiKey !== "string") {
+            throw new Error("Invalid type for apiKey. Expected string.");
+        }
+        if (!apiKey) {
+            throw new Error("SearchApi API key not set. You can set it as SEARCHAPI_API_KEY in your .env file, or pass it to SearchApi.");
+        }
+        this.apiKey = apiKey;
+        this.parameters = { ...params };
+    }
+    /**
+     * Builds the URL for the SearchApi search request.
+     * @returns The URL for the search request.
+     */
+    buildUrl() {
+        this.parameters = {
+            engine: "google",
+            api_key: this.apiKey,
+            ...this.parameters,
+        };
+        const preparedParams = Object.entries(this.parameters)
+            .filter(([key, value]) => value !== undefined && value !== null && key !== "apiKey")
+            .map(([key, value]) => [key, `${value}`]);
+        const searchParams = new URLSearchParams(preparedParams);
+        return `https://www.searchapi.io/api/v1/search?${searchParams}`;
+    }
+    /**
+     * Extracts documents from the provided output.
+     * @param output - The output to extract documents from.
+     * @param responseType - The type of the response to extract documents from.
+     * @returns An array of Documents.
+     */
+    extractDocuments(output, responseType) {
+        const documents = [];
+        const results = Array.isArray(output) ? output : [output];
+        if (responseType === "transcripts") {
+            const pageContent = results.map((result) => result.text).join("\n");
+            const metadata = {
+                source: "SearchApi",
+                responseType,
+            };
+            documents.push(new Document({ pageContent, metadata }));
+        }
+        else {
+            for (const result of results) {
+                const pageContent = JSON.stringify(result);
+                const metadata = {
+                    source: "SearchApi",
+                    responseType,
+                };
+                documents.push(new Document({ pageContent, metadata }));
+            }
+        }
+        return documents;
+    }
+    /**
+     * Processes the response data from the SearchApi search request and converts it into an array of Documents.
+     * @param data - The response data from the SearchApi search request.
+     * @returns An array of Documents.
+     */
+    processResponseData(data) {
+        const documents = [];
+        const responseTypes = [
+            "answer_box",
+            "shopping_results",
+            "knowledge_graph",
+            "organic_results",
+            "transcripts",
+        ];
+        for (const responseType of responseTypes) {
+            if (responseType in data) {
+                documents.push(...this.extractDocuments(data[responseType], responseType));
+            }
+        }
+        return documents;
+    }
+    /**
+     * Fetches the data from the provided URL and returns it as a JSON object.
+     * If an error occurs during the fetch operation, an exception is thrown with the error message.
+     * @param url - The URL to fetch data from.
+     * @returns A promise that resolves to the fetched data as a JSON object.
+     * @throws An error if the fetch operation fails.
+     */
+    async fetchData(url) {
+        const response = await fetch(url);
+        const data = await response.json();
+        if (data.error) {
+            throw new Error(`Failed to load search results from SearchApi due to: ${data.error}`);
+        }
+        return data;
+    }
+    /**
+     * Loads the search results from the SearchApi.
+     * @returns An array of Documents representing the search results.
+     * @throws An error if the search results could not be loaded.
+     */
+    async load() {
+        const url = this.buildUrl();
+        const data = await this.fetchData(url);
+        try {
+            return this.processResponseData(data);
+        }
+        catch (error) {
+            console.error(error);
+            throw new Error(`Failed to process search results from SearchApi: ${error}`);
+        }
+    }
+}
package/dist/llms/base.cjs
CHANGED
@@ -1,16 +1,15 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.LLM = exports.BaseLLM = void 0;
-const index_js_1 = require("../
-const index_js_2 = require("../
-const index_js_3 = require("../base_language/index.cjs");
+const index_js_1 = require("../schema/index.cjs");
+const index_js_2 = require("../base_language/index.cjs");
 const manager_js_1 = require("../callbacks/manager.cjs");
 const base_js_1 = require("../memory/base.cjs");
 /**
  * LLM Wrapper. Provides an {@link call} (an {@link generate}) function that takes in a prompt (or prompts) and returns a string.
  */
-class BaseLLM extends
-    constructor({
+class BaseLLM extends index_js_2.BaseLanguageModel {
+    constructor({ concurrency, ...rest }) {
         super(concurrency ? { maxConcurrency: concurrency, ...rest } : rest);
         Object.defineProperty(this, "lc_namespace", {
             enumerable: true,
@@ -18,21 +17,6 @@ class BaseLLM extends index_js_3.BaseLanguageModel {
             writable: true,
             value: ["langchain", "llms", this._llmType()]
         });
-        Object.defineProperty(this, "cache", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        if (typeof cache === "object") {
-            this.cache = cache;
-        }
-        else if (cache) {
-            this.cache = index_js_1.InMemoryCache.global();
-        }
-        else {
-            this.cache = undefined;
-        }
     }
     /**
      * This method takes an input and options, and returns a string. It
@@ -72,7 +56,7 @@ class BaseLLM extends index_js_3.BaseLanguageModel {
            invocation_params: this?.invocationParams(callOptions),
        };
        const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], undefined, undefined, extra);
-        let generation = new
+        let generation = new index_js_1.GenerationChunk({
            text: "",
        });
        try {
@@ -160,7 +144,7 @@ class BaseLLM extends index_js_3.BaseLanguageModel {
        // This defines RUN_KEY as a non-enumerable property on the output object
        // so that it is not serialized when the output is stringified, and so that
        // it isnt included when listing the keys of the output object.
-        Object.defineProperty(output,
+        Object.defineProperty(output, index_js_1.RUN_KEY, {
            value: runIds ? { runIds } : undefined,
            configurable: true,
        });
@@ -186,9 +170,7 @@ class BaseLLM extends index_js_3.BaseLanguageModel {
            return this._generateUncached(prompts, callOptions, runnableConfig);
        }
        const { cache } = this;
-        const
-        params.stop = callOptions.stop ?? params.stop;
-        const llmStringKey = `${Object.entries(params).sort()}`;
+        const llmStringKey = this._getSerializedCacheKeyParametersForCall(callOptions);
        const missingPromptIndices = [];
        const generations = await Promise.all(prompts.map(async (prompt, index) => {
            const result = await cache.lookup(prompt, llmStringKey);
@@ -238,7 +220,7 @@ class BaseLLM extends index_js_3.BaseLanguageModel {
    async predictMessages(messages, options, callbacks) {
        const text = (0, base_js_1.getBufferString)(messages);
        const prediction = await this.call(text, options, callbacks);
-        return new
+        return new index_js_1.AIMessage(prediction);
    }
    /**
     * Get the identifying parameters of the LLM.
@@ -248,6 +230,7 @@ class BaseLLM extends index_js_3.BaseLanguageModel {
        return {};
    }
    /**
+     * @deprecated
     * Return a json-like object representing this LLM.
     */
    serialize() {
@@ -261,6 +244,7 @@ class BaseLLM extends index_js_3.BaseLanguageModel {
        return "base_llm";
    }
    /**
+     * @deprecated
     * Load an LLM from a json-like object describing it.
     */
    static async deserialize(data) {
package/dist/llms/base.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { BaseMessage, BasePromptValue, GenerationChunk, LLMResult } from "../schema/index.js";
 import { BaseLanguageModel, BaseLanguageModelCallOptions, BaseLanguageModelInput, BaseLanguageModelParams } from "../base_language/index.js";
 import { BaseCallbackConfig, CallbackManagerForLLMRun, Callbacks } from "../callbacks/manager.js";
 import { RunnableConfig } from "../schema/runnable/config.js";
@@ -11,7 +11,6 @@ export interface BaseLLMParams extends BaseLanguageModelParams {
     * @deprecated Use `maxConcurrency` instead
     */
    concurrency?: number;
-    cache?: BaseCache | boolean;
 }
 export interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {
 }
@@ -21,8 +20,7 @@ export interface BaseLLMCallOptions extends BaseLanguageModelCallOptions {
 export declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = BaseLLMCallOptions> extends BaseLanguageModel<string, CallOptions> {
    ParsedCallOptions: Omit<CallOptions, keyof RunnableConfig & "timeout">;
    lc_namespace: string[];
-
-    constructor({ cache, concurrency, ...rest }: BaseLLMParams);
+    constructor({ concurrency, ...rest }: BaseLLMParams);
    /**
     * This method takes an input and options, and returns a string. It
     * converts the input to a prompt value and generates a result based on
@@ -90,11 +88,13 @@ export declare abstract class BaseLLM<CallOptions extends BaseLLMCallOptions = B
     */
    abstract _llmType(): string;
    /**
+     * @deprecated
     * Return a json-like object representing this LLM.
     */
    serialize(): SerializedLLM;
    _modelType(): string;
    /**
+     * @deprecated
     * Load an LLM from a json-like object describing it.
     */
    static deserialize(data: SerializedLLM): Promise<BaseLLM>;
package/dist/llms/base.js
CHANGED
@@ -1,4 +1,3 @@
-import { InMemoryCache } from "../cache/index.js";
 import { AIMessage, GenerationChunk, RUN_KEY, } from "../schema/index.js";
 import { BaseLanguageModel, } from "../base_language/index.js";
 import { CallbackManager, } from "../callbacks/manager.js";
@@ -7,7 +6,7 @@ import { getBufferString } from "../memory/base.js";
  * LLM Wrapper. Provides an {@link call} (an {@link generate}) function that takes in a prompt (or prompts) and returns a string.
  */
 export class BaseLLM extends BaseLanguageModel {
-    constructor({
+    constructor({ concurrency, ...rest }) {
         super(concurrency ? { maxConcurrency: concurrency, ...rest } : rest);
         Object.defineProperty(this, "lc_namespace", {
             enumerable: true,
@@ -15,21 +14,6 @@ export class BaseLLM extends BaseLanguageModel {
             writable: true,
             value: ["langchain", "llms", this._llmType()]
         });
-        Object.defineProperty(this, "cache", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        if (typeof cache === "object") {
-            this.cache = cache;
-        }
-        else if (cache) {
-            this.cache = InMemoryCache.global();
-        }
-        else {
-            this.cache = undefined;
-        }
    }
    /**
     * This method takes an input and options, and returns a string. It
@@ -183,9 +167,7 @@ export class BaseLLM extends BaseLanguageModel {
            return this._generateUncached(prompts, callOptions, runnableConfig);
        }
        const { cache } = this;
-        const
-        params.stop = callOptions.stop ?? params.stop;
-        const llmStringKey = `${Object.entries(params).sort()}`;
+        const llmStringKey = this._getSerializedCacheKeyParametersForCall(callOptions);
        const missingPromptIndices = [];
        const generations = await Promise.all(prompts.map(async (prompt, index) => {
            const result = await cache.lookup(prompt, llmStringKey);
@@ -245,6 +227,7 @@ export class BaseLLM extends BaseLanguageModel {
        return {};
    }
    /**
+     * @deprecated
     * Return a json-like object representing this LLM.
     */
    serialize() {
@@ -258,6 +241,7 @@ export class BaseLLM extends BaseLanguageModel {
        return "base_llm";
    }
    /**
+     * @deprecated
     * Load an LLM from a json-like object describing it.
     */
    static async deserialize(data) {
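
The three llms/base.* diffs above drop the cache option from BaseLLMParams and remove both the cache wiring in the BaseLLM constructor and the old `${Object.entries(params).sort()}` cache key. Given the accompanying additions to base_language/index.* and cache/base.* in the file list, that logic appears to have moved into the shared BaseLanguageModel layer, with the key now produced by _getSerializedCacheKeyParametersForCall(callOptions). A sketch of the presumably unchanged caller-side behavior, assuming cache: true is still accepted through the shared base-class params:

import { OpenAI } from "langchain/llms/openai";

async function demo() {
  // Assumption: cache: true is still honored, now by the BaseLanguageModel base
  // class rather than by BaseLLM itself (this diff only shows the removal side).
  const llm = new OpenAI({ temperature: 0, cache: true });

  // A repeated call with the same prompt and call options should be served from
  // the cache; the cache key is now derived from the serialized call parameters
  // instead of `${Object.entries(params).sort()}`.
  const first = await llm.call("Tell me a joke");
  const second = await llm.call("Tell me a joke");
  console.log(first === second);
}

demo().catch(console.error);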
package/dist/load/import_constants.cjs
CHANGED
@@ -57,6 +57,7 @@ exports.optionalImportEntrypoints = [
    "langchain/vectorstores/singlestore",
    "langchain/vectorstores/tigris",
    "langchain/vectorstores/usearch",
+    "langchain/vectorstores/vercel_postgres",
    "langchain/vectorstores/voy",
    "langchain/vectorstores/zep",
    "langchain/memory/zep",
package/dist/load/import_constants.js
CHANGED
@@ -54,6 +54,7 @@ export const optionalImportEntrypoints = [
    "langchain/vectorstores/singlestore",
    "langchain/vectorstores/tigris",
    "langchain/vectorstores/usearch",
+    "langchain/vectorstores/vercel_postgres",
    "langchain/vectorstores/voy",
    "langchain/vectorstores/zep",
    "langchain/memory/zep",
package/dist/load/import_map.cjs
CHANGED
@@ -24,8 +24,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
    return result;
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
-exports.evaluation = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__autogpt = exports.util__math = exports.storage__in_memory = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = void 0;
+exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__output_parser = exports.schema__document = exports.schema = exports.chat_models__minimax = exports.chat_models__ollama = exports.chat_models__baiduwenxin = exports.chat_models__fireworks = exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__web__searchapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__fireworks = exports.llms__ollama = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__minimax = exports.embeddings__openai = exports.embeddings__ollama = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains = exports.tools = exports.base_language = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
+exports.evaluation = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__autogpt = exports.util__math = exports.storage__in_memory = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = void 0;
 exports.load__serializable = __importStar(require("../load/serializable.cjs"));
 exports.agents = __importStar(require("../agents/index.cjs"));
 exports.agents__toolkits = __importStar(require("../agents/toolkits/index.cjs"));
@@ -55,6 +55,7 @@ exports.text_splitter = __importStar(require("../text_splitter.cjs"));
 exports.memory = __importStar(require("../memory/index.cjs"));
 exports.document = __importStar(require("../document.cjs"));
 exports.document_loaders__base = __importStar(require("../document_loaders/base.cjs"));
+exports.document_loaders__web__searchapi = __importStar(require("../document_loaders/web/searchapi.cjs"));
 exports.document_loaders__web__serpapi = __importStar(require("../document_loaders/web/serpapi.cjs"));
 exports.document_loaders__web__sort_xyz_blockchain = __importStar(require("../document_loaders/web/sort_xyz_blockchain.cjs"));
 exports.document_transformers__openai_functions = __importStar(require("../document_transformers/openai_functions.cjs"));

package/dist/load/import_map.d.ts
CHANGED
@@ -27,6 +27,7 @@ export * as text_splitter from "../text_splitter.js";
 export * as memory from "../memory/index.js";
 export * as document from "../document.js";
 export * as document_loaders__base from "../document_loaders/base.js";
+export * as document_loaders__web__searchapi from "../document_loaders/web/searchapi.js";
 export * as document_loaders__web__serpapi from "../document_loaders/web/serpapi.js";
 export * as document_loaders__web__sort_xyz_blockchain from "../document_loaders/web/sort_xyz_blockchain.js";
 export * as document_transformers__openai_functions from "../document_transformers/openai_functions.js";
package/dist/load/import_map.js
CHANGED
@@ -28,6 +28,7 @@ export * as text_splitter from "../text_splitter.js";
 export * as memory from "../memory/index.js";
 export * as document from "../document.js";
 export * as document_loaders__base from "../document_loaders/base.js";
+export * as document_loaders__web__searchapi from "../document_loaders/web/searchapi.js";
 export * as document_loaders__web__serpapi from "../document_loaders/web/serpapi.js";
 export * as document_loaders__web__sort_xyz_blockchain from "../document_loaders/web/sort_xyz_blockchain.js";
 export * as document_transformers__openai_functions from "../document_transformers/openai_functions.js";
package/dist/schema/index.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.Docstore = exports.BaseEntityStore = exports.BaseFileStore = exports.BaseCache = exports.BaseListChatMessageHistory = exports.BaseChatMessageHistory = exports.BasePromptValue = exports.ChatGenerationChunk = exports.ChatMessageChunk = exports.coerceMessageLikeToMessage = exports.isBaseMessage = exports.ChatMessage = exports.FunctionMessageChunk = exports.FunctionMessage = exports.SystemChatMessage = exports.AIChatMessage = exports.HumanChatMessage = exports.BaseChatMessage = exports.SystemMessageChunk = exports.SystemMessage = exports.AIMessageChunk = exports.AIMessage = exports.HumanMessageChunk = exports.HumanMessage = exports.BaseMessageChunk = exports.BaseMessage = exports.GenerationChunk = exports.RUN_KEY = void 0;
+exports.Docstore = exports.BaseEntityStore = exports.BaseFileStore = exports.BaseCache = exports.BaseListChatMessageHistory = exports.BaseChatMessageHistory = exports.BasePromptValue = exports.mapStoredMessageToChatMessage = exports.ChatGenerationChunk = exports.ChatMessageChunk = exports.coerceMessageLikeToMessage = exports.isBaseMessage = exports.ChatMessage = exports.FunctionMessageChunk = exports.FunctionMessage = exports.SystemChatMessage = exports.AIChatMessage = exports.HumanChatMessage = exports.BaseChatMessage = exports.SystemMessageChunk = exports.SystemMessage = exports.AIMessageChunk = exports.AIMessage = exports.HumanMessageChunk = exports.HumanMessage = exports.BaseMessageChunk = exports.BaseMessage = exports.GenerationChunk = exports.RUN_KEY = void 0;
 const serializable_js_1 = require("../load/serializable.cjs");
 exports.RUN_KEY = "__run";
 /**
@@ -405,6 +405,55 @@ class ChatGenerationChunk extends GenerationChunk {
    }
 }
 exports.ChatGenerationChunk = ChatGenerationChunk;
+/**
+ * Maps messages from an older format (V1) to the current `StoredMessage`
+ * format. If the message is already in the `StoredMessage` format, it is
+ * returned as is. Otherwise, it transforms the V1 message into a
+ * `StoredMessage`. This function is important for maintaining
+ * compatibility with older message formats.
+ */
+function mapV1MessageToStoredMessage(message) {
+    // TODO: Remove this mapper when we deprecate the old message format.
+    if (message.data !== undefined) {
+        return message;
+    }
+    else {
+        const v1Message = message;
+        return {
+            type: v1Message.type,
+            data: {
+                content: v1Message.text,
+                role: v1Message.role,
+                name: undefined,
+            },
+        };
+    }
+}
+function mapStoredMessageToChatMessage(message) {
+    const storedMessage = mapV1MessageToStoredMessage(message);
+    switch (storedMessage.type) {
+        case "human":
+            return new HumanMessage(storedMessage.data);
+        case "ai":
+            return new AIMessage(storedMessage.data);
+        case "system":
+            return new SystemMessage(storedMessage.data);
+        case "function":
+            if (storedMessage.data.name === undefined) {
+                throw new Error("Name must be defined for function messages");
+            }
+            return new FunctionMessage(storedMessage.data);
+        case "chat": {
+            if (storedMessage.data.role === undefined) {
+                throw new Error("Role must be defined for chat messages");
+            }
+            return new ChatMessage(storedMessage.data);
+        }
+        default:
+            throw new Error(`Got unexpected type: ${storedMessage.type}`);
+    }
+}
+exports.mapStoredMessageToChatMessage = mapStoredMessageToChatMessage;
 /**
  * Base PromptValue class. All prompt values should extend this class.
  */
package/dist/schema/index.d.ts
CHANGED
@@ -59,6 +59,10 @@ export interface StoredMessage {
    type: string;
    data: StoredMessageData;
 }
+export interface StoredGeneration {
+    text: string;
+    message?: StoredMessage;
+}
 export type MessageType = "human" | "ai" | "generic" | "system" | "function";
 export interface BaseMessageFields {
    content: string;
@@ -239,6 +243,7 @@ export declare class ChatGenerationChunk extends GenerationChunk implements Chat
    constructor(fields: ChatGenerationChunkFields);
    concat(chunk: ChatGenerationChunk): ChatGenerationChunk;
 }
+export declare function mapStoredMessageToChatMessage(message: StoredMessage): HumanMessage | AIMessage | SystemMessage | FunctionMessage | ChatMessage;
 export interface ChatResult {
    generations: ChatGeneration[];
    llmOutput?: Record<string, any>;
package/dist/schema/index.js
CHANGED
@@ -386,6 +386,54 @@ export class ChatGenerationChunk extends GenerationChunk {
        });
    }
 }
+/**
+ * Maps messages from an older format (V1) to the current `StoredMessage`
+ * format. If the message is already in the `StoredMessage` format, it is
+ * returned as is. Otherwise, it transforms the V1 message into a
+ * `StoredMessage`. This function is important for maintaining
+ * compatibility with older message formats.
+ */
+function mapV1MessageToStoredMessage(message) {
+    // TODO: Remove this mapper when we deprecate the old message format.
+    if (message.data !== undefined) {
+        return message;
+    }
+    else {
+        const v1Message = message;
+        return {
+            type: v1Message.type,
+            data: {
+                content: v1Message.text,
+                role: v1Message.role,
+                name: undefined,
+            },
+        };
+    }
+}
+export function mapStoredMessageToChatMessage(message) {
+    const storedMessage = mapV1MessageToStoredMessage(message);
+    switch (storedMessage.type) {
+        case "human":
+            return new HumanMessage(storedMessage.data);
+        case "ai":
+            return new AIMessage(storedMessage.data);
+        case "system":
+            return new SystemMessage(storedMessage.data);
+        case "function":
+            if (storedMessage.data.name === undefined) {
+                throw new Error("Name must be defined for function messages");
+            }
+            return new FunctionMessage(storedMessage.data);
+        case "chat": {
+            if (storedMessage.data.role === undefined) {
+                throw new Error("Role must be defined for chat messages");
+            }
+            return new ChatMessage(storedMessage.data);
+        }
+        default:
+            throw new Error(`Got unexpected type: ${storedMessage.type}`);
+    }
+}
 /**
  * Base PromptValue class. All prompt values should extend this class.
  */
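
The mapStoredMessageToChatMessage helper added to schema/index.* above converts serialized messages back into concrete message classes (the stores/message/utils files shrink by a similar amount in the file list, which suggests the logic was promoted from there). A small TypeScript sketch with illustrative payloads; StoredMessageData can carry additional optional fields that are omitted here:

import { mapStoredMessageToChatMessage } from "langchain/schema";
import type { StoredMessage } from "langchain/schema";

// Current StoredMessage shape: a typed envelope around a data payload.
const stored = {
  type: "ai",
  data: { content: "Hello!", role: undefined, name: undefined },
} as StoredMessage;
console.log(mapStoredMessageToChatMessage(stored)); // AIMessage with content "Hello!"

// Legacy V1 shape (text/role at the top level, no data payload): the internal
// mapV1MessageToStoredMessage upgrade runs first, then the switch above applies.
const legacy = { type: "human", text: "Hi there", role: "user" };
console.log(mapStoredMessageToChatMessage(legacy as unknown as StoredMessage)); // HumanMessage with content "Hi there"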