langchain 0.1.13 → 0.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cache/redis.cjs +1 -1
- package/dist/cache/redis.d.ts +1 -1
- package/dist/cache/redis.js +1 -1
- package/dist/document_loaders/web/confluence.cjs +7 -1
- package/dist/document_loaders/web/confluence.d.ts +9 -0
- package/dist/document_loaders/web/confluence.js +7 -1
- package/dist/load/import_constants.cjs +1 -0
- package/dist/load/import_constants.js +1 -0
- package/dist/load/import_map.cjs +5 -1
- package/dist/load/import_map.d.ts +5 -1
- package/dist/load/import_map.js +5 -1
- package/dist/runnables/remote.cjs +15 -339
- package/dist/runnables/remote.d.ts +1 -30
- package/dist/runnables/remote.js +1 -337
- package/dist/stores/message/redis.cjs +2 -0
- package/dist/stores/message/redis.js +2 -0
- package/dist/util/migrations/0_0-0_1-migrate-imports.cjs +208 -0
- package/dist/util/migrations/0_0-0_1-migrate-imports.d.ts +44 -0
- package/dist/util/migrations/0_0-0_1-migrate-imports.js +201 -0
- package/dist/vectorstores/redis.cjs +2 -0
- package/dist/vectorstores/redis.js +2 -0
- package/package.json +25 -2
- package/util/migrations/0_1.cjs +1 -0
- package/util/migrations/0_1.d.cts +1 -0
- package/util/migrations/0_1.d.ts +1 -0
- package/util/migrations/0_1.js +1 -0
package/dist/cache/redis.cjs
CHANGED
|
@@ -3,7 +3,7 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
3
3
|
exports.RedisCache = void 0;
|
|
4
4
|
const caches_1 = require("@langchain/core/caches");
|
|
5
5
|
/**
|
|
6
|
-
* @deprecated Import from "@langchain/
|
|
6
|
+
* @deprecated Import from "@langchain/redis" instead.
|
|
7
7
|
* Represents a specific implementation of a caching mechanism using Redis
|
|
8
8
|
* as the underlying storage system. It extends the `BaseCache` class and
|
|
9
9
|
* overrides its methods to provide the Redis-specific logic.
|
package/dist/cache/redis.d.ts
CHANGED
|
@@ -7,7 +7,7 @@ import { Generation } from "@langchain/core/outputs";
|
|
|
7
7
|
*/
|
|
8
8
|
type RedisClientType = ReturnType<typeof createClient> | ReturnType<typeof createCluster>;
|
|
9
9
|
/**
|
|
10
|
-
* @deprecated Import from "@langchain/
|
|
10
|
+
* @deprecated Import from "@langchain/redis" instead.
|
|
11
11
|
* Represents a specific implementation of a caching mechanism using Redis
|
|
12
12
|
* as the underlying storage system. It extends the `BaseCache` class and
|
|
13
13
|
* overrides its methods to provide the Redis-specific logic.
|
package/dist/cache/redis.js
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
import { BaseCache, deserializeStoredGeneration, getCacheKey, serializeGeneration, } from "@langchain/core/caches";
|
|
2
2
|
/**
|
|
3
|
-
* @deprecated Import from "@langchain/
|
|
3
|
+
* @deprecated Import from "@langchain/redis" instead.
|
|
4
4
|
* Represents a specific implementation of a caching mechanism using Redis
|
|
5
5
|
* as the underlying storage system. It extends the `BaseCache` class and
|
|
6
6
|
* overrides its methods to provide the Redis-specific logic.
|
|
package/dist/document_loaders/web/confluence.cjs
CHANGED
|
@@ -19,7 +19,7 @@ const base_js_1 = require("../base.cjs");
|
|
|
19
19
|
* ```
|
|
20
20
|
*/
|
|
21
21
|
class ConfluencePagesLoader extends base_js_1.BaseDocumentLoader {
|
|
22
|
-
constructor({ baseUrl, spaceKey, username, accessToken, limit = 25, expand = "body.storage", personalAccessToken, }) {
|
|
22
|
+
constructor({ baseUrl, spaceKey, username, accessToken, limit = 25, expand = "body.storage,version", personalAccessToken, }) {
|
|
23
23
|
super();
|
|
24
24
|
Object.defineProperty(this, "baseUrl", {
|
|
25
25
|
enumerable: true,
|
|
@@ -168,8 +168,14 @@ class ConfluencePagesLoader extends base_js_1.BaseDocumentLoader {
|
|
|
168
168
|
return new documents_1.Document({
|
|
169
169
|
pageContent: textWithoutEmptyLines,
|
|
170
170
|
metadata: {
|
|
171
|
+
id: page.id,
|
|
172
|
+
status: page.status,
|
|
171
173
|
title: page.title,
|
|
174
|
+
type: page.type,
|
|
172
175
|
url: pageUrl,
|
|
176
|
+
version: page.version?.number,
|
|
177
|
+
updated_by: page.version?.by?.displayName,
|
|
178
|
+
updated_at: page.version?.when,
|
|
173
179
|
},
|
|
174
180
|
});
|
|
175
181
|
}
|
|
package/dist/document_loaders/web/confluence.d.ts
CHANGED
|
@@ -19,11 +19,20 @@ export interface ConfluencePagesLoaderParams {
|
|
|
19
19
|
export interface ConfluencePage {
|
|
20
20
|
id: string;
|
|
21
21
|
title: string;
|
|
22
|
+
type: string;
|
|
22
23
|
body: {
|
|
23
24
|
storage: {
|
|
24
25
|
value: string;
|
|
25
26
|
};
|
|
26
27
|
};
|
|
28
|
+
status: string;
|
|
29
|
+
version?: {
|
|
30
|
+
number: number;
|
|
31
|
+
when: string;
|
|
32
|
+
by: {
|
|
33
|
+
displayName: string;
|
|
34
|
+
};
|
|
35
|
+
};
|
|
27
36
|
}
|
|
28
37
|
/**
|
|
29
38
|
* Interface representing the response from the Confluence API.
|
|
package/dist/document_loaders/web/confluence.js
CHANGED
|
@@ -16,7 +16,7 @@ import { BaseDocumentLoader } from "../base.js";
|
|
|
16
16
|
* ```
|
|
17
17
|
*/
|
|
18
18
|
export class ConfluencePagesLoader extends BaseDocumentLoader {
|
|
19
|
-
constructor({ baseUrl, spaceKey, username, accessToken, limit = 25, expand = "body.storage", personalAccessToken, }) {
|
|
19
|
+
constructor({ baseUrl, spaceKey, username, accessToken, limit = 25, expand = "body.storage,version", personalAccessToken, }) {
|
|
20
20
|
super();
|
|
21
21
|
Object.defineProperty(this, "baseUrl", {
|
|
22
22
|
enumerable: true,
|
|
@@ -165,8 +165,14 @@ export class ConfluencePagesLoader extends BaseDocumentLoader {
|
|
|
165
165
|
return new Document({
|
|
166
166
|
pageContent: textWithoutEmptyLines,
|
|
167
167
|
metadata: {
|
|
168
|
+
id: page.id,
|
|
169
|
+
status: page.status,
|
|
168
170
|
title: page.title,
|
|
171
|
+
type: page.type,
|
|
169
172
|
url: pageUrl,
|
|
173
|
+
version: page.version?.number,
|
|
174
|
+
updated_by: page.version?.by?.displayName,
|
|
175
|
+
updated_at: page.version?.when,
|
|
170
176
|
},
|
|
171
177
|
});
|
|
172
178
|
}
|
|
package/dist/load/import_constants.cjs
CHANGED
|
@@ -171,6 +171,7 @@ exports.optionalImportEntrypoints = [
|
|
|
171
171
|
"langchain/graphs/neo4j_graph",
|
|
172
172
|
"langchain/hub",
|
|
173
173
|
"langchain/util/convex",
|
|
174
|
+
"langchain/util/migrations/0_1",
|
|
174
175
|
"langchain/experimental/multimodal_embeddings/googlevertexai",
|
|
175
176
|
"langchain/experimental/chat_models/anthropic_functions",
|
|
176
177
|
"langchain/experimental/llms/bittensor",
|
|
package/dist/load/import_constants.js
CHANGED
|
@@ -168,6 +168,7 @@ export const optionalImportEntrypoints = [
|
|
|
168
168
|
"langchain/graphs/neo4j_graph",
|
|
169
169
|
"langchain/hub",
|
|
170
170
|
"langchain/util/convex",
|
|
171
|
+
"langchain/util/migrations/0_1",
|
|
171
172
|
"langchain/experimental/multimodal_embeddings/googlevertexai",
|
|
172
173
|
"langchain/experimental/chat_models/anthropic_functions",
|
|
173
174
|
"langchain/experimental/llms/bittensor",
|
package/dist/load/import_map.cjs
CHANGED
|
@@ -25,7 +25,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
|
|
|
25
25
|
};
|
|
26
26
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
27
27
|
exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__remote = exports.output_parsers = exports.schema__query_constructor = exports.schema__prompt_template = exports.chat_models__anthropic = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__web__searchapi = exports.document_loaders__base = exports.text_splitter = exports.vectorstores__memory = exports.llms__fake = exports.embeddings__fake = exports.embeddings__cache_backed = exports.chains__retrieval = exports.chains__openai_functions = exports.chains__history_aware_retriever = exports.chains__combine_documents__reduce = exports.chains__combine_documents = exports.chains = exports.tools__retriever = exports.tools__render = exports.tools__chain = exports.agents__openai__output_parser = exports.agents__xml__output_parser = exports.agents__react__output_parser = exports.agents__format_scratchpad__log_to_message = exports.agents__format_scratchpad__xml = exports.agents__format_scratchpad__log = exports.agents__format_scratchpad__openai_tools = exports.agents__format_scratchpad = exports.agents__toolkits = exports.agents = void 0;
|
|
28
|
-
exports.llms__fireworks = exports.chat_models__fireworks = exports.schema__output = exports.schema__output_parser = exports.schema__runnable = exports.prompts__base = exports.prompts__pipeline = exports.prompts__chat = exports.schema = exports.schema__messages = exports.prompts__prompt = exports.embeddings__openai = exports.llms__openai = exports.chat_models__openai = exports.indexes = exports.runnables__remote = exports.smith = exports.evaluation = exports.experimental__prompts__custom_format = exports.experimental__masking = exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__openai_files = exports.experimental__openai_assistant = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = void 0;
|
|
28
|
+
exports.llms__fireworks = exports.chat_models__fireworks = exports.schema__output = exports.schema__output_parser = exports.schema__runnable = exports.prompts__base = exports.prompts__pipeline = exports.prompts__image = exports.prompts__chat = exports.schema = exports.schema__messages = exports.prompts__prompt = exports.embeddings__openai = exports.llms__openai = exports.chat_models__openai = exports.indexes = exports.runnables__remote = exports.smith = exports.evaluation = exports.experimental__prompts__custom_format = exports.experimental__masking = exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__openai_files = exports.experimental__openai_assistant = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = void 0;
|
|
29
29
|
exports.agents = __importStar(require("../agents/index.cjs"));
|
|
30
30
|
exports.agents__toolkits = __importStar(require("../agents/toolkits/index.cjs"));
|
|
31
31
|
exports.agents__format_scratchpad = __importStar(require("../agents/format_scratchpad/openai_functions.cjs"));
|
|
@@ -161,6 +161,10 @@ const prompts__chat = {
|
|
|
161
161
|
SystemMessagePromptTemplate: prompts_1.SystemMessagePromptTemplate
|
|
162
162
|
};
|
|
163
163
|
exports.prompts__chat = prompts__chat;
|
|
164
|
+
const prompts__image = {
|
|
165
|
+
ImagePromptTemplate: prompts_1.ImagePromptTemplate
|
|
166
|
+
};
|
|
167
|
+
exports.prompts__image = prompts__image;
|
|
164
168
|
const prompts__pipeline = {
|
|
165
169
|
PipelinePromptTemplate: prompts_1.PipelinePromptTemplate
|
|
166
170
|
};
|
|
package/dist/load/import_map.d.ts
CHANGED
|
@@ -66,7 +66,7 @@ export * as smith from "../smith/index.js";
|
|
|
66
66
|
export * as runnables__remote from "../runnables/remote.js";
|
|
67
67
|
export * as indexes from "../indexes/index.js";
|
|
68
68
|
import { ChatOpenAI, OpenAI, OpenAIEmbeddings } from "@langchain/openai";
|
|
69
|
-
import { PromptTemplate, AIMessagePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, PipelinePromptTemplate } from "@langchain/core/prompts";
|
|
69
|
+
import { PromptTemplate, AIMessagePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, ImagePromptTemplate, PipelinePromptTemplate } from "@langchain/core/prompts";
|
|
70
70
|
import { AIMessage, AIMessageChunk, BaseMessage, BaseMessageChunk, ChatMessage, ChatMessageChunk, FunctionMessage, FunctionMessageChunk, HumanMessage, HumanMessageChunk, SystemMessage, SystemMessageChunk, ToolMessage, ToolMessageChunk } from "@langchain/core/messages";
|
|
71
71
|
import { StringPromptValue } from "@langchain/core/prompt_values";
|
|
72
72
|
import { RouterRunnable, RunnableAssign, RunnableBinding, RunnableBranch, RunnableEach, RunnableMap, RunnableParallel, RunnablePassthrough, RunnablePick, RunnableRetry, RunnableSequence, RunnableWithFallbacks, RunnableWithMessageHistory } from "@langchain/core/runnables";
|
|
@@ -133,6 +133,10 @@ declare const prompts__chat: {
|
|
|
133
133
|
SystemMessagePromptTemplate: typeof SystemMessagePromptTemplate;
|
|
134
134
|
};
|
|
135
135
|
export { prompts__chat };
|
|
136
|
+
declare const prompts__image: {
|
|
137
|
+
ImagePromptTemplate: typeof ImagePromptTemplate;
|
|
138
|
+
};
|
|
139
|
+
export { prompts__image };
|
|
136
140
|
declare const prompts__pipeline: {
|
|
137
141
|
PipelinePromptTemplate: typeof PipelinePromptTemplate;
|
|
138
142
|
};
|
package/dist/load/import_map.js
CHANGED
|
@@ -67,7 +67,7 @@ export * as smith from "../smith/index.js";
|
|
|
67
67
|
export * as runnables__remote from "../runnables/remote.js";
|
|
68
68
|
export * as indexes from "../indexes/index.js";
|
|
69
69
|
import { ChatOpenAI, OpenAI, OpenAIEmbeddings } from "@langchain/openai";
|
|
70
|
-
import { PromptTemplate, AIMessagePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, PipelinePromptTemplate } from "@langchain/core/prompts";
|
|
70
|
+
import { PromptTemplate, AIMessagePromptTemplate, ChatMessagePromptTemplate, ChatPromptTemplate, HumanMessagePromptTemplate, MessagesPlaceholder, SystemMessagePromptTemplate, ImagePromptTemplate, PipelinePromptTemplate } from "@langchain/core/prompts";
|
|
71
71
|
import { AIMessage, AIMessageChunk, BaseMessage, BaseMessageChunk, ChatMessage, ChatMessageChunk, FunctionMessage, FunctionMessageChunk, HumanMessage, HumanMessageChunk, SystemMessage, SystemMessageChunk, ToolMessage, ToolMessageChunk } from "@langchain/core/messages";
|
|
72
72
|
import { StringPromptValue } from "@langchain/core/prompt_values";
|
|
73
73
|
import { RouterRunnable, RunnableAssign, RunnableBinding, RunnableBranch, RunnableEach, RunnableMap, RunnableParallel, RunnablePassthrough, RunnablePick, RunnableRetry, RunnableSequence, RunnableWithFallbacks, RunnableWithMessageHistory } from "@langchain/core/runnables";
|
|
@@ -134,6 +134,10 @@ const prompts__chat = {
|
|
|
134
134
|
SystemMessagePromptTemplate
|
|
135
135
|
};
|
|
136
136
|
export { prompts__chat };
|
|
137
|
+
const prompts__image = {
|
|
138
|
+
ImagePromptTemplate
|
|
139
|
+
};
|
|
140
|
+
export { prompts__image };
|
|
137
141
|
const prompts__pipeline = {
|
|
138
142
|
PipelinePromptTemplate
|
|
139
143
|
};
|
|
package/dist/runnables/remote.cjs
CHANGED
|
@@ -1,341 +1,17 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
+
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
+
if (k2 === undefined) k2 = k;
|
|
4
|
+
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
+
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
+
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
+
}
|
|
8
|
+
Object.defineProperty(o, k2, desc);
|
|
9
|
+
}) : (function(o, m, k, k2) {
|
|
10
|
+
if (k2 === undefined) k2 = k;
|
|
11
|
+
o[k2] = m[k];
|
|
12
|
+
}));
|
|
13
|
+
var __exportStar = (this && this.__exportStar) || function(m, exports) {
|
|
14
|
+
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
|
|
15
|
+
};
|
|
2
16
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports
|
|
4
|
-
const runnables_1 = require("@langchain/core/runnables");
|
|
5
|
-
const documents_1 = require("@langchain/core/documents");
|
|
6
|
-
const prompt_values_1 = require("@langchain/core/prompt_values");
|
|
7
|
-
const log_stream_1 = require("@langchain/core/tracers/log_stream");
|
|
8
|
-
const messages_1 = require("@langchain/core/messages");
|
|
9
|
-
const outputs_1 = require("@langchain/core/outputs");
|
|
10
|
-
const event_source_parse_1 = require("@langchain/community/utils/event_source_parse");
|
|
11
|
-
const stream_1 = require("@langchain/core/utils/stream");
|
|
12
|
-
function isSuperset(set, subset) {
|
|
13
|
-
for (const elem of subset) {
|
|
14
|
-
if (!set.has(elem)) {
|
|
15
|
-
return false;
|
|
16
|
-
}
|
|
17
|
-
}
|
|
18
|
-
return true;
|
|
19
|
-
}
|
|
20
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
21
|
-
function revive(obj) {
|
|
22
|
-
if (Array.isArray(obj))
|
|
23
|
-
return obj.map(revive);
|
|
24
|
-
if (typeof obj === "object") {
|
|
25
|
-
// eslint-disable-next-line no-instanceof/no-instanceof
|
|
26
|
-
if (!obj || obj instanceof Date) {
|
|
27
|
-
return obj;
|
|
28
|
-
}
|
|
29
|
-
const keysArr = Object.keys(obj);
|
|
30
|
-
const keys = new Set(keysArr);
|
|
31
|
-
if (isSuperset(keys, new Set(["page_content", "metadata"]))) {
|
|
32
|
-
return new documents_1.Document({
|
|
33
|
-
pageContent: obj.page_content,
|
|
34
|
-
metadata: obj.metadata,
|
|
35
|
-
});
|
|
36
|
-
}
|
|
37
|
-
if (isSuperset(keys, new Set(["content", "type", "additional_kwargs"]))) {
|
|
38
|
-
if (obj.type === "HumanMessage" || obj.type === "human") {
|
|
39
|
-
return new messages_1.HumanMessage({
|
|
40
|
-
content: obj.content,
|
|
41
|
-
});
|
|
42
|
-
}
|
|
43
|
-
if (obj.type === "SystemMessage" || obj.type === "system") {
|
|
44
|
-
return new messages_1.SystemMessage({
|
|
45
|
-
content: obj.content,
|
|
46
|
-
});
|
|
47
|
-
}
|
|
48
|
-
if (obj.type === "ChatMessage" || obj.type === "chat") {
|
|
49
|
-
return new messages_1.ChatMessage({
|
|
50
|
-
content: obj.content,
|
|
51
|
-
role: obj.role,
|
|
52
|
-
});
|
|
53
|
-
}
|
|
54
|
-
if (obj.type === "FunctionMessage" || obj.type === "function") {
|
|
55
|
-
return new messages_1.FunctionMessage({
|
|
56
|
-
content: obj.content,
|
|
57
|
-
name: obj.name,
|
|
58
|
-
});
|
|
59
|
-
}
|
|
60
|
-
if (obj.type === "ToolMessage" || obj.type === "tool") {
|
|
61
|
-
return new messages_1.ToolMessage({
|
|
62
|
-
content: obj.content,
|
|
63
|
-
tool_call_id: obj.tool_call_id,
|
|
64
|
-
});
|
|
65
|
-
}
|
|
66
|
-
if (obj.type === "AIMessage" || obj.type === "ai") {
|
|
67
|
-
return new messages_1.AIMessage({
|
|
68
|
-
content: obj.content,
|
|
69
|
-
});
|
|
70
|
-
}
|
|
71
|
-
if (obj.type === "HumanMessageChunk") {
|
|
72
|
-
return new messages_1.HumanMessageChunk({
|
|
73
|
-
content: obj.content,
|
|
74
|
-
});
|
|
75
|
-
}
|
|
76
|
-
if (obj.type === "SystemMessageChunk") {
|
|
77
|
-
return new messages_1.SystemMessageChunk({
|
|
78
|
-
content: obj.content,
|
|
79
|
-
});
|
|
80
|
-
}
|
|
81
|
-
if (obj.type === "ChatMessageChunk") {
|
|
82
|
-
return new messages_1.ChatMessageChunk({
|
|
83
|
-
content: obj.content,
|
|
84
|
-
role: obj.role,
|
|
85
|
-
});
|
|
86
|
-
}
|
|
87
|
-
if (obj.type === "FunctionMessageChunk") {
|
|
88
|
-
return new messages_1.FunctionMessageChunk({
|
|
89
|
-
content: obj.content,
|
|
90
|
-
name: obj.name,
|
|
91
|
-
});
|
|
92
|
-
}
|
|
93
|
-
if (obj.type === "ToolMessageChunk") {
|
|
94
|
-
return new messages_1.ToolMessageChunk({
|
|
95
|
-
content: obj.content,
|
|
96
|
-
tool_call_id: obj.tool_call_id,
|
|
97
|
-
});
|
|
98
|
-
}
|
|
99
|
-
if (obj.type === "AIMessageChunk") {
|
|
100
|
-
return new messages_1.AIMessageChunk({
|
|
101
|
-
content: obj.content,
|
|
102
|
-
});
|
|
103
|
-
}
|
|
104
|
-
}
|
|
105
|
-
if (isSuperset(keys, new Set(["text", "generation_info", "type"]))) {
|
|
106
|
-
if (obj.type === "ChatGenerationChunk") {
|
|
107
|
-
return new outputs_1.ChatGenerationChunk({
|
|
108
|
-
message: revive(obj.message),
|
|
109
|
-
text: obj.text,
|
|
110
|
-
generationInfo: obj.generation_info,
|
|
111
|
-
});
|
|
112
|
-
}
|
|
113
|
-
else if (obj.type === "ChatGeneration") {
|
|
114
|
-
return {
|
|
115
|
-
message: revive(obj.message),
|
|
116
|
-
text: obj.text,
|
|
117
|
-
generationInfo: obj.generation_info,
|
|
118
|
-
};
|
|
119
|
-
}
|
|
120
|
-
else if (obj.type === "GenerationChunk") {
|
|
121
|
-
return new outputs_1.GenerationChunk({
|
|
122
|
-
text: obj.text,
|
|
123
|
-
generationInfo: obj.generation_info,
|
|
124
|
-
});
|
|
125
|
-
}
|
|
126
|
-
else if (obj.type === "Generation") {
|
|
127
|
-
return {
|
|
128
|
-
text: obj.text,
|
|
129
|
-
generationInfo: obj.generation_info,
|
|
130
|
-
};
|
|
131
|
-
}
|
|
132
|
-
}
|
|
133
|
-
if (isSuperset(keys, new Set(["tool", "tool_input", "log", "type"]))) {
|
|
134
|
-
if (obj.type === "AgentAction") {
|
|
135
|
-
return {
|
|
136
|
-
tool: obj.tool,
|
|
137
|
-
toolInput: obj.tool_input,
|
|
138
|
-
log: obj.log,
|
|
139
|
-
};
|
|
140
|
-
}
|
|
141
|
-
}
|
|
142
|
-
if (isSuperset(keys, new Set(["return_values", "log", "type"]))) {
|
|
143
|
-
if (obj.type === "AgentFinish") {
|
|
144
|
-
return {
|
|
145
|
-
returnValues: obj.return_values,
|
|
146
|
-
log: obj.log,
|
|
147
|
-
};
|
|
148
|
-
}
|
|
149
|
-
}
|
|
150
|
-
if (isSuperset(keys, new Set(["generations", "run", "type"]))) {
|
|
151
|
-
if (obj.type === "LLMResult") {
|
|
152
|
-
return {
|
|
153
|
-
generations: revive(obj.generations),
|
|
154
|
-
llmOutput: obj.llm_output,
|
|
155
|
-
[outputs_1.RUN_KEY]: obj.run,
|
|
156
|
-
};
|
|
157
|
-
}
|
|
158
|
-
}
|
|
159
|
-
if (isSuperset(keys, new Set(["messages"]))) {
|
|
160
|
-
// TODO: Start checking for type: ChatPromptValue and ChatPromptValueConcrete
|
|
161
|
-
// when LangServe bug is fixed
|
|
162
|
-
return new prompt_values_1.ChatPromptValue({
|
|
163
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
164
|
-
messages: obj.messages.map((msg) => revive(msg)),
|
|
165
|
-
});
|
|
166
|
-
}
|
|
167
|
-
if (isSuperset(keys, new Set(["text"]))) {
|
|
168
|
-
// TODO: Start checking for type: StringPromptValue
|
|
169
|
-
// when LangServe bug is fixed
|
|
170
|
-
return new prompt_values_1.StringPromptValue(obj.text);
|
|
171
|
-
}
|
|
172
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
173
|
-
const innerRevive = (key) => [
|
|
174
|
-
key,
|
|
175
|
-
revive(obj[key]),
|
|
176
|
-
];
|
|
177
|
-
const rtn = Object.fromEntries(keysArr.map(innerRevive));
|
|
178
|
-
return rtn;
|
|
179
|
-
}
|
|
180
|
-
return obj;
|
|
181
|
-
}
|
|
182
|
-
function deserialize(str) {
|
|
183
|
-
const obj = JSON.parse(str);
|
|
184
|
-
return revive(obj);
|
|
185
|
-
}
|
|
186
|
-
function removeCallbacks(options) {
|
|
187
|
-
const rest = { ...options };
|
|
188
|
-
delete rest.callbacks;
|
|
189
|
-
return rest;
|
|
190
|
-
}
|
|
191
|
-
class RemoteRunnable extends runnables_1.Runnable {
|
|
192
|
-
constructor(fields) {
|
|
193
|
-
super(fields);
|
|
194
|
-
Object.defineProperty(this, "url", {
|
|
195
|
-
enumerable: true,
|
|
196
|
-
configurable: true,
|
|
197
|
-
writable: true,
|
|
198
|
-
value: void 0
|
|
199
|
-
});
|
|
200
|
-
Object.defineProperty(this, "options", {
|
|
201
|
-
enumerable: true,
|
|
202
|
-
configurable: true,
|
|
203
|
-
writable: true,
|
|
204
|
-
value: void 0
|
|
205
|
-
});
|
|
206
|
-
Object.defineProperty(this, "lc_namespace", {
|
|
207
|
-
enumerable: true,
|
|
208
|
-
configurable: true,
|
|
209
|
-
writable: true,
|
|
210
|
-
value: ["langchain", "schema", "runnable", "remote"]
|
|
211
|
-
});
|
|
212
|
-
const { url, options } = fields;
|
|
213
|
-
this.url = url.replace(/\/$/, ""); // remove trailing slash
|
|
214
|
-
this.options = options;
|
|
215
|
-
}
|
|
216
|
-
async post(path, body) {
|
|
217
|
-
return await fetch(`${this.url}${path}`, {
|
|
218
|
-
method: "POST",
|
|
219
|
-
body: JSON.stringify(body),
|
|
220
|
-
headers: {
|
|
221
|
-
"Content-Type": "application/json",
|
|
222
|
-
...this.options?.headers,
|
|
223
|
-
},
|
|
224
|
-
signal: AbortSignal.timeout(this.options?.timeout ?? 60000),
|
|
225
|
-
});
|
|
226
|
-
}
|
|
227
|
-
async invoke(input, options) {
|
|
228
|
-
const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options);
|
|
229
|
-
const response = await this.post("/invoke", {
|
|
230
|
-
input,
|
|
231
|
-
config: removeCallbacks(config),
|
|
232
|
-
kwargs: kwargs ?? {},
|
|
233
|
-
});
|
|
234
|
-
return revive((await response.json()).output);
|
|
235
|
-
}
|
|
236
|
-
async _batch(inputs, options, _, batchOptions) {
|
|
237
|
-
if (batchOptions?.returnExceptions) {
|
|
238
|
-
throw new Error("returnExceptions is not supported for remote clients");
|
|
239
|
-
}
|
|
240
|
-
const configsAndKwargsArray = options?.map((opts) => this._separateRunnableConfigFromCallOptions(opts));
|
|
241
|
-
const [configs, kwargs] = configsAndKwargsArray?.reduce(([pc, pk], [c, k]) => [
|
|
242
|
-
[...pc, c],
|
|
243
|
-
[...pk, k],
|
|
244
|
-
], [[], []]) ?? [undefined, undefined];
|
|
245
|
-
const response = await this.post("/batch", {
|
|
246
|
-
inputs,
|
|
247
|
-
config: (configs ?? [])
|
|
248
|
-
.map(removeCallbacks)
|
|
249
|
-
.map((config) => ({ ...config, ...batchOptions })),
|
|
250
|
-
kwargs,
|
|
251
|
-
});
|
|
252
|
-
const body = await response.json();
|
|
253
|
-
if (!body.output)
|
|
254
|
-
throw new Error("Invalid response from remote runnable");
|
|
255
|
-
return revive(body.output);
|
|
256
|
-
}
|
|
257
|
-
async batch(inputs, options, batchOptions) {
|
|
258
|
-
if (batchOptions?.returnExceptions) {
|
|
259
|
-
throw Error("returnExceptions is not supported for remote clients");
|
|
260
|
-
}
|
|
261
|
-
return this._batchWithConfig(this._batch.bind(this), inputs, options, batchOptions);
|
|
262
|
-
}
|
|
263
|
-
async stream(input, options) {
|
|
264
|
-
const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options);
|
|
265
|
-
const response = await this.post("/stream", {
|
|
266
|
-
input,
|
|
267
|
-
config,
|
|
268
|
-
kwargs,
|
|
269
|
-
});
|
|
270
|
-
if (!response.ok) {
|
|
271
|
-
const json = await response.json();
|
|
272
|
-
const error = new Error(`RemoteRunnable call failed with status code ${response.status}: ${json.message}`);
|
|
273
|
-
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
|
274
|
-
error.response = response;
|
|
275
|
-
throw error;
|
|
276
|
-
}
|
|
277
|
-
const { body } = response;
|
|
278
|
-
if (!body) {
|
|
279
|
-
throw new Error("Could not begin remote stream. Please check the given URL and try again.");
|
|
280
|
-
}
|
|
281
|
-
const stream = new ReadableStream({
|
|
282
|
-
async start(controller) {
|
|
283
|
-
const enqueueLine = (0, event_source_parse_1.getMessages)((msg) => {
|
|
284
|
-
if (msg.data)
|
|
285
|
-
controller.enqueue(deserialize(msg.data));
|
|
286
|
-
});
|
|
287
|
-
const onLine = (line, fieldLength, flush) => {
|
|
288
|
-
enqueueLine(line, fieldLength, flush);
|
|
289
|
-
if (flush)
|
|
290
|
-
controller.close();
|
|
291
|
-
};
|
|
292
|
-
await (0, event_source_parse_1.getBytes)(body, (0, event_source_parse_1.getLines)(onLine));
|
|
293
|
-
},
|
|
294
|
-
});
|
|
295
|
-
return stream_1.IterableReadableStream.fromReadableStream(stream);
|
|
296
|
-
}
|
|
297
|
-
async *streamLog(input, options, streamOptions) {
|
|
298
|
-
const [config, kwargs] = this._separateRunnableConfigFromCallOptions(options);
|
|
299
|
-
const stream = new log_stream_1.LogStreamCallbackHandler({
|
|
300
|
-
...streamOptions,
|
|
301
|
-
autoClose: false,
|
|
302
|
-
});
|
|
303
|
-
const { callbacks } = config;
|
|
304
|
-
if (callbacks === undefined) {
|
|
305
|
-
config.callbacks = [stream];
|
|
306
|
-
}
|
|
307
|
-
else if (Array.isArray(callbacks)) {
|
|
308
|
-
config.callbacks = callbacks.concat([stream]);
|
|
309
|
-
}
|
|
310
|
-
else {
|
|
311
|
-
const copiedCallbacks = callbacks.copy();
|
|
312
|
-
copiedCallbacks.inheritableHandlers.push(stream);
|
|
313
|
-
config.callbacks = copiedCallbacks;
|
|
314
|
-
}
|
|
315
|
-
// The type is in camelCase but the API only accepts snake_case.
|
|
316
|
-
const camelCaseStreamOptions = {
|
|
317
|
-
include_names: streamOptions?.includeNames,
|
|
318
|
-
include_types: streamOptions?.includeTypes,
|
|
319
|
-
include_tags: streamOptions?.includeTags,
|
|
320
|
-
exclude_names: streamOptions?.excludeNames,
|
|
321
|
-
exclude_types: streamOptions?.excludeTypes,
|
|
322
|
-
exclude_tags: streamOptions?.excludeTags,
|
|
323
|
-
};
|
|
324
|
-
const response = await this.post("/stream_log", {
|
|
325
|
-
input,
|
|
326
|
-
config,
|
|
327
|
-
kwargs,
|
|
328
|
-
...camelCaseStreamOptions,
|
|
329
|
-
diff: false,
|
|
330
|
-
});
|
|
331
|
-
const { body } = response;
|
|
332
|
-
if (!body) {
|
|
333
|
-
throw new Error("Could not begin remote stream log. Please check the given URL and try again.");
|
|
334
|
-
}
|
|
335
|
-
const runnableStream = (0, event_source_parse_1.convertEventStreamToIterableReadableDataStream)(body);
|
|
336
|
-
for await (const log of runnableStream) {
|
|
337
|
-
yield revive(JSON.parse(log));
|
|
338
|
-
}
|
|
339
|
-
}
|
|
340
|
-
}
|
|
341
|
-
exports.RemoteRunnable = RemoteRunnable;
|
|
17
|
+
__exportStar(require("@langchain/core/runnables/remote"), exports);
|
|
package/dist/runnables/remote.d.ts
CHANGED
|
@@ -1,30 +1 @@
|
|
|
1
|
-
|
|
2
|
-
import { CallbackManagerForChainRun } from "@langchain/core/callbacks/manager";
|
|
3
|
-
import { type LogStreamCallbackHandlerInput, type RunLogPatch } from "@langchain/core/tracers/log_stream";
|
|
4
|
-
import { IterableReadableStream } from "@langchain/core/utils/stream";
|
|
5
|
-
type RemoteRunnableOptions = {
|
|
6
|
-
timeout?: number;
|
|
7
|
-
headers?: Record<string, unknown>;
|
|
8
|
-
};
|
|
9
|
-
export declare class RemoteRunnable<RunInput, RunOutput, CallOptions extends RunnableConfig> extends Runnable<RunInput, RunOutput, CallOptions> {
|
|
10
|
-
private url;
|
|
11
|
-
private options?;
|
|
12
|
-
lc_namespace: string[];
|
|
13
|
-
constructor(fields: {
|
|
14
|
-
url: string;
|
|
15
|
-
options?: RemoteRunnableOptions;
|
|
16
|
-
});
|
|
17
|
-
private post;
|
|
18
|
-
invoke(input: RunInput, options?: Partial<CallOptions>): Promise<RunOutput>;
|
|
19
|
-
_batch(inputs: RunInput[], options?: Partial<CallOptions>[], _?: (CallbackManagerForChainRun | undefined)[], batchOptions?: RunnableBatchOptions): Promise<(RunOutput | Error)[]>;
|
|
20
|
-
batch(inputs: RunInput[], options?: Partial<CallOptions> | Partial<CallOptions>[], batchOptions?: RunnableBatchOptions & {
|
|
21
|
-
returnExceptions?: false;
|
|
22
|
-
}): Promise<RunOutput[]>;
|
|
23
|
-
batch(inputs: RunInput[], options?: Partial<CallOptions> | Partial<CallOptions>[], batchOptions?: RunnableBatchOptions & {
|
|
24
|
-
returnExceptions: true;
|
|
25
|
-
}): Promise<(RunOutput | Error)[]>;
|
|
26
|
-
batch(inputs: RunInput[], options?: Partial<CallOptions> | Partial<CallOptions>[], batchOptions?: RunnableBatchOptions): Promise<(RunOutput | Error)[]>;
|
|
27
|
-
stream(input: RunInput, options?: Partial<CallOptions>): Promise<IterableReadableStream<RunOutput>>;
|
|
28
|
-
streamLog(input: RunInput, options?: Partial<CallOptions>, streamOptions?: Omit<LogStreamCallbackHandlerInput, "autoClose">): AsyncGenerator<RunLogPatch>;
|
|
29
|
-
}
|
|
30
|
-
export {};
|
|
1
|
+
export * from "@langchain/core/runnables/remote";
|