langchain 0.3.28 → 0.3.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -12
- package/dist/agents/agent.cjs +37 -4
- package/dist/agents/agent.d.ts +1 -1
- package/dist/agents/agent.js +1 -1
- package/dist/agents/format_scratchpad/log.cjs +1 -2
- package/dist/agents/format_scratchpad/log_to_message.cjs +1 -2
- package/dist/agents/format_scratchpad/openai_functions.cjs +2 -3
- package/dist/agents/format_scratchpad/tool_calling.cjs +2 -3
- package/dist/agents/format_scratchpad/xml.cjs +1 -2
- package/dist/agents/initialize.cjs +2 -2
- package/dist/agents/mrkl/index.cjs +1 -1
- package/dist/agents/mrkl/index.d.ts +1 -1
- package/dist/agents/mrkl/index.js +1 -1
- package/dist/agents/openai_functions/index.cjs +4 -3
- package/dist/agents/openai_functions/index.d.ts +1 -0
- package/dist/agents/openai_functions/index.js +1 -0
- package/dist/agents/openai_functions/output_parser.cjs +1 -1
- package/dist/agents/openai_functions/output_parser.d.ts +1 -1
- package/dist/agents/openai_functions/output_parser.js +1 -1
- package/dist/agents/openai_tools/index.cjs +3 -3
- package/dist/agents/openai_tools/index.d.ts +1 -1
- package/dist/agents/openai_tools/index.js +1 -1
- package/dist/agents/openai_tools/output_parser.cjs +1 -1
- package/dist/agents/openai_tools/output_parser.d.ts +1 -1
- package/dist/agents/openai_tools/output_parser.js +1 -1
- package/dist/agents/react/index.cjs +1 -2
- package/dist/agents/structured_chat/index.cjs +3 -3
- package/dist/agents/structured_chat/index.d.ts +1 -1
- package/dist/agents/structured_chat/index.js +1 -1
- package/dist/agents/structured_chat/outputParser.cjs +1 -1
- package/dist/agents/structured_chat/outputParser.d.ts +1 -1
- package/dist/agents/structured_chat/outputParser.js +1 -1
- package/dist/agents/structured_chat/prompt.d.ts +1 -1
- package/dist/agents/tool_calling/index.cjs +1 -2
- package/dist/agents/tool_calling/output_parser.cjs +2 -2
- package/dist/agents/toolkits/conversational_retrieval/openai_functions.cjs +1 -2
- package/dist/agents/toolkits/conversational_retrieval/tool.cjs +1 -2
- package/dist/agents/toolkits/json/json.cjs +2 -2
- package/dist/agents/toolkits/openapi/openapi.cjs +3 -3
- package/dist/agents/toolkits/openapi/openapi.d.ts +1 -1
- package/dist/agents/toolkits/openapi/openapi.js +1 -1
- package/dist/agents/toolkits/sql/sql.cjs +3 -3
- package/dist/agents/toolkits/sql/sql.d.ts +1 -1
- package/dist/agents/toolkits/sql/sql.js +1 -1
- package/dist/agents/toolkits/vectorstore/vectorstore.cjs +4 -4
- package/dist/agents/toolkits/vectorstore/vectorstore.d.ts +1 -1
- package/dist/agents/toolkits/vectorstore/vectorstore.js +1 -1
- package/dist/agents/xml/index.cjs +2 -2
- package/dist/chains/analyze_documents_chain.cjs +1 -1
- package/dist/chains/analyze_documents_chain.d.ts +1 -1
- package/dist/chains/analyze_documents_chain.js +1 -1
- package/dist/chains/api/prompts.d.ts +1 -1
- package/dist/chains/base.cjs +42 -9
- package/dist/chains/base.d.ts +2 -2
- package/dist/chains/base.js +1 -1
- package/dist/chains/combine_documents/base.cjs +2 -2
- package/dist/chains/combine_documents/reduce.cjs +2 -3
- package/dist/chains/combine_documents/stuff.cjs +1 -2
- package/dist/chains/constitutional_ai/constitutional_principle.cjs +2 -2
- package/dist/chains/constitutional_ai/constitutional_principle.d.ts +2 -2
- package/dist/chains/constitutional_ai/constitutional_principle.js +2 -2
- package/dist/chains/conversation.cjs +1 -1
- package/dist/chains/conversation.d.ts +1 -1
- package/dist/chains/conversation.js +1 -1
- package/dist/chains/graph_qa/cypher.cjs +1 -1
- package/dist/chains/graph_qa/cypher.d.ts +1 -1
- package/dist/chains/graph_qa/cypher.js +1 -1
- package/dist/chains/history_aware_retriever.cjs +2 -3
- package/dist/chains/history_aware_retriever.d.ts +1 -1
- package/dist/chains/history_aware_retriever.js +1 -1
- package/dist/chains/llm_chain.cjs +1 -1
- package/dist/chains/llm_chain.d.ts +1 -1
- package/dist/chains/llm_chain.js +1 -1
- package/dist/chains/openai_functions/base.cjs +3 -4
- package/dist/chains/openai_functions/base.d.ts +1 -1
- package/dist/chains/openai_functions/base.js +1 -1
- package/dist/chains/openai_functions/extraction.cjs +2 -3
- package/dist/chains/openai_functions/openapi.cjs +4 -5
- package/dist/chains/openai_functions/openapi.js +1 -1
- package/dist/chains/openai_functions/structured_output.cjs +4 -4
- package/dist/chains/openai_functions/structured_output.js +1 -1
- package/dist/chains/openai_functions/tagging.cjs +2 -3
- package/dist/chains/query_constructor/index.cjs +3 -3
- package/dist/chains/question_answering/load.cjs +4 -4
- package/dist/chains/retrieval.cjs +2 -3
- package/dist/chains/retrieval.d.ts +1 -1
- package/dist/chains/retrieval.js +1 -1
- package/dist/chains/router/multi_prompt.cjs +16 -13
- package/dist/chains/router/multi_prompt.d.ts +16 -13
- package/dist/chains/router/multi_prompt.js +16 -13
- package/dist/chains/router/multi_retrieval_qa.cjs +1 -1
- package/dist/chains/router/multi_retrieval_qa.d.ts +1 -1
- package/dist/chains/router/multi_retrieval_qa.js +1 -1
- package/dist/chains/router/utils.cjs +1 -2
- package/dist/chains/sequential_chain.cjs +4 -3
- package/dist/chains/sequential_chain.d.ts +2 -2
- package/dist/chains/sequential_chain.js +4 -3
- package/dist/chains/sql_db/sql_db_chain.cjs +3 -3
- package/dist/chains/sql_db/sql_db_chain.d.ts +1 -1
- package/dist/chains/sql_db/sql_db_chain.js +1 -1
- package/dist/chat_models/universal.cjs +54 -21
- package/dist/document_loaders/fs/buffer.cjs +34 -1
- package/dist/document_loaders/fs/buffer.d.ts +0 -3
- package/dist/document_loaders/fs/directory.cjs +35 -2
- package/dist/document_loaders/fs/directory.d.ts +0 -2
- package/dist/document_loaders/fs/text.cjs +34 -1
- package/dist/document_loaders/fs/text.d.ts +0 -1
- package/dist/document_transformers/openai_functions.cjs +4 -4
- package/dist/document_transformers/openai_functions.js +1 -1
- package/dist/evaluation/embedding_distance/base.cjs +3 -3
- package/dist/evaluation/loader.cjs +2 -3
- package/dist/evaluation/loader.js +1 -1
- package/dist/experimental/autogpt/agent.cjs +1 -1
- package/dist/experimental/autogpt/agent.d.ts +1 -1
- package/dist/experimental/autogpt/agent.js +1 -1
- package/dist/experimental/autogpt/output_parser.cjs +2 -2
- package/dist/experimental/autogpt/prompt_generator.cjs +2 -2
- package/dist/experimental/generative_agents/generative_agent.cjs +2 -2
- package/dist/experimental/generative_agents/generative_agent.d.ts +1 -1
- package/dist/experimental/generative_agents/generative_agent.js +2 -2
- package/dist/experimental/openai_assistant/index.cjs +8 -4
- package/dist/experimental/openai_assistant/index.d.ts +6 -6
- package/dist/experimental/openai_assistant/index.js +9 -5
- package/dist/experimental/openai_files/index.cjs +2 -2
- package/dist/experimental/openai_files/index.d.ts +6 -4
- package/dist/experimental/openai_files/index.js +2 -2
- package/dist/experimental/plan_and_execute/agent_executor.cjs +2 -2
- package/dist/hub/base.cjs +4 -5
- package/dist/hub/index.cjs +2 -2
- package/dist/hub/node.cjs +43 -10
- package/dist/index.cjs +0 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.js +1 -1
- package/dist/load/import_map.cjs +17 -7
- package/dist/load/index.cjs +18 -9
- package/dist/memory/buffer_memory.cjs +1 -1
- package/dist/memory/buffer_memory.d.ts +1 -1
- package/dist/memory/buffer_memory.js +1 -1
- package/dist/memory/buffer_token_memory.cjs +1 -1
- package/dist/memory/buffer_token_memory.d.ts +1 -1
- package/dist/memory/buffer_token_memory.js +1 -1
- package/dist/memory/buffer_window_memory.cjs +1 -1
- package/dist/memory/buffer_window_memory.d.ts +1 -1
- package/dist/memory/buffer_window_memory.js +1 -1
- package/dist/memory/entity_memory.cjs +2 -2
- package/dist/memory/entity_memory.d.ts +2 -2
- package/dist/memory/entity_memory.js +2 -2
- package/dist/memory/summary.cjs +2 -2
- package/dist/memory/summary.d.ts +2 -2
- package/dist/memory/summary.js +2 -2
- package/dist/memory/summary_buffer.cjs +2 -2
- package/dist/memory/summary_buffer.d.ts +2 -2
- package/dist/memory/summary_buffer.js +2 -2
- package/dist/output_parsers/expression_type_handlers/base.cjs +34 -1
- package/dist/prompts/index.cjs +0 -1
- package/dist/prompts/index.d.ts +0 -1
- package/dist/prompts/index.js +1 -1
- package/dist/retrievers/hyde.cjs +3 -3
- package/dist/retrievers/hyde.d.ts +1 -1
- package/dist/retrievers/hyde.js +1 -1
- package/dist/retrievers/parent_document.cjs +17 -7
- package/dist/retrievers/self_query/index.cjs +1 -1
- package/dist/retrievers/self_query/index.d.ts +1 -1
- package/dist/retrievers/self_query/index.js +1 -1
- package/dist/smith/config.cjs +5 -6
- package/dist/smith/name_generation.cjs +1 -2
- package/dist/smith/runner_utils.cjs +1 -2
- package/dist/sql_db.cjs +35 -2
- package/dist/storage/encoder_backed.cjs +2 -2
- package/dist/storage/encoder_backed.d.ts +1 -1
- package/dist/storage/file_system.cjs +17 -7
- package/dist/storage/file_system.d.ts +1 -1
- package/dist/stores/file/node.cjs +17 -7
- package/dist/tools/convert_to_openai.cjs +2 -2
- package/dist/tools/render.cjs +2 -3
- package/dist/tools/retriever.cjs +1 -2
- package/dist/tools/webbrowser.cjs +18 -8
- package/dist/tools/webbrowser.d.ts +1 -1
- package/dist/tools/webbrowser.js +1 -1
- package/dist/util/axios-fetch-adapter.cjs +1 -1
- package/dist/util/azure.cjs +1 -2
- package/dist/util/entrypoint_deprecation.cjs +2 -3
- package/dist/util/load.cjs +34 -1
- package/dist/util/ml-distance/distances.cjs +3 -4
- package/dist/util/ml-distance/similarities.cjs +1 -2
- package/dist/util/ml-distance-euclidean/euclidean.cjs +2 -3
- package/dist/util/openapi.cjs +17 -7
- package/dist/util/openapi.d.ts +32 -32
- package/dist/util/parse.cjs +17 -7
- package/dist/util/prompt-layer.d.ts +1 -1
- package/dist/util/set.cjs +3 -4
- package/dist/util/time.cjs +1 -2
- package/package.json +9 -9

package/dist/chains/openai_functions/openapi.cjs CHANGED
@@ -1,6 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.convertOpenAPISchemaToJSONSchema = convertOpenAPISchemaToJSONSchema;
+exports.convertOpenAPISpecToOpenAIFunctions = convertOpenAPISpecToOpenAIFunctions;
+exports.createOpenAPIChain = createOpenAPIChain;
 const openai_1 = require("@langchain/openai");
 const prompts_1 = require("@langchain/core/prompts");
 const openapi_js_1 = require("../../util/openapi.cjs");
@@ -158,7 +160,6 @@ function convertOpenAPISchemaToJSONSchema(schema, spec) {
         type: schema.type ?? "string",
     };
 }
-exports.convertOpenAPISchemaToJSONSchema = convertOpenAPISchemaToJSONSchema;
 /**
  * Converts an OpenAPI specification to OpenAI functions.
  * @param spec The OpenAPI specification to convert.
@@ -302,7 +303,6 @@ function convertOpenAPISpecToOpenAIFunctions(spec) {
         },
     };
 }
-exports.convertOpenAPISpecToOpenAIFunctions = convertOpenAPISpecToOpenAIFunctions;
 /**
  * A chain for making simple API requests.
  */
@@ -378,7 +378,7 @@ async function createOpenAPIChain(spec, options = {}) {
     if (defaultExecutionMethod === undefined) {
         throw new Error(`Could not parse any valid operations from the provided spec.`);
     }
-    const { llm = new openai_1.ChatOpenAI({
+    const { llm = new openai_1.ChatOpenAI({ model: "gpt-3.5-turbo-0613" }), prompt = prompts_1.ChatPromptTemplate.fromMessages([
         prompts_1.HumanMessagePromptTemplate.fromTemplate("Use the provided API's to respond to this user query:\n\n{query}"),
     ]), requestChain = new SimpleRequestChain({
         requestMethod: async (name, args) => defaultExecutionMethod(name, args, {
@@ -402,4 +402,3 @@ async function createOpenAPIChain(spec, options = {}) {
         ...rest,
     });
 }
-exports.createOpenAPIChain = createOpenAPIChain;
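
A pattern worth noting before the remaining per-file hunks: most of the CommonJS changes in this release have the same shape as the hunk above. The trailing `exports.foo = foo;` assignments at the bottom of each file are removed, and the assignments now sit directly after the `__esModule` marker at the top. This is consistent with a newer TypeScript CommonJS emit (an assumption on our part; the diff itself does not say which toolchain change produced it). A minimal sketch of the two emit shapes for the same source:

```ts
// Hypothetical source module (not from the langchain repo), compiled with
// `module: "commonjs"`, to illustrate the two emit shapes seen in this diff.
export function createThing(): string {
  return "thing";
}

// Older emit (the 0.3.28 layout): the assignment trails the declaration.
//
//   exports.createThing = void 0;
//   function createThing() { return "thing"; }
//   exports.createThing = createThing;
//
// Newer emit (the 0.3.30 layout): the assignment is hoisted next to the
// __esModule marker and the trailing line disappears.
//
//   exports.createThing = createThing;
//   function createThing() { return "thing"; }
```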

package/dist/chains/openai_functions/openapi.js CHANGED
@@ -373,7 +373,7 @@ export async function createOpenAPIChain(spec, options = {}) {
     if (defaultExecutionMethod === undefined) {
         throw new Error(`Could not parse any valid operations from the provided spec.`);
     }
-    const { llm = new ChatOpenAI({
+    const { llm = new ChatOpenAI({ model: "gpt-3.5-turbo-0613" }), prompt = ChatPromptTemplate.fromMessages([
         HumanMessagePromptTemplate.fromTemplate("Use the provided API's to respond to this user query:\n\n{query}"),
     ]), requestChain = new SimpleRequestChain({
         requestMethod: async (name, args) => defaultExecutionMethod(name, args, {

package/dist/chains/openai_functions/structured_output.cjs CHANGED
@@ -1,6 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.FunctionCallStructuredOutputParser = void 0;
+exports.createStructuredOutputChain = createStructuredOutputChain;
+exports.createStructuredOutputChainFromZod = createStructuredOutputChainFromZod;
 const json_schema_1 = require("@langchain/core/utils/json_schema");
 const openai_1 = require("@langchain/openai");
 const output_parsers_1 = require("@langchain/core/output_parsers");
@@ -109,7 +111,7 @@ exports.FunctionCallStructuredOutputParser = FunctionCallStructuredOutputParser;
  * @returns OpenAPIChain
  */
 function createStructuredOutputChain(input) {
-    const { outputSchema, llm = new openai_1.ChatOpenAI({
+    const { outputSchema, llm = new openai_1.ChatOpenAI({ model: "gpt-3.5-turbo-0613", temperature: 0 }), outputKey = "output", llmKwargs = {}, zodSchema, ...rest } = input;
     if (outputSchema === undefined && zodSchema === undefined) {
         throw new Error(`Must provide one of "outputSchema" or "zodSchema".`);
     }
@@ -137,7 +139,6 @@ function createStructuredOutputChain(input) {
         ...rest,
     });
 }
-exports.createStructuredOutputChain = createStructuredOutputChain;
 /** @deprecated Use {@link https://api.js.langchain.com/functions/langchain.chains_openai_functions.createStructuredOutputRunnable.html | createStructuredOutputRunnable} instead */
 function createStructuredOutputChainFromZod(zodSchema, input) {
     return createStructuredOutputChain({
@@ -146,4 +147,3 @@ function createStructuredOutputChainFromZod(zodSchema, input) {
         zodSchema,
     });
 }
-exports.createStructuredOutputChainFromZod = createStructuredOutputChainFromZod;

package/dist/chains/openai_functions/structured_output.js CHANGED
@@ -105,7 +105,7 @@ export class FunctionCallStructuredOutputParser extends BaseLLMOutputParser {
  * @returns OpenAPIChain
  */
 export function createStructuredOutputChain(input) {
-    const { outputSchema, llm = new ChatOpenAI({
+    const { outputSchema, llm = new ChatOpenAI({ model: "gpt-3.5-turbo-0613", temperature: 0 }), outputKey = "output", llmKwargs = {}, zodSchema, ...rest } = input;
     if (outputSchema === undefined && zodSchema === undefined) {
         throw new Error(`Must provide one of "outputSchema" or "zodSchema".`);
     }
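
The hunks above pin the fallback `llm` of `createOpenAPIChain` and `createStructuredOutputChain` to an explicit `model: "gpt-3.5-turbo-0613"` instead of relying on whatever `ChatOpenAI` defaults to. Callers who pass their own `llm` are unaffected; a minimal sketch of doing so (the Zod schema, prompt, and model name below are placeholders, not from the diff, and the `FromZod` helper is already marked deprecated upstream):

```ts
import { z } from "zod";
import { ChatOpenAI } from "@langchain/openai";
import { ChatPromptTemplate } from "@langchain/core/prompts";
import { createStructuredOutputChainFromZod } from "langchain/chains/openai_functions";

// Supplying `llm` explicitly bypasses the pinned default model shown in the diff.
const chain = createStructuredOutputChainFromZod(
  z.object({ name: z.string(), age: z.number() }),
  {
    llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
    prompt: ChatPromptTemplate.fromTemplate("Extract the person mentioned in: {input}"),
  }
);
const result = await chain.invoke({ input: "Alice is 42 years old." });
```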

package/dist/chains/openai_functions/tagging.cjs CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.createTaggingChain = createTaggingChain;
+exports.createTaggingChainFromZod = createTaggingChainFromZod;
 const prompts_1 = require("@langchain/core/prompts");
 const json_schema_1 = require("@langchain/core/utils/json_schema");
 const openai_functions_js_1 = require("../../output_parsers/openai_functions.cjs");
@@ -51,7 +52,6 @@ function createTaggingChain(schema, llm, options = {}) {
         ...rest,
     });
 }
-exports.createTaggingChain = createTaggingChain;
 /**
  * Function that creates a tagging chain from a Zod schema. It converts
  * the Zod schema to a JSON schema using the zodToJsonSchema function and
@@ -70,4 +70,3 @@ function createTaggingChainFromZod(
 schema, llm, options) {
     return createTaggingChain((0, json_schema_1.toJsonSchema)(schema), llm, options);
 }
-exports.createTaggingChainFromZod = createTaggingChainFromZod;

package/dist/chains/query_constructor/index.cjs CHANGED
@@ -1,6 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.StructuredQueryOutputParser = exports.EXAMPLE_PROMPT = exports.DEFAULT_SUFFIX = exports.DEFAULT_SCHEMA = exports.DEFAULT_PREFIX = exports.DEFAULT_EXAMPLES = exports.QueryTransformer = exports.AttributeInfo = void 0;
+exports.formatAttributeInfo = formatAttributeInfo;
+exports.loadQueryConstructorRunnable = loadQueryConstructorRunnable;
 const zod_1 = require("zod");
 const prompts_1 = require("@langchain/core/prompts");
 const parser_js_1 = require("./parser.cjs");
@@ -118,7 +120,6 @@ function formatAttributeInfo(info) {
         .replaceAll("{", "{{")
         .replaceAll("}", "}}");
 }
-exports.formatAttributeInfo = formatAttributeInfo;
 const defaultExample = prompt_js_1.DEFAULT_EXAMPLES.map((EXAMPLE) => EXAMPLE);
 function _getPrompt(documentContents, attributeInfo, allowedComparators, allowedOperators, examples = defaultExample) {
     const myAllowedComparators = allowedComparators ?? Object.values(ir_js_1.Comparators);
@@ -151,4 +152,3 @@ function loadQueryConstructorRunnable(opts) {
     const outputParser = StructuredQueryOutputParser.fromComponents(opts.allowedComparators, opts.allowedOperators);
     return prompt.pipe(opts.llm).pipe(outputParser);
 }
-exports.loadQueryConstructorRunnable = loadQueryConstructorRunnable;

package/dist/chains/question_answering/load.cjs CHANGED
@@ -1,6 +1,9 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.loadQAChain = void 0;
+exports.loadQAStuffChain = loadQAStuffChain;
+exports.loadQAMapReduceChain = loadQAMapReduceChain;
+exports.loadQARefineChain = loadQARefineChain;
 const llm_chain_js_1 = require("../llm_chain.cjs");
 const combine_docs_chain_js_1 = require("../combine_docs_chain.cjs");
 const stuff_prompts_js_1 = require("./stuff_prompts.cjs");
@@ -33,7 +36,6 @@ function loadQAStuffChain(llm, params = {}) {
     const chain = new combine_docs_chain_js_1.StuffDocumentsChain({ llmChain, verbose });
     return chain;
 }
-exports.loadQAStuffChain = loadQAStuffChain;
 /**
  * Loads a MapReduceQAChain based on the provided parameters. It takes an
  * LLM instance and MapReduceQAChainParams as parameters.
@@ -62,7 +64,6 @@ function loadQAMapReduceChain(llm, params = {}) {
     });
     return chain;
 }
-exports.loadQAMapReduceChain = loadQAMapReduceChain;
 /**
  * Loads a RefineQAChain based on the provided parameters. It takes an LLM
  * instance and RefineQAChainParams as parameters.
@@ -85,4 +86,3 @@ function loadQARefineChain(llm, params = {}) {
     });
     return chain;
 }
-exports.loadQARefineChain = loadQARefineChain;
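
Functionally these QA loaders are unchanged; only the CommonJS export wiring differs, so existing imports keep working. A minimal usage sketch, assuming the usual document-QA input keys (`input_documents`, `question`) that these legacy chains expect:

```ts
import { loadQAStuffChain } from "langchain/chains";
import { ChatOpenAI } from "@langchain/openai";
import { Document } from "@langchain/core/documents";

// Same call shape before and after the export refactor in this release.
const chain = loadQAStuffChain(new ChatOpenAI({ model: "gpt-4o-mini" }));
const answer = await chain.invoke({
  input_documents: [new Document({ pageContent: "Carbon has six protons." })],
  question: "How many protons does carbon have?",
});
```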

package/dist/chains/retrieval.cjs CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.createRetrievalChain =
+exports.createRetrievalChain = createRetrievalChain;
 const runnables_1 = require("@langchain/core/runnables");
 function isBaseRetriever(x) {
     return (!!x &&
@@ -22,7 +22,7 @@ function isBaseRetriever(x) {
  * import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
  *
  * const retrievalQAChatPrompt = await pull("langchain-ai/retrieval-qa-chat");
- * const llm = new ChatOpenAI({});
+ * const llm = new ChatOpenAI({ model: "gpt-4o-mini" });
  * const retriever = ...
  * const combineDocsChain = await createStuffDocumentsChain(...);
  * const retrievalChain = await createRetrievalChain({
@@ -57,4 +57,3 @@ async function createRetrievalChain({ retriever, combineDocsChain, }) {
     ]).withConfig({ runName: "retrieval_chain" });
     return retrievalChain;
 }
-exports.createRetrievalChain = createRetrievalChain;

package/dist/chains/retrieval.d.ts CHANGED
@@ -42,7 +42,7 @@ export type CreateRetrievalChainParams<RunOutput> = {
  * import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
  *
  * const retrievalQAChatPrompt = await pull("langchain-ai/retrieval-qa-chat");
- * const llm = new ChatOpenAI({});
+ * const llm = new ChatOpenAI({ model: "gpt-4o-mini" });
  * const retriever = ...
  * const combineDocsChain = await createStuffDocumentsChain(...);
  * const retrievalChain = await createRetrievalChain({

package/dist/chains/retrieval.js CHANGED
@@ -19,7 +19,7 @@ function isBaseRetriever(x) {
  * import { createStuffDocumentsChain } from "langchain/chains/combine_documents";
  *
  * const retrievalQAChatPrompt = await pull("langchain-ai/retrieval-qa-chat");
- * const llm = new ChatOpenAI({});
+ * const llm = new ChatOpenAI({ model: "gpt-4o-mini" });
  * const retriever = ...
  * const combineDocsChain = await createStuffDocumentsChain(...);
  * const retrievalChain = await createRetrievalChain({

package/dist/chains/router/multi_prompt.cjs CHANGED
@@ -16,19 +16,22 @@ const router_js_1 = require("../../output_parsers/router.cjs");
  * functionality specific to multi-prompt chains.
  * @example
  * ```typescript
- * const multiPromptChain = MultiPromptChain.fromLLMAndPrompts(
- *
- *
- * "
- *
- *
- *
- *
- *
- *
- *
- *
- * }
+ * const multiPromptChain = MultiPromptChain.fromLLMAndPrompts(
+ *   new ChatOpenAI({ model: "gpt-4o-mini" }),
+ *   {
+ *     promptNames: ["physics", "math", "history"],
+ *     promptDescriptions: [
+ *       "Good for answering questions about physics",
+ *       "Good for answering math questions",
+ *       "Good for answering questions about history",
+ *     ],
+ *     promptTemplates: [
+ *       `You are a very smart physics professor. Here is a question:\n{input}\n`,
+ *       `You are a very good mathematician. Here is a question:\n{input}\n`,
+ *       `You are a very smart history professor. Here is a question:\n{input}\n`,
+ *     ],
+ *   }
+ * );
  * const result = await multiPromptChain.call({
  *   input: "What is the speed of light?",
  * });

package/dist/chains/router/multi_prompt.d.ts CHANGED
@@ -9,19 +9,22 @@ import { LLMChainInput } from "../../chains/llm_chain.js";
  * functionality specific to multi-prompt chains.
  * @example
  * ```typescript
- * const multiPromptChain = MultiPromptChain.fromLLMAndPrompts(
- *
- *
- * "
- *
- *
- *
- *
- *
- *
- *
- *
- * }
+ * const multiPromptChain = MultiPromptChain.fromLLMAndPrompts(
+ *   new ChatOpenAI({ model: "gpt-4o-mini" }),
+ *   {
+ *     promptNames: ["physics", "math", "history"],
+ *     promptDescriptions: [
+ *       "Good for answering questions about physics",
+ *       "Good for answering math questions",
+ *       "Good for answering questions about history",
+ *     ],
+ *     promptTemplates: [
+ *       `You are a very smart physics professor. Here is a question:\n{input}\n`,
+ *       `You are a very good mathematician. Here is a question:\n{input}\n`,
+ *       `You are a very smart history professor. Here is a question:\n{input}\n`,
+ *     ],
+ *   }
+ * );
  * const result = await multiPromptChain.call({
  *   input: "What is the speed of light?",
  * });

package/dist/chains/router/multi_prompt.js CHANGED
@@ -13,19 +13,22 @@ import { RouterOutputParser } from "../../output_parsers/router.js";
  * functionality specific to multi-prompt chains.
  * @example
  * ```typescript
- * const multiPromptChain = MultiPromptChain.fromLLMAndPrompts(
- *
- *
- * "
- *
- *
- *
- *
- *
- *
- *
- *
- * }
+ * const multiPromptChain = MultiPromptChain.fromLLMAndPrompts(
+ *   new ChatOpenAI({ model: "gpt-4o-mini" }),
+ *   {
+ *     promptNames: ["physics", "math", "history"],
+ *     promptDescriptions: [
+ *       "Good for answering questions about physics",
+ *       "Good for answering math questions",
+ *       "Good for answering questions about history",
+ *     ],
+ *     promptTemplates: [
+ *       `You are a very smart physics professor. Here is a question:\n{input}\n`,
+ *       `You are a very good mathematician. Here is a question:\n{input}\n`,
+ *       `You are a very smart history professor. Here is a question:\n{input}\n`,
+ *     ],
+ *   }
+ * );
  * const result = await multiPromptChain.call({
  *   input: "What is the speed of light?",
  * });

package/dist/chains/router/multi_retrieval_qa.cjs CHANGED
@@ -18,7 +18,7 @@ const router_js_1 = require("../../output_parsers/router.cjs");
  * @example
  * ```typescript
  * const multiRetrievalQAChain = MultiRetrievalQAChain.fromLLMAndRetrievers(
- *   new ChatOpenAI(),
+ *   new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   {
  *     retrieverNames: ["aqua teen", "mst3k", "animaniacs"],
  *     retrieverDescriptions: [

package/dist/chains/router/multi_retrieval_qa.d.ts CHANGED
@@ -22,7 +22,7 @@ export type MultiRetrievalDefaults = {
  * @example
  * ```typescript
  * const multiRetrievalQAChain = MultiRetrievalQAChain.fromLLMAndRetrievers(
- *   new ChatOpenAI(),
+ *   new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   {
  *     retrieverNames: ["aqua teen", "mst3k", "animaniacs"],
  *     retrieverDescriptions: [

package/dist/chains/router/multi_retrieval_qa.js CHANGED
@@ -15,7 +15,7 @@ import { RouterOutputParser } from "../../output_parsers/router.js";
  * @example
  * ```typescript
  * const multiRetrievalQAChain = MultiRetrievalQAChain.fromLLMAndRetrievers(
- *   new ChatOpenAI(),
+ *   new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   {
  *     retrieverNames: ["aqua teen", "mst3k", "animaniacs"],
  *     retrieverDescriptions: [

package/dist/chains/router/utils.cjs CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.zipEntries =
+exports.zipEntries = zipEntries;
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 function zipEntries(...arrays) {
     // Check for empty input
@@ -31,4 +31,3 @@ function zipEntries(...arrays) {
     }
     return zipped;
 }
-exports.zipEntries = zipEntries;

package/dist/chains/sequential_chain.cjs CHANGED
@@ -32,12 +32,12 @@ function formatSet(input) {
  * const overallChain = new SequentialChain({
  *   chains: [
  *     new LLMChain({
- *       llm: new ChatOpenAI({ temperature: 0 }),
+ *       llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
  *       prompt: promptTemplate,
  *       outputKey: "synopsis",
  *     }),
  *     new LLMChain({
- *       llm: new OpenAI({ temperature: 0 }),
+ *       llm: new OpenAI({ model: "gpt-4o-mini", temperature: 0 }),
  *       prompt: reviewPromptTemplate,
  *       outputKey: "review",
  *     }),
@@ -275,7 +275,8 @@ class SimpleSequentialChain extends base_js_1.BaseChain {
     /** @ignore */
     _validateChains() {
         for (const chain of this.chains) {
-            if (chain.inputKeys.filter((k) => !chain.memory?.memoryKeys.includes(k)
+            if (chain.inputKeys.filter((k) => !chain.memory?.memoryKeys.includes(k))
+                .length !== 1) {
                 throw new Error(`Chains used in SimpleSequentialChain should all have one input, got ${chain.inputKeys.length} for ${chain._chainType()}.`);
             }
             if (chain.outputKeys.length !== 1) {

package/dist/chains/sequential_chain.d.ts CHANGED
@@ -43,12 +43,12 @@ export interface SequentialChainInput extends ChainInputs {
  * const overallChain = new SequentialChain({
  *   chains: [
  *     new LLMChain({
- *       llm: new ChatOpenAI({ temperature: 0 }),
+ *       llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
  *       prompt: promptTemplate,
  *       outputKey: "synopsis",
  *     }),
  *     new LLMChain({
- *       llm: new OpenAI({ temperature: 0 }),
+ *       llm: new OpenAI({ model: "gpt-4o-mini", temperature: 0 }),
  *       prompt: reviewPromptTemplate,
  *       outputKey: "review",
  *     }),

package/dist/chains/sequential_chain.js CHANGED
@@ -29,12 +29,12 @@ function formatSet(input) {
  * const overallChain = new SequentialChain({
  *   chains: [
  *     new LLMChain({
- *       llm: new ChatOpenAI({ temperature: 0 }),
+ *       llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
  *       prompt: promptTemplate,
  *       outputKey: "synopsis",
  *     }),
  *     new LLMChain({
- *       llm: new OpenAI({ temperature: 0 }),
+ *       llm: new OpenAI({ model: "gpt-4o-mini", temperature: 0 }),
  *       prompt: reviewPromptTemplate,
  *       outputKey: "review",
  *     }),
@@ -271,7 +271,8 @@ export class SimpleSequentialChain extends BaseChain {
     /** @ignore */
     _validateChains() {
         for (const chain of this.chains) {
-            if (chain.inputKeys.filter((k) => !chain.memory?.memoryKeys.includes(k)
+            if (chain.inputKeys.filter((k) => !chain.memory?.memoryKeys.includes(k))
+                .length !== 1) {
                 throw new Error(`Chains used in SimpleSequentialChain should all have one input, got ${chain.inputKeys.length} for ${chain._chainType()}.`);
             }
             if (chain.outputKeys.length !== 1) {
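
The `_validateChains` hunks above change how the one-input check is emitted: the filter over input keys now closes on the first line and the `.length !== 1` comparison wraps to the next. Functionally, the check counts how many input keys are not supplied by the chain's memory and requires exactly one. A standalone sketch of that check (names are illustrative, not from the source):

```ts
// Illustrative: the shape of the check performed by SimpleSequentialChain._validateChains.
function hasExactlyOneNonMemoryInput(
  inputKeys: string[],
  memoryKeys: string[] = []
): boolean {
  // Keys the caller must supply are those the memory does not inject.
  const nonMemoryInputs = inputKeys.filter((k) => !memoryKeys.includes(k));
  return nonMemoryInputs.length === 1;
}

// Example: "history" comes from memory, so only "input" must be provided by the caller.
console.log(hasExactlyOneNonMemoryInput(["input", "history"], ["history"])); // true
```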

package/dist/chains/sql_db/sql_db_chain.cjs CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.SqlDatabaseChain = void 0;
+exports.createSqlQueryChain = createSqlQueryChain;
 const base_1 = require("@langchain/core/language_models/base");
 const runnables_1 = require("@langchain/core/runnables");
 const output_parsers_1 = require("@langchain/core/output_parsers");
@@ -217,7 +218,7 @@ const difference = (setA, setB) => new Set([...setA].filter((x) => !setB.has(x))
  * const db = await SqlDatabase.fromDataSourceParams({
  *   appDataSource: datasource,
  * });
- * const llm = new ChatOpenAI({ temperature: 0 });
+ * const llm = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 });
  * const chain = await createSqlQueryChain({
  *   llm,
  *   db,
@@ -267,4 +268,3 @@ async function createSqlQueryChain({ llm, db, prompt, k = 5, dialect, }) {
         strip,
     ]);
 }
-exports.createSqlQueryChain = createSqlQueryChain;

package/dist/chains/sql_db/sql_db_chain.d.ts CHANGED
@@ -104,7 +104,7 @@ export interface CreateSqlQueryChainFields {
  * const db = await SqlDatabase.fromDataSourceParams({
  *   appDataSource: datasource,
  * });
- * const llm = new ChatOpenAI({ temperature: 0 });
+ * const llm = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 });
  * const chain = await createSqlQueryChain({
  *   llm,
  *   db,

package/dist/chains/sql_db/sql_db_chain.js CHANGED
@@ -213,7 +213,7 @@ const difference = (setA, setB) => new Set([...setA].filter((x) => !setB.has(x))
  * const db = await SqlDatabase.fromDataSourceParams({
  *   appDataSource: datasource,
  * });
- * const llm = new ChatOpenAI({ temperature: 0 });
+ * const llm = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 });
  * const chain = await createSqlQueryChain({
  *   llm,
  *   db,

package/dist/chat_models/universal.cjs CHANGED
@@ -1,6 +1,41 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.ConfigurableModel = void 0;
+exports._inferModelProvider = _inferModelProvider;
+exports.initChatModel = initChatModel;
 const chat_models_1 = require("@langchain/core/language_models/chat_models");
 const runnables_1 = require("@langchain/core/runnables");
 const stream_1 = require("@langchain/core/utils/stream");
@@ -34,83 +69,83 @@ params = {}) {
     try {
         switch (modelProviderCopy) {
             case "openai": {
-                const { ChatOpenAI } = await
+                const { ChatOpenAI } = await Promise.resolve().then(() => __importStar(require("@langchain/openai")));
                 return new ChatOpenAI({ model, ...passedParams });
             }
             case "anthropic": {
-                const { ChatAnthropic } = await
+                const { ChatAnthropic } = await Promise.resolve().then(() => __importStar(require("@langchain/anthropic")));
                 return new ChatAnthropic({ model, ...passedParams });
             }
             case "azure_openai": {
-                const { AzureChatOpenAI } = await
+                const { AzureChatOpenAI } = await Promise.resolve().then(() => __importStar(require("@langchain/openai")));
                 return new AzureChatOpenAI({ model, ...passedParams });
             }
             case "cohere": {
-                const { ChatCohere } = await
+                const { ChatCohere } = await Promise.resolve().then(() => __importStar(require("@langchain/cohere")));
                 return new ChatCohere({ model, ...passedParams });
             }
             case "google-vertexai": {
-                const { ChatVertexAI } = await
+                const { ChatVertexAI } = await Promise.resolve().then(() => __importStar(require("@langchain/google-vertexai")));
                 return new ChatVertexAI({ model, ...passedParams });
             }
             case "google-vertexai-web": {
-                const { ChatVertexAI } = await
+                const { ChatVertexAI } = await Promise.resolve().then(() => __importStar(require("@langchain/google-vertexai-web")));
                 return new ChatVertexAI({ model, ...passedParams });
             }
             case "google-genai": {
-                const { ChatGoogleGenerativeAI } = await
+                const { ChatGoogleGenerativeAI } = await Promise.resolve().then(() => __importStar(require("@langchain/google-genai")));
                 return new ChatGoogleGenerativeAI({ model, ...passedParams });
             }
             case "ollama": {
-                const { ChatOllama } = await
+                const { ChatOllama } = await Promise.resolve().then(() => __importStar(require("@langchain/ollama")));
                 return new ChatOllama({ model, ...passedParams });
             }
             case "mistralai": {
-                const { ChatMistralAI } = await
+                const { ChatMistralAI } = await Promise.resolve().then(() => __importStar(require("@langchain/mistralai")));
                 return new ChatMistralAI({ model, ...passedParams });
             }
             case "groq": {
-                const { ChatGroq } = await
+                const { ChatGroq } = await Promise.resolve().then(() => __importStar(require("@langchain/groq")));
                 return new ChatGroq({ model, ...passedParams });
             }
             case "cerebras": {
-                const { ChatCerebras } = await
+                const { ChatCerebras } = await Promise.resolve().then(() => __importStar(require("@langchain/cerebras")));
                 return new ChatCerebras({ model, ...passedParams });
             }
             case "bedrock": {
-                const { ChatBedrockConverse } = await
+                const { ChatBedrockConverse } = await Promise.resolve().then(() => __importStar(require("@langchain/aws")));
                 return new ChatBedrockConverse({ model, ...passedParams });
             }
             case "deepseek": {
-                const { ChatDeepSeek } = await
+                const { ChatDeepSeek } = await Promise.resolve().then(() => __importStar(require("@langchain/deepseek")));
                 return new ChatDeepSeek({ model, ...passedParams });
             }
             case "xai": {
-                const { ChatXAI } = await
+                const { ChatXAI } = await Promise.resolve().then(() => __importStar(require("@langchain/xai")));
                 return new ChatXAI({ model, ...passedParams });
             }
             case "fireworks": {
                 // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                 // @ts-ignore - Can not install as a proper dependency due to circular dependency
-                const { ChatFireworks } = await
+                const { ChatFireworks } = await Promise.resolve().then(() => __importStar(require(
                 // We can not 'expect-error' because if you explicitly build `@langchain/community`
                 // this import will be able to be resolved, thus there will be no error. However
                 // this will never be the case in CI.
                 // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                 // @ts-ignore - Can not install as a proper dependency due to circular dependency
-                "@langchain/community/chat_models/fireworks");
+                "@langchain/community/chat_models/fireworks")));
                 return new ChatFireworks({ model, ...passedParams });
             }
             case "together": {
                 // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                 // @ts-ignore - Can not install as a proper dependency due to circular dependency
-                const { ChatTogetherAI } = await
+                const { ChatTogetherAI } = await Promise.resolve().then(() => __importStar(require(
                 // We can not 'expect-error' because if you explicitly build `@langchain/community`
                 // this import will be able to be resolved, thus there will be no error. However
                 // this will never be the case in CI.
                 // eslint-disable-next-line @typescript-eslint/ban-ts-comment
                 // @ts-ignore - Can not install as a proper dependency due to circular dependency
-                "@langchain/community/chat_models/togetherai");
+                "@langchain/community/chat_models/togetherai")));
                 return new ChatTogetherAI({ model, ...passedParams });
             }
             default: {
@@ -169,7 +204,6 @@ function _inferModelProvider(modelName) {
         return undefined;
     }
 }
-exports._inferModelProvider = _inferModelProvider;
 /**
  * Internal class used to create chat models.
  *
@@ -651,4 +685,3 @@ fields) {
     });
 }
 }
-exports.initChatModel = initChatModel;
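
The `universal.cjs` changes above are what TypeScript's CommonJS output for a dynamic `import()` looks like: each provider's `await import("@langchain/...")` is down-leveled to `Promise.resolve().then(() => __importStar(require(...)))`, with the `__importStar`/`__createBinding` helpers emitted once at the top of the file. The public behavior of `initChatModel` is unchanged; a minimal usage sketch (the model name is a placeholder, and the matching provider package, here `@langchain/openai`, must be installed):

```ts
import { initChatModel } from "langchain/chat_models/universal";

// Lazily loads @langchain/openai through the dynamic-import machinery shown in the diff.
const model = await initChatModel("gpt-4o-mini", {
  modelProvider: "openai",
  temperature: 0,
});
const response = await model.invoke("Say hello in one word.");
console.log(response.content);
```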