langchain 0.3.28 → 0.3.30
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -12
- package/dist/agents/agent.cjs +37 -4
- package/dist/agents/agent.d.ts +1 -1
- package/dist/agents/agent.js +1 -1
- package/dist/agents/format_scratchpad/log.cjs +1 -2
- package/dist/agents/format_scratchpad/log_to_message.cjs +1 -2
- package/dist/agents/format_scratchpad/openai_functions.cjs +2 -3
- package/dist/agents/format_scratchpad/tool_calling.cjs +2 -3
- package/dist/agents/format_scratchpad/xml.cjs +1 -2
- package/dist/agents/initialize.cjs +2 -2
- package/dist/agents/mrkl/index.cjs +1 -1
- package/dist/agents/mrkl/index.d.ts +1 -1
- package/dist/agents/mrkl/index.js +1 -1
- package/dist/agents/openai_functions/index.cjs +4 -3
- package/dist/agents/openai_functions/index.d.ts +1 -0
- package/dist/agents/openai_functions/index.js +1 -0
- package/dist/agents/openai_functions/output_parser.cjs +1 -1
- package/dist/agents/openai_functions/output_parser.d.ts +1 -1
- package/dist/agents/openai_functions/output_parser.js +1 -1
- package/dist/agents/openai_tools/index.cjs +3 -3
- package/dist/agents/openai_tools/index.d.ts +1 -1
- package/dist/agents/openai_tools/index.js +1 -1
- package/dist/agents/openai_tools/output_parser.cjs +1 -1
- package/dist/agents/openai_tools/output_parser.d.ts +1 -1
- package/dist/agents/openai_tools/output_parser.js +1 -1
- package/dist/agents/react/index.cjs +1 -2
- package/dist/agents/structured_chat/index.cjs +3 -3
- package/dist/agents/structured_chat/index.d.ts +1 -1
- package/dist/agents/structured_chat/index.js +1 -1
- package/dist/agents/structured_chat/outputParser.cjs +1 -1
- package/dist/agents/structured_chat/outputParser.d.ts +1 -1
- package/dist/agents/structured_chat/outputParser.js +1 -1
- package/dist/agents/structured_chat/prompt.d.ts +1 -1
- package/dist/agents/tool_calling/index.cjs +1 -2
- package/dist/agents/tool_calling/output_parser.cjs +2 -2
- package/dist/agents/toolkits/conversational_retrieval/openai_functions.cjs +1 -2
- package/dist/agents/toolkits/conversational_retrieval/tool.cjs +1 -2
- package/dist/agents/toolkits/json/json.cjs +2 -2
- package/dist/agents/toolkits/openapi/openapi.cjs +3 -3
- package/dist/agents/toolkits/openapi/openapi.d.ts +1 -1
- package/dist/agents/toolkits/openapi/openapi.js +1 -1
- package/dist/agents/toolkits/sql/sql.cjs +3 -3
- package/dist/agents/toolkits/sql/sql.d.ts +1 -1
- package/dist/agents/toolkits/sql/sql.js +1 -1
- package/dist/agents/toolkits/vectorstore/vectorstore.cjs +4 -4
- package/dist/agents/toolkits/vectorstore/vectorstore.d.ts +1 -1
- package/dist/agents/toolkits/vectorstore/vectorstore.js +1 -1
- package/dist/agents/xml/index.cjs +2 -2
- package/dist/chains/analyze_documents_chain.cjs +1 -1
- package/dist/chains/analyze_documents_chain.d.ts +1 -1
- package/dist/chains/analyze_documents_chain.js +1 -1
- package/dist/chains/api/prompts.d.ts +1 -1
- package/dist/chains/base.cjs +42 -9
- package/dist/chains/base.d.ts +2 -2
- package/dist/chains/base.js +1 -1
- package/dist/chains/combine_documents/base.cjs +2 -2
- package/dist/chains/combine_documents/reduce.cjs +2 -3
- package/dist/chains/combine_documents/stuff.cjs +1 -2
- package/dist/chains/constitutional_ai/constitutional_principle.cjs +2 -2
- package/dist/chains/constitutional_ai/constitutional_principle.d.ts +2 -2
- package/dist/chains/constitutional_ai/constitutional_principle.js +2 -2
- package/dist/chains/conversation.cjs +1 -1
- package/dist/chains/conversation.d.ts +1 -1
- package/dist/chains/conversation.js +1 -1
- package/dist/chains/graph_qa/cypher.cjs +1 -1
- package/dist/chains/graph_qa/cypher.d.ts +1 -1
- package/dist/chains/graph_qa/cypher.js +1 -1
- package/dist/chains/history_aware_retriever.cjs +2 -3
- package/dist/chains/history_aware_retriever.d.ts +1 -1
- package/dist/chains/history_aware_retriever.js +1 -1
- package/dist/chains/llm_chain.cjs +1 -1
- package/dist/chains/llm_chain.d.ts +1 -1
- package/dist/chains/llm_chain.js +1 -1
- package/dist/chains/openai_functions/base.cjs +3 -4
- package/dist/chains/openai_functions/base.d.ts +1 -1
- package/dist/chains/openai_functions/base.js +1 -1
- package/dist/chains/openai_functions/extraction.cjs +2 -3
- package/dist/chains/openai_functions/openapi.cjs +4 -5
- package/dist/chains/openai_functions/openapi.js +1 -1
- package/dist/chains/openai_functions/structured_output.cjs +4 -4
- package/dist/chains/openai_functions/structured_output.js +1 -1
- package/dist/chains/openai_functions/tagging.cjs +2 -3
- package/dist/chains/query_constructor/index.cjs +3 -3
- package/dist/chains/question_answering/load.cjs +4 -4
- package/dist/chains/retrieval.cjs +2 -3
- package/dist/chains/retrieval.d.ts +1 -1
- package/dist/chains/retrieval.js +1 -1
- package/dist/chains/router/multi_prompt.cjs +16 -13
- package/dist/chains/router/multi_prompt.d.ts +16 -13
- package/dist/chains/router/multi_prompt.js +16 -13
- package/dist/chains/router/multi_retrieval_qa.cjs +1 -1
- package/dist/chains/router/multi_retrieval_qa.d.ts +1 -1
- package/dist/chains/router/multi_retrieval_qa.js +1 -1
- package/dist/chains/router/utils.cjs +1 -2
- package/dist/chains/sequential_chain.cjs +4 -3
- package/dist/chains/sequential_chain.d.ts +2 -2
- package/dist/chains/sequential_chain.js +4 -3
- package/dist/chains/sql_db/sql_db_chain.cjs +3 -3
- package/dist/chains/sql_db/sql_db_chain.d.ts +1 -1
- package/dist/chains/sql_db/sql_db_chain.js +1 -1
- package/dist/chat_models/universal.cjs +54 -21
- package/dist/document_loaders/fs/buffer.cjs +34 -1
- package/dist/document_loaders/fs/buffer.d.ts +0 -3
- package/dist/document_loaders/fs/directory.cjs +35 -2
- package/dist/document_loaders/fs/directory.d.ts +0 -2
- package/dist/document_loaders/fs/text.cjs +34 -1
- package/dist/document_loaders/fs/text.d.ts +0 -1
- package/dist/document_transformers/openai_functions.cjs +4 -4
- package/dist/document_transformers/openai_functions.js +1 -1
- package/dist/evaluation/embedding_distance/base.cjs +3 -3
- package/dist/evaluation/loader.cjs +2 -3
- package/dist/evaluation/loader.js +1 -1
- package/dist/experimental/autogpt/agent.cjs +1 -1
- package/dist/experimental/autogpt/agent.d.ts +1 -1
- package/dist/experimental/autogpt/agent.js +1 -1
- package/dist/experimental/autogpt/output_parser.cjs +2 -2
- package/dist/experimental/autogpt/prompt_generator.cjs +2 -2
- package/dist/experimental/generative_agents/generative_agent.cjs +2 -2
- package/dist/experimental/generative_agents/generative_agent.d.ts +1 -1
- package/dist/experimental/generative_agents/generative_agent.js +2 -2
- package/dist/experimental/openai_assistant/index.cjs +8 -4
- package/dist/experimental/openai_assistant/index.d.ts +6 -6
- package/dist/experimental/openai_assistant/index.js +9 -5
- package/dist/experimental/openai_files/index.cjs +2 -2
- package/dist/experimental/openai_files/index.d.ts +6 -4
- package/dist/experimental/openai_files/index.js +2 -2
- package/dist/experimental/plan_and_execute/agent_executor.cjs +2 -2
- package/dist/hub/base.cjs +4 -5
- package/dist/hub/index.cjs +2 -2
- package/dist/hub/node.cjs +43 -10
- package/dist/index.cjs +0 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.js +1 -1
- package/dist/load/import_map.cjs +17 -7
- package/dist/load/index.cjs +18 -9
- package/dist/memory/buffer_memory.cjs +1 -1
- package/dist/memory/buffer_memory.d.ts +1 -1
- package/dist/memory/buffer_memory.js +1 -1
- package/dist/memory/buffer_token_memory.cjs +1 -1
- package/dist/memory/buffer_token_memory.d.ts +1 -1
- package/dist/memory/buffer_token_memory.js +1 -1
- package/dist/memory/buffer_window_memory.cjs +1 -1
- package/dist/memory/buffer_window_memory.d.ts +1 -1
- package/dist/memory/buffer_window_memory.js +1 -1
- package/dist/memory/entity_memory.cjs +2 -2
- package/dist/memory/entity_memory.d.ts +2 -2
- package/dist/memory/entity_memory.js +2 -2
- package/dist/memory/summary.cjs +2 -2
- package/dist/memory/summary.d.ts +2 -2
- package/dist/memory/summary.js +2 -2
- package/dist/memory/summary_buffer.cjs +2 -2
- package/dist/memory/summary_buffer.d.ts +2 -2
- package/dist/memory/summary_buffer.js +2 -2
- package/dist/output_parsers/expression_type_handlers/base.cjs +34 -1
- package/dist/prompts/index.cjs +0 -1
- package/dist/prompts/index.d.ts +0 -1
- package/dist/prompts/index.js +1 -1
- package/dist/retrievers/hyde.cjs +3 -3
- package/dist/retrievers/hyde.d.ts +1 -1
- package/dist/retrievers/hyde.js +1 -1
- package/dist/retrievers/parent_document.cjs +17 -7
- package/dist/retrievers/self_query/index.cjs +1 -1
- package/dist/retrievers/self_query/index.d.ts +1 -1
- package/dist/retrievers/self_query/index.js +1 -1
- package/dist/smith/config.cjs +5 -6
- package/dist/smith/name_generation.cjs +1 -2
- package/dist/smith/runner_utils.cjs +1 -2
- package/dist/sql_db.cjs +35 -2
- package/dist/storage/encoder_backed.cjs +2 -2
- package/dist/storage/encoder_backed.d.ts +1 -1
- package/dist/storage/file_system.cjs +17 -7
- package/dist/storage/file_system.d.ts +1 -1
- package/dist/stores/file/node.cjs +17 -7
- package/dist/tools/convert_to_openai.cjs +2 -2
- package/dist/tools/render.cjs +2 -3
- package/dist/tools/retriever.cjs +1 -2
- package/dist/tools/webbrowser.cjs +18 -8
- package/dist/tools/webbrowser.d.ts +1 -1
- package/dist/tools/webbrowser.js +1 -1
- package/dist/util/axios-fetch-adapter.cjs +1 -1
- package/dist/util/azure.cjs +1 -2
- package/dist/util/entrypoint_deprecation.cjs +2 -3
- package/dist/util/load.cjs +34 -1
- package/dist/util/ml-distance/distances.cjs +3 -4
- package/dist/util/ml-distance/similarities.cjs +1 -2
- package/dist/util/ml-distance-euclidean/euclidean.cjs +2 -3
- package/dist/util/openapi.cjs +17 -7
- package/dist/util/openapi.d.ts +32 -32
- package/dist/util/parse.cjs +17 -7
- package/dist/util/prompt-layer.d.ts +1 -1
- package/dist/util/set.cjs +3 -4
- package/dist/util/time.cjs +1 -2
- package/package.json +9 -9
package/dist/hub/node.cjs
CHANGED
@@ -1,6 +1,40 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.push = void 0;
+exports.pull = pull;
 const base_js_1 = require("./base.cjs");
 Object.defineProperty(exports, "push", { enumerable: true, get: function () { return base_js_1.basePush; } });
 const index_js_1 = require("../load/index.cjs");
@@ -21,29 +55,29 @@ async function pull(ownerRepoCommit, options) {
     if (Array.isArray(promptObject.manifest.kwargs?.last?.kwargs?.bound?.id)) {
         const modelName = promptObject.manifest.kwargs?.last?.kwargs?.bound?.id.at(-1);
         if (modelName === "ChatOpenAI") {
-            modelClass = (await
+            modelClass = (await Promise.resolve().then(() => __importStar(require("@langchain/openai")))).ChatOpenAI;
         }
         else if (modelName === "ChatAnthropic") {
-            modelClass = (await
+            modelClass = (await Promise.resolve().then(() => __importStar(require("@langchain/anthropic")))).ChatAnthropic;
         }
         else if (modelName === "ChatAzureOpenAI") {
-            modelClass = (await
+            modelClass = (await Promise.resolve().then(() => __importStar(require("@langchain/openai")))).AzureChatOpenAI;
         }
         else if (modelName === "ChatVertexAI") {
-            modelClass = (await
+            modelClass = (await Promise.resolve().then(() => __importStar(require("@langchain/google-vertexai")))).ChatVertexAI;
         }
         else if (modelName === "ChatGoogleGenerativeAI") {
-            modelClass = (await
+            modelClass = (await Promise.resolve().then(() => __importStar(require("@langchain/google-genai"))))
                 .ChatGoogleGenerativeAI;
         }
         else if (modelName === "ChatBedrockConverse") {
-            modelClass = (await
+            modelClass = (await Promise.resolve().then(() => __importStar(require("@langchain/aws")))).ChatBedrockConverse;
         }
         else if (modelName === "ChatMistral") {
-            modelClass = (await
+            modelClass = (await Promise.resolve().then(() => __importStar(require("@langchain/mistralai")))).ChatMistralAI;
         }
         else if (modelName === "ChatGroq") {
-            modelClass = (await
+            modelClass = (await Promise.resolve().then(() => __importStar(require("@langchain/groq")))).ChatGroq;
         }
         else if (modelName !== undefined) {
            console.warn(`Received unknown model name from prompt hub: "${modelName}"`);
@@ -53,4 +87,3 @@ async function pull(ownerRepoCommit, options) {
     const loadedPrompt = await (0, index_js_1.load)(JSON.stringify(promptObject.manifest), undefined, (0, base_js_1.generateOptionalImportMap)(modelClass), (0, base_js_1.generateModelImportMap)(modelClass));
     return loadedPrompt;
 }
-exports.pull = pull;
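The hunk above is the Node hub entrypoint: `pull` is now assigned directly on `exports`, and the chat-model class bound in the prompt manifest is resolved through lazy `require` thunks for the matching provider package (`@langchain/openai`, `@langchain/anthropic`, and so on). A minimal usage sketch, assuming a LangSmith prompt whose manifest binds a ChatOpenAI model and that `@langchain/openai` is installed; the prompt handle and input variable are placeholders:

```typescript
import { pull } from "langchain/hub/node";
import type { Runnable } from "@langchain/core/runnables";

async function main() {
  // With includeModel, pull() reads the model name from the prompt manifest and
  // lazily loads the matching provider package, as in the compiled code above.
  const chain = await pull<Runnable>("my-handle/my-prompt", { includeModel: true });
  const result = await chain.invoke({ topic: "type systems" });
  console.log(result);
}

main().catch(console.error);
```

Because the provider `require` calls are wrapped in thunks, a missing provider package only fails when a manifest actually asks for that model, not when `langchain/hub/node` is imported.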
package/dist/index.cjs
CHANGED
package/dist/index.d.ts
CHANGED
@@ -1 +0,0 @@
-export {};
package/dist/index.js
CHANGED
package/dist/load/import_map.cjs
CHANGED
@@ -16,13 +16,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (
-
-
-
-
-
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.util__math = exports.util__document = exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.stores__doc__base = exports.retrievers__matryoshka_retriever = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__ensemble = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.output_parsers = exports.callbacks = exports.document_transformers__openai_functions = exports.document_loaders__base = exports.memory__chat_memory = exports.memory = exports.text_splitter = exports.vectorstores__memory = exports.embeddings__fake = exports.embeddings__cache_backed = exports.chains__retrieval = exports.chains__openai_functions = exports.chains__history_aware_retriever = exports.chains__combine_documents__reduce = exports.chains__combine_documents = exports.chains = exports.tools__retriever = exports.tools__render = exports.tools__chain = exports.tools = exports.agents__openai__output_parser = exports.agents__xml__output_parser = exports.agents__react__output_parser = exports.agents__format_scratchpad__log_to_message = exports.agents__format_scratchpad__xml = exports.agents__format_scratchpad__log = exports.agents__format_scratchpad__openai_tools = exports.agents__format_scratchpad = exports.agents__toolkits = exports.agents = void 0;
 exports.schema__output = exports.schema__output_parser = exports.schema__runnable = exports.prompts__base = exports.prompts__pipeline = exports.prompts__image = exports.prompts__chat = exports.schema = exports.schema__messages = exports.prompts__prompt = exports.embeddings__azure_openai = exports.embeddings__openai = exports.llms__azure_openai = exports.llms__openai = exports.chat_models__azure_openai = exports.chat_models__openai = exports.schema__prompt_template = exports.schema__query_constructor = exports.indexes = exports.runnables__remote = exports.smith = exports.evaluation = exports.experimental__prompts__custom_format = exports.experimental__masking = exports.experimental__chains__violation_of_expectations = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__openai_files = exports.experimental__openai_assistant = exports.experimental__autogpt = exports.util__time = void 0;
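Most of the `.cjs` hunks in this release, including the one above, are the same mechanical edit: the emitted `__importStar` helper is swapped for a newer variant that caches `ownKeys`. These are TypeScript's CommonJS down-leveling helpers, so the churn looks like a compiler/tooling upgrade rather than hand-edited source. A rough sketch of the kind of source that produces this output (the module name is only an example):

```typescript
// In a CommonJS build, tsc rewrites a dynamic import() into
// Promise.resolve().then(() => __importStar(require("peggy"))),
// emitting the __importStar/__createBinding/__setModuleDefault helpers seen above.
export async function loadPeggy() {
  const { default: peggy } = await import("peggy");
  return peggy;
}
```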
package/dist/load/index.cjs
CHANGED
@@ -15,15 +15,25 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (
-
-
-
-
-
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.load =
+exports.load = load;
 const load_1 = require("@langchain/core/load");
 const import_constants_js_1 = require("./import_constants.cjs");
 const importMap = __importStar(require("./import_map.cjs"));
@@ -50,4 +60,3 @@ additionalImportsMap = {}) {
         importMap: { ...importMap, ...additionalImportsMap },
     });
 }
-exports.load = load;
package/dist/memory/buffer_memory.cjs
CHANGED
@@ -18,7 +18,7 @@ const chat_memory_js_1 = require("./chat_memory.cjs");
  * ```typescript
  * // Initialize the memory to store chat history and set up the language model with a specific temperature.
  * const memory = new BufferMemory({ memoryKey: "chat_history" });
- * const model = new ChatOpenAI({ temperature: 0.9 });
+ * const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
  *
  * // Create a prompt template for a friendly conversation between a human and an AI.
  * const prompt =
package/dist/memory/buffer_memory.d.ts
CHANGED
@@ -23,7 +23,7 @@ export interface BufferMemoryInput extends BaseChatMemoryInput {
  * ```typescript
  * // Initialize the memory to store chat history and set up the language model with a specific temperature.
  * const memory = new BufferMemory({ memoryKey: "chat_history" });
- * const model = new ChatOpenAI({ temperature: 0.9 });
+ * const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
  *
  * // Create a prompt template for a friendly conversation between a human and an AI.
  * const prompt =
package/dist/memory/buffer_memory.js
CHANGED
@@ -15,7 +15,7 @@ import { BaseChatMemory } from "./chat_memory.js";
  * ```typescript
  * // Initialize the memory to store chat history and set up the language model with a specific temperature.
  * const memory = new BufferMemory({ memoryKey: "chat_history" });
- * const model = new ChatOpenAI({ temperature: 0.9 });
+ * const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
  *
  * // Create a prompt template for a friendly conversation between a human and an AI.
  * const prompt =
package/dist/memory/buffer_token_memory.cjs
CHANGED
@@ -10,7 +10,7 @@ const chat_memory_js_1 = require("./chat_memory.cjs");
  * @example
  * ```typescript
  * const memory = new ConversationTokenBufferMemory({
- *   llm: new ChatOpenAI({}),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   maxTokenLimit: 10,
  * });
  *
package/dist/memory/buffer_token_memory.d.ts
CHANGED
@@ -18,7 +18,7 @@ export interface ConversationTokenBufferMemoryInput extends BaseChatMemoryInput
  * @example
  * ```typescript
  * const memory = new ConversationTokenBufferMemory({
- *   llm: new ChatOpenAI({}),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   maxTokenLimit: 10,
  * });
  *
package/dist/memory/buffer_window_memory.cjs
CHANGED
@@ -18,7 +18,7 @@ const chat_memory_js_1 = require("./chat_memory.cjs");
  * AI:`);
  *
  * const chain = new LLMChain({
- *   llm: new ChatOpenAI({ temperature: 0.9 }),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 }),
  *   prompt,
  *   memory: new BufferWindowMemory({ memoryKey: "chat_history", k: 1 }),
  * });
package/dist/memory/buffer_window_memory.d.ts
CHANGED
@@ -24,7 +24,7 @@ export interface BufferWindowMemoryInput extends BaseChatMemoryInput {
  * AI:`);
  *
  * const chain = new LLMChain({
- *   llm: new ChatOpenAI({ temperature: 0.9 }),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 }),
  *   prompt,
  *   memory: new BufferWindowMemory({ memoryKey: "chat_history", k: 1 }),
  * });
package/dist/memory/buffer_window_memory.js
CHANGED
@@ -15,7 +15,7 @@ import { BaseChatMemory } from "./chat_memory.js";
  * AI:`);
  *
  * const chain = new LLMChain({
- *   llm: new ChatOpenAI({ temperature: 0.9 }),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 }),
  *   prompt,
  *   memory: new BufferWindowMemory({ memoryKey: "chat_history", k: 1 }),
  * });
package/dist/memory/entity_memory.cjs
CHANGED
@@ -15,11 +15,11 @@ const chat_memory_js_1 = require("./chat_memory.cjs");
  * @example
  * ```typescript
  * const memory = new EntityMemory({
- *   llm: new ChatOpenAI({ temperature: 0 }),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
  *   chatHistoryKey: "history",
  *   entitiesKey: "entities",
  * });
- * const model = new ChatOpenAI({ temperature: 0.9 });
+ * const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
  * const chain = new LLMChain({
  *   llm: model,
  *   prompt: ENTITY_MEMORY_CONVERSATION_TEMPLATE,
package/dist/memory/entity_memory.d.ts
CHANGED
@@ -25,11 +25,11 @@ export interface EntityMemoryInput extends BaseChatMemoryInput {
  * @example
  * ```typescript
  * const memory = new EntityMemory({
- *   llm: new ChatOpenAI({ temperature: 0 }),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
  *   chatHistoryKey: "history",
  *   entitiesKey: "entities",
  * });
- * const model = new ChatOpenAI({ temperature: 0.9 });
+ * const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
  * const chain = new LLMChain({
  *   llm: model,
  *   prompt: ENTITY_MEMORY_CONVERSATION_TEMPLATE,
package/dist/memory/entity_memory.js
CHANGED
@@ -12,11 +12,11 @@ import { BaseChatMemory } from "./chat_memory.js";
  * @example
  * ```typescript
  * const memory = new EntityMemory({
- *   llm: new ChatOpenAI({ temperature: 0 }),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
  *   chatHistoryKey: "history",
  *   entitiesKey: "entities",
  * });
- * const model = new ChatOpenAI({ temperature: 0.9 });
+ * const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
  * const chain = new LLMChain({
  *   llm: model,
  *   prompt: ENTITY_MEMORY_CONVERSATION_TEMPLATE,
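The memory hunks above all make the same documentation change: `ChatOpenAI` is constructed with an explicit `model` instead of relying on an implicit default. A runnable sketch in the same spirit, assuming `@langchain/openai` is installed and `OPENAI_API_KEY` is set:

```typescript
import { ChatOpenAI } from "@langchain/openai";
import { BufferMemory } from "langchain/memory";
import { ConversationChain } from "langchain/chains";

async function main() {
  const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
  const memory = new BufferMemory(); // keeps turns under the default "history" key
  const chain = new ConversationChain({ llm: model, memory });

  const res = await chain.invoke({ input: "Hi! I'm planning a trip to Kyoto." });
  console.log(res.response);
}

main().catch(console.error);
```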
package/dist/memory/summary.cjs
CHANGED
@@ -83,10 +83,10 @@ exports.BaseConversationSummaryMemory = BaseConversationSummaryMemory;
  * ```typescript
  * const memory = new ConversationSummaryMemory({
  *   memoryKey: "chat_history",
- *   llm: new ChatOpenAI({
+ *   llm: new ChatOpenAI({ model: "gpt-3.5-turbo", temperature: 0 }),
  * });
  *
- * const model = new ChatOpenAI();
+ * const model = new ChatOpenAI({ model: "gpt-4o-mini" });
  * const prompt =
  *   PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
  *
package/dist/memory/summary.d.ts
CHANGED
@@ -51,10 +51,10 @@ export declare abstract class BaseConversationSummaryMemory extends BaseChatMemo
  * ```typescript
  * const memory = new ConversationSummaryMemory({
  *   memoryKey: "chat_history",
- *   llm: new ChatOpenAI({
+ *   llm: new ChatOpenAI({ model: "gpt-3.5-turbo", temperature: 0 }),
  * });
  *
- * const model = new ChatOpenAI();
+ * const model = new ChatOpenAI({ model: "gpt-4o-mini" });
  * const prompt =
  *   PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
  *
package/dist/memory/summary.js
CHANGED
@@ -79,10 +79,10 @@ export class BaseConversationSummaryMemory extends BaseChatMemory {
  * ```typescript
  * const memory = new ConversationSummaryMemory({
  *   memoryKey: "chat_history",
- *   llm: new ChatOpenAI({
+ *   llm: new ChatOpenAI({ model: "gpt-3.5-turbo", temperature: 0 }),
  * });
  *
- * const model = new ChatOpenAI();
+ * const model = new ChatOpenAI({ model: "gpt-4o-mini" });
  * const prompt =
  *   PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
  *
package/dist/memory/summary_buffer.cjs
CHANGED
@@ -13,7 +13,7 @@ const summary_js_1 = require("./summary.cjs");
  * ```typescript
  * // Initialize the memory with a specific model and token limit
  * const memory = new ConversationSummaryBufferMemory({
- *   llm: new ChatOpenAI({
+ *   llm: new ChatOpenAI({ model: "gpt-3.5-turbo-instruct", temperature: 0 }),
  *   maxTokenLimit: 10,
  * });
  *
@@ -36,7 +36,7 @@ const summary_js_1 = require("./summary.cjs");
  *
  * // Initialize the conversation chain with the model, memory, and prompt
  * const chain = new ConversationChain({
- *   llm: new ChatOpenAI({ temperature: 0.9, verbose: true }),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9, verbose: true }),
  *   memory: memory,
  *   prompt: chatPrompt,
  * });
package/dist/memory/summary_buffer.d.ts
CHANGED
@@ -17,7 +17,7 @@ export interface ConversationSummaryBufferMemoryInput extends BaseConversationSu
  * ```typescript
  * // Initialize the memory with a specific model and token limit
  * const memory = new ConversationSummaryBufferMemory({
- *   llm: new ChatOpenAI({
+ *   llm: new ChatOpenAI({ model: "gpt-3.5-turbo-instruct", temperature: 0 }),
  *   maxTokenLimit: 10,
  * });
  *
@@ -40,7 +40,7 @@ export interface ConversationSummaryBufferMemoryInput extends BaseConversationSu
  *
  * // Initialize the conversation chain with the model, memory, and prompt
  * const chain = new ConversationChain({
- *   llm: new ChatOpenAI({ temperature: 0.9, verbose: true }),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9, verbose: true }),
  *   memory: memory,
  *   prompt: chatPrompt,
  * });
package/dist/memory/summary_buffer.js
CHANGED
@@ -10,7 +10,7 @@ import { BaseConversationSummaryMemory, } from "./summary.js";
  * ```typescript
  * // Initialize the memory with a specific model and token limit
  * const memory = new ConversationSummaryBufferMemory({
- *   llm: new ChatOpenAI({
+ *   llm: new ChatOpenAI({ model: "gpt-3.5-turbo-instruct", temperature: 0 }),
  *   maxTokenLimit: 10,
  * });
  *
@@ -33,7 +33,7 @@ import { BaseConversationSummaryMemory, } from "./summary.js";
  *
  * // Initialize the conversation chain with the model, memory, and prompt
  * const chain = new ConversationChain({
- *   llm: new ChatOpenAI({ temperature: 0.9, verbose: true }),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9, verbose: true }),
  *   memory: memory,
  *   prompt: chatPrompt,
  * });
package/dist/output_parsers/expression_type_handlers/base.cjs
CHANGED
@@ -1,4 +1,37 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ASTParser = exports.NodeHandler = void 0;
 const parser_grammar_js_1 = require("./grammar/parser_grammar.cjs");
@@ -30,7 +63,7 @@ class ASTParser {
     static async importASTParser() {
         try {
             if (!ASTParser.astParseInstance) {
-                const { default: peggy } = await
+                const { default: peggy } = await Promise.resolve().then(() => __importStar(require("peggy")));
                 const parser = peggy.generate(parser_grammar_js_1.GRAMMAR);
                 const { parse } = parser;
                 ASTParser.astParseInstance = parse;
package/dist/prompts/index.cjs
CHANGED
package/dist/prompts/index.d.ts
CHANGED
@@ -1 +0,0 @@
-export {};
package/dist/prompts/index.js
CHANGED
package/dist/retrievers/hyde.cjs
CHANGED
@@ -1,6 +1,7 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.HydeRetriever = void 0;
+exports.getPromptTemplateFromKey = getPromptTemplateFromKey;
 const prompts_1 = require("@langchain/core/prompts");
 const prompt_values_1 = require("@langchain/core/prompt_values");
 const vectorstores_1 = require("@langchain/core/vectorstores");
@@ -13,7 +14,7 @@ const vectorstores_1 = require("@langchain/core/vectorstores");
  * ```typescript
  * const retriever = new HydeRetriever({
  *   vectorStore: new MemoryVectorStore(new OpenAIEmbeddings()),
- *   llm: new ChatOpenAI(),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   k: 1,
  * });
  * await vectorStore.addDocuments(
@@ -128,4 +129,3 @@ Passage:`;
     }
     return prompts_1.PromptTemplate.fromTemplate(template);
 }
-exports.getPromptTemplateFromKey = getPromptTemplateFromKey;
package/dist/retrievers/hyde.d.ts
CHANGED
@@ -25,7 +25,7 @@ export type HydeRetrieverOptions<V extends VectorStore> = VectorStoreRetrieverIn
  * ```typescript
  * const retriever = new HydeRetriever({
  *   vectorStore: new MemoryVectorStore(new OpenAIEmbeddings()),
- *   llm: new ChatOpenAI(),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   k: 1,
  * });
  * await vectorStore.addDocuments(
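For reference, a self-contained version of the updated `HydeRetriever` JSDoc example (the docstring constructs the vector store inline but then calls `vectorStore.addDocuments`, so this sketch binds it to a variable first). Assumes `@langchain/openai` is installed and `OPENAI_API_KEY` is set; the documents are placeholders:

```typescript
import { ChatOpenAI, OpenAIEmbeddings } from "@langchain/openai";
import { MemoryVectorStore } from "langchain/vectorstores/memory";
import { HydeRetriever } from "langchain/retrievers/hyde";
import { Document } from "@langchain/core/documents";

async function main() {
  const vectorStore = new MemoryVectorStore(new OpenAIEmbeddings());
  const retriever = new HydeRetriever({
    vectorStore,
    llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
    k: 1,
  });

  // Index a couple of toy documents, then query through the HyDE retriever.
  await vectorStore.addDocuments(
    ["My name is John.", "My favorite food is pizza."].map(
      (pageContent) => new Document({ pageContent })
    )
  );

  const results = await retriever.invoke("What is my favorite food?");
  console.log(results);
}

main().catch(console.error);
```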
package/dist/retrievers/hyde.js
CHANGED
@@ -10,7 +10,7 @@ import { VectorStoreRetriever, } from "@langchain/core/vectorstores";
  * ```typescript
  * const retriever = new HydeRetriever({
  *   vectorStore: new MemoryVectorStore(new OpenAIEmbeddings()),
- *   llm: new ChatOpenAI(),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   k: 1,
  * });
  * await vectorStore.addDocuments(
package/dist/retrievers/parent_document.cjs
CHANGED
@@ -15,13 +15,23 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (
 }) : function(o, v) {
     o["default"] = v;
 });
-var __importStar = (this && this.__importStar) || function (
-
-
-
-
-
-};
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ParentDocumentRetriever = void 0;
 const uuid = __importStar(require("uuid"));
package/dist/retrievers/self_query/index.cjs
CHANGED
@@ -14,7 +14,7 @@ const index_js_1 = require("../../chains/query_constructor/index.cjs");
  * @example
  * ```typescript
  * const selfQueryRetriever = SelfQueryRetriever.fromLLM({
- *   llm: new ChatOpenAI(),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   vectorStore: await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings()),
  *   documentContents: "Brief summary of a movie",
  *   attributeInfo: attributeInfo,
package/dist/retrievers/self_query/index.d.ts
CHANGED
@@ -32,7 +32,7 @@ export interface SelfQueryRetrieverArgs<T extends VectorStore> extends BaseRetri
  * @example
  * ```typescript
  * const selfQueryRetriever = SelfQueryRetriever.fromLLM({
- *   llm: new ChatOpenAI(),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   vectorStore: await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings()),
  *   documentContents: "Brief summary of a movie",
  *   attributeInfo: attributeInfo,
package/dist/retrievers/self_query/index.js
CHANGED
@@ -9,7 +9,7 @@ export { BaseTranslator, BasicTranslator, FunctionalTranslator };
  * @example
  * ```typescript
  * const selfQueryRetriever = SelfQueryRetriever.fromLLM({
- *   llm: new ChatOpenAI(),
+ *   llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
  *   vectorStore: await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings()),
  *   documentContents: "Brief summary of a movie",
  *   attributeInfo: attributeInfo,
package/dist/smith/config.cjs
CHANGED
@@ -1,14 +1,16 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.
+exports.isOffTheShelfEvaluator = isOffTheShelfEvaluator;
+exports.isCustomEvaluator = isCustomEvaluator;
+exports.Criteria = Criteria;
+exports.LabeledCriteria = LabeledCriteria;
+exports.EmbeddingDistance = EmbeddingDistance;
 function isOffTheShelfEvaluator(evaluator) {
     return typeof evaluator === "string" || "evaluatorType" in evaluator;
 }
-exports.isOffTheShelfEvaluator = isOffTheShelfEvaluator;
 function isCustomEvaluator(evaluator) {
     return !isOffTheShelfEvaluator(evaluator);
 }
-exports.isCustomEvaluator = isCustomEvaluator;
 const isStringifiableValue = (value) => typeof value === "string" ||
     typeof value === "number" ||
     typeof value === "boolean" ||
@@ -44,7 +46,6 @@ function Criteria(criteria, config) {
         formatEvaluatorInputs,
     };
 }
-exports.Criteria = Criteria;
 function LabeledCriteria(criteria, config) {
     const formatEvaluatorInputs = config?.formatEvaluatorInputs ??
         ((payload) => ({
@@ -64,7 +65,6 @@ function LabeledCriteria(criteria, config) {
         formatEvaluatorInputs,
     };
 }
-exports.LabeledCriteria = LabeledCriteria;
 function EmbeddingDistance(distanceMetric, config) {
     const formatEvaluatorInputs = config?.formatEvaluatorInputs ??
         ((payload) => ({
@@ -79,4 +79,3 @@ function EmbeddingDistance(distanceMetric, config) {
         formatEvaluatorInputs,
     };
 }
-exports.EmbeddingDistance = EmbeddingDistance;
package/dist/smith/name_generation.cjs
CHANGED
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.randomName =
+exports.randomName = randomName;
 const adjectives = [
     "abandoned",
     "aching",
@@ -723,4 +723,3 @@ function randomName() {
     const number = Math.floor(Math.random() * 100) + 1;
     return `${adjective}-${noun}-${number}`;
 }
-exports.randomName = randomName;
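Beyond the helper updates, a second pattern repeats across these files (`hub/node.cjs`, `retrievers/hyde.cjs`, `smith/config.cjs`, `smith/name_generation.cjs`): the `exports.fn = fn;` assignments that used to trail each function body now appear once near the module prologue. This is consistent with the emit of a newer TypeScript toolchain; because function declarations are hoisted, the CommonJS surface is unchanged. Illustrative source only, with a placeholder body:

```typescript
// An exported function declaration like this compiles to a CommonJS module where
// `exports.randomName = randomName;` may be placed either after the body (older
// output) or right after the prologue (the 0.3.30 output) with identical behavior.
export function randomName(): string {
  const number = Math.floor(Math.random() * 100) + 1;
  return `demo-name-${number}`;
}
```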