langchain 0.3.29 → 0.3.31
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -12
- package/dist/agents/agent.cjs +1 -1
- package/dist/agents/agent.d.ts +1 -1
- package/dist/agents/agent.js +1 -1
- package/dist/agents/mrkl/index.cjs +1 -1
- package/dist/agents/mrkl/index.d.ts +1 -1
- package/dist/agents/mrkl/index.js +1 -1
- package/dist/agents/openai_functions/index.cjs +1 -0
- package/dist/agents/openai_functions/index.d.ts +1 -0
- package/dist/agents/openai_functions/index.js +1 -0
- package/dist/agents/openai_functions/output_parser.cjs +1 -1
- package/dist/agents/openai_functions/output_parser.d.ts +1 -1
- package/dist/agents/openai_functions/output_parser.js +1 -1
- package/dist/agents/openai_tools/index.cjs +1 -1
- package/dist/agents/openai_tools/index.d.ts +1 -1
- package/dist/agents/openai_tools/index.js +1 -1
- package/dist/agents/openai_tools/output_parser.cjs +19 -14
- package/dist/agents/openai_tools/output_parser.d.ts +1 -1
- package/dist/agents/openai_tools/output_parser.js +19 -14
- package/dist/agents/structured_chat/index.cjs +1 -1
- package/dist/agents/structured_chat/index.d.ts +1 -1
- package/dist/agents/structured_chat/index.js +1 -1
- package/dist/agents/structured_chat/outputParser.cjs +1 -1
- package/dist/agents/structured_chat/outputParser.d.ts +1 -1
- package/dist/agents/structured_chat/outputParser.js +1 -1
- package/dist/agents/tool_calling/output_parser.cjs +1 -1
- package/dist/agents/tool_calling/output_parser.js +1 -1
- package/dist/agents/toolkits/openapi/openapi.cjs +1 -1
- package/dist/agents/toolkits/openapi/openapi.d.ts +1 -1
- package/dist/agents/toolkits/openapi/openapi.js +1 -1
- package/dist/agents/toolkits/sql/sql.cjs +1 -1
- package/dist/agents/toolkits/sql/sql.d.ts +1 -1
- package/dist/agents/toolkits/sql/sql.js +1 -1
- package/dist/agents/toolkits/vectorstore/vectorstore.cjs +1 -1
- package/dist/agents/toolkits/vectorstore/vectorstore.d.ts +1 -1
- package/dist/agents/toolkits/vectorstore/vectorstore.js +1 -1
- package/dist/chains/analyze_documents_chain.cjs +1 -1
- package/dist/chains/analyze_documents_chain.d.ts +1 -1
- package/dist/chains/analyze_documents_chain.js +1 -1
- package/dist/chains/constitutional_ai/constitutional_principle.cjs +2 -2
- package/dist/chains/constitutional_ai/constitutional_principle.d.ts +2 -2
- package/dist/chains/constitutional_ai/constitutional_principle.js +2 -2
- package/dist/chains/conversation.cjs +1 -1
- package/dist/chains/conversation.d.ts +1 -1
- package/dist/chains/conversation.js +1 -1
- package/dist/chains/graph_qa/cypher.cjs +1 -1
- package/dist/chains/graph_qa/cypher.d.ts +1 -1
- package/dist/chains/graph_qa/cypher.js +1 -1
- package/dist/chains/history_aware_retriever.cjs +1 -1
- package/dist/chains/history_aware_retriever.d.ts +1 -1
- package/dist/chains/history_aware_retriever.js +1 -1
- package/dist/chains/llm_chain.cjs +1 -1
- package/dist/chains/llm_chain.d.ts +1 -1
- package/dist/chains/llm_chain.js +1 -1
- package/dist/chains/openai_functions/base.cjs +1 -1
- package/dist/chains/openai_functions/base.d.ts +1 -1
- package/dist/chains/openai_functions/base.js +1 -1
- package/dist/chains/openai_functions/structured_output.cjs +1 -1
- package/dist/chains/openai_functions/structured_output.js +1 -1
- package/dist/chains/retrieval.cjs +1 -1
- package/dist/chains/retrieval.d.ts +1 -1
- package/dist/chains/retrieval.js +1 -1
- package/dist/chains/router/multi_prompt.cjs +16 -13
- package/dist/chains/router/multi_prompt.d.ts +16 -13
- package/dist/chains/router/multi_prompt.js +16 -13
- package/dist/chains/router/multi_retrieval_qa.cjs +1 -1
- package/dist/chains/router/multi_retrieval_qa.d.ts +1 -1
- package/dist/chains/router/multi_retrieval_qa.js +1 -1
- package/dist/chains/sequential_chain.cjs +2 -2
- package/dist/chains/sequential_chain.d.ts +2 -2
- package/dist/chains/sequential_chain.js +2 -2
- package/dist/chains/sql_db/sql_db_chain.cjs +1 -1
- package/dist/chains/sql_db/sql_db_chain.d.ts +1 -1
- package/dist/chains/sql_db/sql_db_chain.js +1 -1
- package/dist/chat_models/universal.cjs +1 -1
- package/dist/chat_models/universal.js +1 -1
- package/dist/document_transformers/openai_functions.cjs +1 -1
- package/dist/document_transformers/openai_functions.js +1 -1
- package/dist/evaluation/loader.cjs +1 -1
- package/dist/evaluation/loader.js +1 -1
- package/dist/experimental/autogpt/agent.cjs +1 -1
- package/dist/experimental/autogpt/agent.d.ts +1 -1
- package/dist/experimental/autogpt/agent.js +1 -1
- package/dist/experimental/generative_agents/generative_agent.cjs +1 -1
- package/dist/experimental/generative_agents/generative_agent.d.ts +1 -1
- package/dist/experimental/generative_agents/generative_agent.js +1 -1
- package/dist/hub/base.cjs +19 -0
- package/dist/hub/base.d.ts +1 -0
- package/dist/hub/base.js +18 -0
- package/dist/hub/index.cjs +1 -1
- package/dist/hub/index.js +2 -2
- package/dist/hub/node.cjs +1 -1
- package/dist/hub/node.js +2 -2
- package/dist/memory/buffer_memory.cjs +1 -1
- package/dist/memory/buffer_memory.d.ts +1 -1
- package/dist/memory/buffer_memory.js +1 -1
- package/dist/memory/buffer_token_memory.cjs +1 -1
- package/dist/memory/buffer_token_memory.d.ts +1 -1
- package/dist/memory/buffer_token_memory.js +1 -1
- package/dist/memory/buffer_window_memory.cjs +1 -1
- package/dist/memory/buffer_window_memory.d.ts +1 -1
- package/dist/memory/buffer_window_memory.js +1 -1
- package/dist/memory/entity_memory.cjs +2 -2
- package/dist/memory/entity_memory.d.ts +2 -2
- package/dist/memory/entity_memory.js +2 -2
- package/dist/memory/summary.cjs +2 -2
- package/dist/memory/summary.d.ts +2 -2
- package/dist/memory/summary.js +2 -2
- package/dist/memory/summary_buffer.cjs +2 -2
- package/dist/memory/summary_buffer.d.ts +2 -2
- package/dist/memory/summary_buffer.js +2 -2
- package/dist/retrievers/hyde.cjs +1 -1
- package/dist/retrievers/hyde.d.ts +1 -1
- package/dist/retrievers/hyde.js +1 -1
- package/dist/retrievers/self_query/index.cjs +1 -1
- package/dist/retrievers/self_query/index.d.ts +1 -1
- package/dist/retrievers/self_query/index.js +1 -1
- package/dist/smith/runner_utils.cjs +2 -1
- package/dist/smith/runner_utils.js +2 -1
- package/dist/tools/webbrowser.cjs +1 -1
- package/dist/tools/webbrowser.d.ts +1 -1
- package/dist/tools/webbrowser.js +1 -1
- package/dist/util/sql_utils.cjs +1 -0
- package/dist/util/sql_utils.js +1 -0
- package/package.json +4 -4
|
@@ -18,7 +18,7 @@ const chat_memory_js_1 = require("./chat_memory.cjs");
|
|
|
18
18
|
* ```typescript
|
|
19
19
|
* // Initialize the memory to store chat history and set up the language model with a specific temperature.
|
|
20
20
|
* const memory = new BufferMemory({ memoryKey: "chat_history" });
|
|
21
|
-
* const model = new ChatOpenAI({ temperature: 0.9 });
|
|
21
|
+
* const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
|
|
22
22
|
*
|
|
23
23
|
* // Create a prompt template for a friendly conversation between a human and an AI.
|
|
24
24
|
* const prompt =
|
|
@@ -23,7 +23,7 @@ export interface BufferMemoryInput extends BaseChatMemoryInput {
|
|
|
23
23
|
* ```typescript
|
|
24
24
|
* // Initialize the memory to store chat history and set up the language model with a specific temperature.
|
|
25
25
|
* const memory = new BufferMemory({ memoryKey: "chat_history" });
|
|
26
|
-
* const model = new ChatOpenAI({ temperature: 0.9 });
|
|
26
|
+
* const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
|
|
27
27
|
*
|
|
28
28
|
* // Create a prompt template for a friendly conversation between a human and an AI.
|
|
29
29
|
* const prompt =
|
|
@@ -15,7 +15,7 @@ import { BaseChatMemory } from "./chat_memory.js";
|
|
|
15
15
|
* ```typescript
|
|
16
16
|
* // Initialize the memory to store chat history and set up the language model with a specific temperature.
|
|
17
17
|
* const memory = new BufferMemory({ memoryKey: "chat_history" });
|
|
18
|
-
* const model = new ChatOpenAI({ temperature: 0.9 });
|
|
18
|
+
* const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
|
|
19
19
|
*
|
|
20
20
|
* // Create a prompt template for a friendly conversation between a human and an AI.
|
|
21
21
|
* const prompt =
|
|
@@ -10,7 +10,7 @@ const chat_memory_js_1 = require("./chat_memory.cjs");
|
|
|
10
10
|
* @example
|
|
11
11
|
* ```typescript
|
|
12
12
|
* const memory = new ConversationTokenBufferMemory({
|
|
13
|
-
* llm: new ChatOpenAI({}),
|
|
13
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
|
|
14
14
|
* maxTokenLimit: 10,
|
|
15
15
|
* });
|
|
16
16
|
*
|
|
@@ -18,7 +18,7 @@ export interface ConversationTokenBufferMemoryInput extends BaseChatMemoryInput
|
|
|
18
18
|
* @example
|
|
19
19
|
* ```typescript
|
|
20
20
|
* const memory = new ConversationTokenBufferMemory({
|
|
21
|
-
* llm: new ChatOpenAI({}),
|
|
21
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
|
|
22
22
|
* maxTokenLimit: 10,
|
|
23
23
|
* });
|
|
24
24
|
*
|
|
@@ -18,7 +18,7 @@ const chat_memory_js_1 = require("./chat_memory.cjs");
|
|
|
18
18
|
* AI:`);
|
|
19
19
|
*
|
|
20
20
|
* const chain = new LLMChain({
|
|
21
|
-
* llm: new ChatOpenAI({ temperature: 0.9 }),
|
|
21
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 }),
|
|
22
22
|
* prompt,
|
|
23
23
|
* memory: new BufferWindowMemory({ memoryKey: "chat_history", k: 1 }),
|
|
24
24
|
* });
|
|
@@ -24,7 +24,7 @@ export interface BufferWindowMemoryInput extends BaseChatMemoryInput {
|
|
|
24
24
|
* AI:`);
|
|
25
25
|
*
|
|
26
26
|
* const chain = new LLMChain({
|
|
27
|
-
* llm: new ChatOpenAI({ temperature: 0.9 }),
|
|
27
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 }),
|
|
28
28
|
* prompt,
|
|
29
29
|
* memory: new BufferWindowMemory({ memoryKey: "chat_history", k: 1 }),
|
|
30
30
|
* });
|
|
@@ -15,7 +15,7 @@ import { BaseChatMemory } from "./chat_memory.js";
|
|
|
15
15
|
* AI:`);
|
|
16
16
|
*
|
|
17
17
|
* const chain = new LLMChain({
|
|
18
|
-
* llm: new ChatOpenAI({ temperature: 0.9 }),
|
|
18
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 }),
|
|
19
19
|
* prompt,
|
|
20
20
|
* memory: new BufferWindowMemory({ memoryKey: "chat_history", k: 1 }),
|
|
21
21
|
* });
|
|
@@ -15,11 +15,11 @@ const chat_memory_js_1 = require("./chat_memory.cjs");
|
|
|
15
15
|
* @example
|
|
16
16
|
* ```typescript
|
|
17
17
|
* const memory = new EntityMemory({
|
|
18
|
-
* llm: new ChatOpenAI({ temperature: 0 }),
|
|
18
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
|
|
19
19
|
* chatHistoryKey: "history",
|
|
20
20
|
* entitiesKey: "entities",
|
|
21
21
|
* });
|
|
22
|
-
* const model = new ChatOpenAI({ temperature: 0.9 });
|
|
22
|
+
* const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
|
|
23
23
|
* const chain = new LLMChain({
|
|
24
24
|
* llm: model,
|
|
25
25
|
* prompt: ENTITY_MEMORY_CONVERSATION_TEMPLATE,
|
|
@@ -25,11 +25,11 @@ export interface EntityMemoryInput extends BaseChatMemoryInput {
|
|
|
25
25
|
* @example
|
|
26
26
|
* ```typescript
|
|
27
27
|
* const memory = new EntityMemory({
|
|
28
|
-
* llm: new ChatOpenAI({ temperature: 0 }),
|
|
28
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
|
|
29
29
|
* chatHistoryKey: "history",
|
|
30
30
|
* entitiesKey: "entities",
|
|
31
31
|
* });
|
|
32
|
-
* const model = new ChatOpenAI({ temperature: 0.9 });
|
|
32
|
+
* const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
|
|
33
33
|
* const chain = new LLMChain({
|
|
34
34
|
* llm: model,
|
|
35
35
|
* prompt: ENTITY_MEMORY_CONVERSATION_TEMPLATE,
|
|
@@ -12,11 +12,11 @@ import { BaseChatMemory } from "./chat_memory.js";
|
|
|
12
12
|
* @example
|
|
13
13
|
* ```typescript
|
|
14
14
|
* const memory = new EntityMemory({
|
|
15
|
-
* llm: new ChatOpenAI({ temperature: 0 }),
|
|
15
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
|
|
16
16
|
* chatHistoryKey: "history",
|
|
17
17
|
* entitiesKey: "entities",
|
|
18
18
|
* });
|
|
19
|
-
* const model = new ChatOpenAI({ temperature: 0.9 });
|
|
19
|
+
* const model = new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9 });
|
|
20
20
|
* const chain = new LLMChain({
|
|
21
21
|
* llm: model,
|
|
22
22
|
* prompt: ENTITY_MEMORY_CONVERSATION_TEMPLATE,
|
package/dist/memory/summary.cjs
CHANGED
|
@@ -83,10 +83,10 @@ exports.BaseConversationSummaryMemory = BaseConversationSummaryMemory;
|
|
|
83
83
|
* ```typescript
|
|
84
84
|
* const memory = new ConversationSummaryMemory({
|
|
85
85
|
* memoryKey: "chat_history",
|
|
86
|
-
* llm: new ChatOpenAI({
|
|
86
|
+
* llm: new ChatOpenAI({ model: "gpt-3.5-turbo", temperature: 0 }),
|
|
87
87
|
* });
|
|
88
88
|
*
|
|
89
|
-
* const model = new ChatOpenAI();
|
|
89
|
+
* const model = new ChatOpenAI({ model: "gpt-4o-mini" });
|
|
90
90
|
* const prompt =
|
|
91
91
|
* PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
|
|
92
92
|
*
|
package/dist/memory/summary.d.ts
CHANGED
|
@@ -51,10 +51,10 @@ export declare abstract class BaseConversationSummaryMemory extends BaseChatMemo
|
|
|
51
51
|
* ```typescript
|
|
52
52
|
* const memory = new ConversationSummaryMemory({
|
|
53
53
|
* memoryKey: "chat_history",
|
|
54
|
-
* llm: new ChatOpenAI({
|
|
54
|
+
* llm: new ChatOpenAI({ model: "gpt-3.5-turbo", temperature: 0 }),
|
|
55
55
|
* });
|
|
56
56
|
*
|
|
57
|
-
* const model = new ChatOpenAI();
|
|
57
|
+
* const model = new ChatOpenAI({ model: "gpt-4o-mini" });
|
|
58
58
|
* const prompt =
|
|
59
59
|
* PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
|
|
60
60
|
*
|
package/dist/memory/summary.js
CHANGED
|
@@ -79,10 +79,10 @@ export class BaseConversationSummaryMemory extends BaseChatMemory {
|
|
|
79
79
|
* ```typescript
|
|
80
80
|
* const memory = new ConversationSummaryMemory({
|
|
81
81
|
* memoryKey: "chat_history",
|
|
82
|
-
* llm: new ChatOpenAI({
|
|
82
|
+
* llm: new ChatOpenAI({ model: "gpt-3.5-turbo", temperature: 0 }),
|
|
83
83
|
* });
|
|
84
84
|
*
|
|
85
|
-
* const model = new ChatOpenAI();
|
|
85
|
+
* const model = new ChatOpenAI({ model: "gpt-4o-mini" });
|
|
86
86
|
* const prompt =
|
|
87
87
|
* PromptTemplate.fromTemplate(`The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
|
|
88
88
|
*
|
|
@@ -13,7 +13,7 @@ const summary_js_1 = require("./summary.cjs");
|
|
|
13
13
|
* ```typescript
|
|
14
14
|
* // Initialize the memory with a specific model and token limit
|
|
15
15
|
* const memory = new ConversationSummaryBufferMemory({
|
|
16
|
-
* llm: new ChatOpenAI({
|
|
16
|
+
* llm: new ChatOpenAI({ model: "gpt-3.5-turbo-instruct", temperature: 0 }),
|
|
17
17
|
* maxTokenLimit: 10,
|
|
18
18
|
* });
|
|
19
19
|
*
|
|
@@ -36,7 +36,7 @@ const summary_js_1 = require("./summary.cjs");
|
|
|
36
36
|
*
|
|
37
37
|
* // Initialize the conversation chain with the model, memory, and prompt
|
|
38
38
|
* const chain = new ConversationChain({
|
|
39
|
-
* llm: new ChatOpenAI({ temperature: 0.9, verbose: true }),
|
|
39
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9, verbose: true }),
|
|
40
40
|
* memory: memory,
|
|
41
41
|
* prompt: chatPrompt,
|
|
42
42
|
* });
|
|
@@ -17,7 +17,7 @@ export interface ConversationSummaryBufferMemoryInput extends BaseConversationSu
|
|
|
17
17
|
* ```typescript
|
|
18
18
|
* // Initialize the memory with a specific model and token limit
|
|
19
19
|
* const memory = new ConversationSummaryBufferMemory({
|
|
20
|
-
* llm: new ChatOpenAI({
|
|
20
|
+
* llm: new ChatOpenAI({ model: "gpt-3.5-turbo-instruct", temperature: 0 }),
|
|
21
21
|
* maxTokenLimit: 10,
|
|
22
22
|
* });
|
|
23
23
|
*
|
|
@@ -40,7 +40,7 @@ export interface ConversationSummaryBufferMemoryInput extends BaseConversationSu
|
|
|
40
40
|
*
|
|
41
41
|
* // Initialize the conversation chain with the model, memory, and prompt
|
|
42
42
|
* const chain = new ConversationChain({
|
|
43
|
-
* llm: new ChatOpenAI({ temperature: 0.9, verbose: true }),
|
|
43
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9, verbose: true }),
|
|
44
44
|
* memory: memory,
|
|
45
45
|
* prompt: chatPrompt,
|
|
46
46
|
* });
|
|
@@ -10,7 +10,7 @@ import { BaseConversationSummaryMemory, } from "./summary.js";
|
|
|
10
10
|
* ```typescript
|
|
11
11
|
* // Initialize the memory with a specific model and token limit
|
|
12
12
|
* const memory = new ConversationSummaryBufferMemory({
|
|
13
|
-
* llm: new ChatOpenAI({
|
|
13
|
+
* llm: new ChatOpenAI({ model: "gpt-3.5-turbo-instruct", temperature: 0 }),
|
|
14
14
|
* maxTokenLimit: 10,
|
|
15
15
|
* });
|
|
16
16
|
*
|
|
@@ -33,7 +33,7 @@ import { BaseConversationSummaryMemory, } from "./summary.js";
|
|
|
33
33
|
*
|
|
34
34
|
* // Initialize the conversation chain with the model, memory, and prompt
|
|
35
35
|
* const chain = new ConversationChain({
|
|
36
|
-
* llm: new ChatOpenAI({ temperature: 0.9, verbose: true }),
|
|
36
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0.9, verbose: true }),
|
|
37
37
|
* memory: memory,
|
|
38
38
|
* prompt: chatPrompt,
|
|
39
39
|
* });
|
package/dist/retrievers/hyde.cjs
CHANGED
|
@@ -14,7 +14,7 @@ const vectorstores_1 = require("@langchain/core/vectorstores");
|
|
|
14
14
|
* ```typescript
|
|
15
15
|
* const retriever = new HydeRetriever({
|
|
16
16
|
* vectorStore: new MemoryVectorStore(new OpenAIEmbeddings()),
|
|
17
|
-
* llm: new ChatOpenAI(),
|
|
17
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
|
|
18
18
|
* k: 1,
|
|
19
19
|
* });
|
|
20
20
|
* await vectorStore.addDocuments(
|
|
@@ -25,7 +25,7 @@ export type HydeRetrieverOptions<V extends VectorStore> = VectorStoreRetrieverIn
|
|
|
25
25
|
* ```typescript
|
|
26
26
|
* const retriever = new HydeRetriever({
|
|
27
27
|
* vectorStore: new MemoryVectorStore(new OpenAIEmbeddings()),
|
|
28
|
-
* llm: new ChatOpenAI(),
|
|
28
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
|
|
29
29
|
* k: 1,
|
|
30
30
|
* });
|
|
31
31
|
* await vectorStore.addDocuments(
|
package/dist/retrievers/hyde.js
CHANGED
|
@@ -10,7 +10,7 @@ import { VectorStoreRetriever, } from "@langchain/core/vectorstores";
|
|
|
10
10
|
* ```typescript
|
|
11
11
|
* const retriever = new HydeRetriever({
|
|
12
12
|
* vectorStore: new MemoryVectorStore(new OpenAIEmbeddings()),
|
|
13
|
-
* llm: new ChatOpenAI(),
|
|
13
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
|
|
14
14
|
* k: 1,
|
|
15
15
|
* });
|
|
16
16
|
* await vectorStore.addDocuments(
|
|
@@ -14,7 +14,7 @@ const index_js_1 = require("../../chains/query_constructor/index.cjs");
|
|
|
14
14
|
* @example
|
|
15
15
|
* ```typescript
|
|
16
16
|
* const selfQueryRetriever = SelfQueryRetriever.fromLLM({
|
|
17
|
-
* llm: new ChatOpenAI(),
|
|
17
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
|
|
18
18
|
* vectorStore: await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings()),
|
|
19
19
|
* documentContents: "Brief summary of a movie",
|
|
20
20
|
* attributeInfo: attributeInfo,
|
|
@@ -32,7 +32,7 @@ export interface SelfQueryRetrieverArgs<T extends VectorStore> extends BaseRetri
|
|
|
32
32
|
* @example
|
|
33
33
|
* ```typescript
|
|
34
34
|
* const selfQueryRetriever = SelfQueryRetriever.fromLLM({
|
|
35
|
-
* llm: new ChatOpenAI(),
|
|
35
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
|
|
36
36
|
* vectorStore: await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings()),
|
|
37
37
|
* documentContents: "Brief summary of a movie",
|
|
38
38
|
* attributeInfo: attributeInfo,
|
|
@@ -9,7 +9,7 @@ export { BaseTranslator, BasicTranslator, FunctionalTranslator };
|
|
|
9
9
|
* @example
|
|
10
10
|
* ```typescript
|
|
11
11
|
* const selfQueryRetriever = SelfQueryRetriever.fromLLM({
|
|
12
|
-
* llm: new ChatOpenAI(),
|
|
12
|
+
* llm: new ChatOpenAI({ model: "gpt-4o-mini" }),
|
|
13
13
|
* vectorStore: await HNSWLib.fromDocuments(docs, new OpenAIEmbeddings()),
|
|
14
14
|
* documentContents: "Brief summary of a movie",
|
|
15
15
|
* attributeInfo: attributeInfo,
|
|
@@ -422,7 +422,8 @@ const applyEvaluators = async ({ evaluation, runs, examples, client, maxConcurre
|
|
|
422
422
|
progress.increment();
|
|
423
423
|
return {
|
|
424
424
|
execution_time: run?.end_time && run.start_time
|
|
425
|
-
? run.end_time -
|
|
425
|
+
? new Date(run.end_time).getTime() -
|
|
426
|
+
new Date(run.start_time).getTime()
|
|
426
427
|
: undefined,
|
|
427
428
|
feedback: evaluatorResults.map((evalResult) => evalResult.status === "fulfilled"
|
|
428
429
|
? evalResult.value
|
|
@@ -419,7 +419,8 @@ const applyEvaluators = async ({ evaluation, runs, examples, client, maxConcurre
|
|
|
419
419
|
progress.increment();
|
|
420
420
|
return {
|
|
421
421
|
execution_time: run?.end_time && run.start_time
|
|
422
|
-
? run.end_time -
|
|
422
|
+
? new Date(run.end_time).getTime() -
|
|
423
|
+
new Date(run.start_time).getTime()
|
|
423
424
|
: undefined,
|
|
424
425
|
feedback: evaluatorResults.map((evalResult) => evalResult.status === "fulfilled"
|
|
425
426
|
? evalResult.value
|
|
@@ -154,7 +154,7 @@ const DEFAULT_HEADERS = {
|
|
|
154
154
|
* @example
|
|
155
155
|
* ```typescript
|
|
156
156
|
* const browser = new WebBrowser({
|
|
157
|
-
* model: new ChatOpenAI({ temperature: 0 }),
|
|
157
|
+
* model: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
|
|
158
158
|
* embeddings: new OpenAIEmbeddings({}),
|
|
159
159
|
* });
|
|
160
160
|
* const result = await browser.invoke("https:exampleurl.com");
|
|
@@ -30,7 +30,7 @@ export interface WebBrowserArgs extends ToolParams {
|
|
|
30
30
|
* @example
|
|
31
31
|
* ```typescript
|
|
32
32
|
* const browser = new WebBrowser({
|
|
33
|
-
* model: new ChatOpenAI({ temperature: 0 }),
|
|
33
|
+
* model: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
|
|
34
34
|
* embeddings: new OpenAIEmbeddings({}),
|
|
35
35
|
* });
|
|
36
36
|
* const result = await browser.invoke("https:exampleurl.com");
|
package/dist/tools/webbrowser.js
CHANGED
|
@@ -113,7 +113,7 @@ const DEFAULT_HEADERS = {
|
|
|
113
113
|
* @example
|
|
114
114
|
* ```typescript
|
|
115
115
|
* const browser = new WebBrowser({
|
|
116
|
-
* model: new ChatOpenAI({ temperature: 0 }),
|
|
116
|
+
* model: new ChatOpenAI({ model: "gpt-4o-mini", temperature: 0 }),
|
|
117
117
|
* embeddings: new OpenAIEmbeddings({}),
|
|
118
118
|
* });
|
|
119
119
|
* const result = await browser.invoke("https:exampleurl.com");
|
package/dist/util/sql_utils.cjs
CHANGED
|
@@ -64,6 +64,7 @@ const getTableAndColumnsName = async (appDataSource) => {
|
|
|
64
64
|
return formatToSqlTable(rep);
|
|
65
65
|
}
|
|
66
66
|
if (appDataSource.options.type === "sqlite" ||
|
|
67
|
+
appDataSource.options.type === "better-sqlite3" ||
|
|
67
68
|
appDataSource.options.type === "sqljs") {
|
|
68
69
|
sql =
|
|
69
70
|
"SELECT \n" +
|
package/dist/util/sql_utils.js
CHANGED
|
@@ -58,6 +58,7 @@ export const getTableAndColumnsName = async (appDataSource) => {
|
|
|
58
58
|
return formatToSqlTable(rep);
|
|
59
59
|
}
|
|
60
60
|
if (appDataSource.options.type === "sqlite" ||
|
|
61
|
+
appDataSource.options.type === "better-sqlite3" ||
|
|
61
62
|
appDataSource.options.type === "sqljs") {
|
|
62
63
|
sql =
|
|
63
64
|
"SELECT \n" +
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "langchain",
|
|
3
|
-
"version": "0.3.29",
|
|
3
|
+
"version": "0.3.31",
|
|
4
4
|
"description": "Typescript bindings for langchain",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"engines": {
|
|
@@ -440,7 +440,7 @@
|
|
|
440
440
|
"@types/ws": "^8",
|
|
441
441
|
"@typescript-eslint/eslint-plugin": "^5.58.0",
|
|
442
442
|
"@typescript-eslint/parser": "^5.58.0",
|
|
443
|
-
"axios": "^
|
|
443
|
+
"axios": "^1.11.0",
|
|
444
444
|
"cheerio": "1.0.0-rc.12",
|
|
445
445
|
"dotenv": "^16.0.3",
|
|
446
446
|
"dpdm": "^3.14.0",
|
|
@@ -540,12 +540,12 @@
|
|
|
540
540
|
}
|
|
541
541
|
},
|
|
542
542
|
"dependencies": {
|
|
543
|
-
"@langchain/openai": ">=0.1.0 <0.
|
|
543
|
+
"@langchain/openai": ">=0.1.0 <0.7.0",
|
|
544
544
|
"@langchain/textsplitters": ">=0.0.0 <0.2.0",
|
|
545
545
|
"js-tiktoken": "^1.0.12",
|
|
546
546
|
"js-yaml": "^4.1.0",
|
|
547
547
|
"jsonpointer": "^5.0.1",
|
|
548
|
-
"langsmith": "^0.3.
|
|
548
|
+
"langsmith": "^0.3.46",
|
|
549
549
|
"openapi-types": "^12.1.3",
|
|
550
550
|
"p-retry": "4",
|
|
551
551
|
"uuid": "^10.0.0",
|