langchain 0.0.195 → 0.0.197-rc.0
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- package/LICENSE +21 -0
- package/dist/agents/openai/index.cjs +6 -2
- package/dist/agents/openai/index.js +6 -2
- package/dist/agents/toolkits/conversational_retrieval/token_buffer_memory.d.ts +1 -1
- package/dist/base_language/count_tokens.cjs +5 -70
- package/dist/base_language/count_tokens.d.ts +1 -10
- package/dist/base_language/count_tokens.js +1 -65
- package/dist/base_language/index.cjs +6 -196
- package/dist/base_language/index.d.ts +1 -111
- package/dist/base_language/index.js +1 -191
- package/dist/cache/base.cjs +15 -37
- package/dist/cache/base.d.ts +1 -20
- package/dist/cache/base.js +1 -33
- package/dist/cache/index.cjs +2 -46
- package/dist/cache/index.d.ts +1 -29
- package/dist/cache/index.js +1 -45
- package/dist/callbacks/base.cjs +3 -139
- package/dist/callbacks/base.d.ts +1 -266
- package/dist/callbacks/base.js +1 -126
- package/dist/callbacks/handlers/console.cjs +14 -221
- package/dist/callbacks/handlers/console.d.ts +1 -117
- package/dist/callbacks/handlers/console.js +1 -217
- package/dist/callbacks/handlers/initialize.cjs +15 -30
- package/dist/callbacks/handlers/initialize.d.ts +1 -16
- package/dist/callbacks/handlers/initialize.js +1 -27
- package/dist/callbacks/handlers/log_stream.cjs +15 -293
- package/dist/callbacks/handlers/log_stream.d.ts +1 -100
- package/dist/callbacks/handlers/log_stream.js +1 -289
- package/dist/callbacks/handlers/run_collector.cjs +15 -48
- package/dist/callbacks/handlers/run_collector.d.ts +1 -26
- package/dist/callbacks/handlers/run_collector.js +1 -46
- package/dist/callbacks/handlers/tracer.cjs +15 -375
- package/dist/callbacks/handlers/tracer.d.ts +1 -70
- package/dist/callbacks/handlers/tracer.js +1 -373
- package/dist/callbacks/handlers/tracer_langchain.cjs +15 -104
- package/dist/callbacks/handlers/tracer_langchain.d.ts +1 -41
- package/dist/callbacks/handlers/tracer_langchain.js +1 -102
- package/dist/callbacks/handlers/tracer_langchain_v1.cjs +15 -197
- package/dist/callbacks/handlers/tracer_langchain_v1.d.ts +1 -57
- package/dist/callbacks/handlers/tracer_langchain_v1.js +1 -195
- package/dist/callbacks/manager.cjs +15 -676
- package/dist/callbacks/manager.d.ts +1 -180
- package/dist/callbacks/manager.js +1 -666
- package/dist/callbacks/promises.cjs +14 -42
- package/dist/callbacks/promises.d.ts +1 -11
- package/dist/callbacks/promises.js +1 -37
- package/dist/chains/graph_qa/prompts.d.ts +1 -1
- package/dist/chains/openai_functions/structured_output.cjs +2 -2
- package/dist/chains/openai_functions/structured_output.d.ts +1 -1
- package/dist/chains/openai_functions/structured_output.js +1 -1
- package/dist/chat_models/anthropic.cjs +15 -348
- package/dist/chat_models/anthropic.d.ts +1 -156
- package/dist/chat_models/anthropic.js +1 -346
- package/dist/chat_models/baiduwenxin.d.ts +1 -1
- package/dist/chat_models/base.cjs +15 -296
- package/dist/chat_models/base.d.ts +1 -122
- package/dist/chat_models/base.js +1 -292
- package/dist/chat_models/bedrock/web.cjs +21 -1
- package/dist/chat_models/bedrock/web.d.ts +2 -2
- package/dist/chat_models/bedrock/web.js +21 -1
- package/dist/chat_models/fireworks.d.ts +1 -1
- package/dist/document.cjs +2 -24
- package/dist/document.d.ts +1 -12
- package/dist/document.js +1 -23
- package/dist/document_loaders/web/azure_blob_storage_file.d.ts +1 -1
- package/dist/document_loaders/web/github.cjs +105 -0
- package/dist/document_loaders/web/github.d.ts +26 -0
- package/dist/document_loaders/web/github.js +105 -0
- package/dist/document_loaders/web/s3.d.ts +1 -1
- package/dist/embeddings/base.cjs +15 -22
- package/dist/embeddings/base.d.ts +1 -33
- package/dist/embeddings/base.js +1 -20
- package/dist/embeddings/cache_backed.cjs +2 -2
- package/dist/embeddings/cache_backed.js +1 -1
- package/dist/evaluation/agents/trajectory.d.ts +1 -1
- package/dist/evaluation/criteria/prompt.d.ts +2 -2
- package/dist/evaluation/qa/prompt.d.ts +2 -2
- package/dist/experimental/hubs/makersuite/googlemakersuitehub.d.ts +1 -1
- package/dist/experimental/plan_and_execute/prompt.d.ts +1 -1
- package/dist/llms/base.cjs +15 -278
- package/dist/llms/base.d.ts +1 -115
- package/dist/llms/base.js +1 -275
- package/dist/llms/bedrock/web.cjs +21 -1
- package/dist/llms/bedrock/web.d.ts +2 -2
- package/dist/llms/bedrock/web.js +21 -1
- package/dist/llms/fireworks.d.ts +1 -1
- package/dist/load/import_map.cjs +2 -1
- package/dist/load/import_map.d.ts +1 -0
- package/dist/load/import_map.js +1 -0
- package/dist/load/index.cjs +7 -148
- package/dist/load/index.js +7 -148
- package/dist/load/map_keys.cjs +0 -24
- package/dist/load/map_keys.d.ts +0 -6
- package/dist/load/map_keys.js +1 -17
- package/dist/load/serializable.cjs +15 -178
- package/dist/load/serializable.d.ts +1 -66
- package/dist/load/serializable.js +1 -175
- package/dist/memory/base.cjs +17 -92
- package/dist/memory/base.d.ts +2 -68
- package/dist/memory/base.js +2 -87
- package/dist/output_parsers/list.cjs +4 -122
- package/dist/output_parsers/list.d.ts +1 -57
- package/dist/output_parsers/list.js +1 -119
- package/dist/output_parsers/openai_functions.cjs +2 -2
- package/dist/output_parsers/openai_functions.d.ts +1 -1
- package/dist/output_parsers/openai_functions.js +1 -1
- package/dist/output_parsers/regex.d.ts +1 -1
- package/dist/output_parsers/structured.d.ts +1 -1
- package/dist/prompts/base.cjs +8 -183
- package/dist/prompts/base.d.ts +3 -132
- package/dist/prompts/base.js +3 -178
- package/dist/prompts/chat.cjs +13 -477
- package/dist/prompts/chat.d.ts +2 -219
- package/dist/prompts/chat.js +2 -466
- package/dist/prompts/few_shot.cjs +3 -352
- package/dist/prompts/few_shot.d.ts +1 -192
- package/dist/prompts/few_shot.js +1 -350
- package/dist/prompts/index.cjs +3 -2
- package/dist/prompts/index.d.ts +2 -1
- package/dist/prompts/index.js +2 -1
- package/dist/prompts/pipeline.cjs +2 -141
- package/dist/prompts/pipeline.d.ts +1 -98
- package/dist/prompts/pipeline.js +1 -140
- package/dist/prompts/prompt.cjs +2 -145
- package/dist/prompts/prompt.d.ts +1 -92
- package/dist/prompts/prompt.js +1 -144
- package/dist/prompts/selectors/LengthBasedExampleSelector.cjs +2 -147
- package/dist/prompts/selectors/LengthBasedExampleSelector.d.ts +1 -89
- package/dist/prompts/selectors/LengthBasedExampleSelector.js +1 -146
- package/dist/prompts/selectors/SemanticSimilarityExampleSelector.cjs +15 -137
- package/dist/prompts/selectors/SemanticSimilarityExampleSelector.d.ts +1 -91
- package/dist/prompts/selectors/SemanticSimilarityExampleSelector.js +1 -135
- package/dist/prompts/selectors/conditional.cjs +5 -73
- package/dist/prompts/selectors/conditional.d.ts +1 -63
- package/dist/prompts/selectors/conditional.js +1 -69
- package/dist/prompts/serde.d.ts +1 -43
- package/dist/prompts/template.cjs +8 -88
- package/dist/prompts/template.d.ts +1 -36
- package/dist/prompts/template.js +1 -83
- package/dist/{util/@cfworker/json-schema → runnables}/index.cjs +1 -1
- package/dist/runnables/index.d.ts +1 -0
- package/dist/runnables/index.js +1 -0
- package/dist/schema/document.cjs +3 -34
- package/dist/schema/document.d.ts +2 -29
- package/dist/schema/document.js +2 -32
- package/dist/schema/index.cjs +37 -612
- package/dist/schema/index.d.ts +11 -311
- package/dist/schema/index.js +8 -583
- package/dist/schema/output_parser.cjs +15 -309
- package/dist/schema/output_parser.d.ts +1 -173
- package/dist/schema/output_parser.js +1 -301
- package/dist/schema/retriever.cjs +15 -77
- package/dist/schema/retriever.d.ts +1 -43
- package/dist/schema/retriever.js +1 -75
- package/dist/schema/runnable/base.cjs +10 -1072
- package/dist/schema/runnable/base.d.ts +1 -356
- package/dist/schema/runnable/base.js +1 -1060
- package/dist/schema/runnable/branch.cjs +2 -131
- package/dist/schema/runnable/branch.d.ts +1 -94
- package/dist/schema/runnable/branch.js +1 -130
- package/dist/schema/runnable/config.cjs +0 -6
- package/dist/schema/runnable/config.d.ts +1 -3
- package/dist/schema/runnable/config.js +1 -4
- package/dist/schema/runnable/index.cjs +15 -16
- package/dist/schema/runnable/index.d.ts +1 -5
- package/dist/schema/runnable/index.js +1 -4
- package/dist/schema/runnable/passthrough.cjs +3 -113
- package/dist/schema/runnable/passthrough.d.ts +1 -72
- package/dist/schema/runnable/passthrough.js +1 -111
- package/dist/schema/runnable/router.cjs +2 -71
- package/dist/schema/runnable/router.d.ts +1 -29
- package/dist/schema/runnable/router.js +1 -70
- package/dist/schema/storage.cjs +15 -8
- package/dist/schema/storage.d.ts +1 -57
- package/dist/schema/storage.js +1 -6
- package/dist/tools/bingserpapi.d.ts +1 -1
- package/dist/tools/searchapi.d.ts +1 -1
- package/dist/tools/serpapi.d.ts +1 -1
- package/dist/tools/serper.d.ts +1 -1
- package/dist/util/async_caller.cjs +14 -128
- package/dist/util/async_caller.d.ts +1 -45
- package/dist/util/async_caller.js +1 -124
- package/dist/vectorstores/momento_vector_index.cjs +39 -0
- package/dist/vectorstores/momento_vector_index.d.ts +17 -1
- package/dist/vectorstores/momento_vector_index.js +40 -1
- package/dist/vectorstores/mongodb_atlas.cjs +22 -2
- package/dist/vectorstores/mongodb_atlas.d.ts +13 -0
- package/dist/vectorstores/mongodb_atlas.js +22 -2
- package/package.json +18 -11
- package/runnables.cjs +1 -0
- package/runnables.d.ts +1 -0
- package/runnables.js +1 -0
- package/dist/util/@cfworker/json-schema/index.d.ts +0 -1
- package/dist/util/@cfworker/json-schema/index.js +0 -1
- package/dist/util/@cfworker/json-schema/src/deep-compare-strict.cjs +0 -43
- package/dist/util/@cfworker/json-schema/src/deep-compare-strict.d.ts +0 -1
- package/dist/util/@cfworker/json-schema/src/deep-compare-strict.js +0 -39
- package/dist/util/@cfworker/json-schema/src/dereference.cjs +0 -169
- package/dist/util/@cfworker/json-schema/src/dereference.d.ts +0 -12
- package/dist/util/@cfworker/json-schema/src/dereference.js +0 -165
- package/dist/util/@cfworker/json-schema/src/format.cjs +0 -139
- package/dist/util/@cfworker/json-schema/src/format.d.ts +0 -2
- package/dist/util/@cfworker/json-schema/src/format.js +0 -136
- package/dist/util/@cfworker/json-schema/src/index.cjs +0 -24
- package/dist/util/@cfworker/json-schema/src/index.d.ts +0 -8
- package/dist/util/@cfworker/json-schema/src/index.js +0 -8
- package/dist/util/@cfworker/json-schema/src/pointer.cjs +0 -11
- package/dist/util/@cfworker/json-schema/src/pointer.d.ts +0 -2
- package/dist/util/@cfworker/json-schema/src/pointer.js +0 -6
- package/dist/util/@cfworker/json-schema/src/types.cjs +0 -2
- package/dist/util/@cfworker/json-schema/src/types.d.ts +0 -72
- package/dist/util/@cfworker/json-schema/src/types.js +0 -1
- package/dist/util/@cfworker/json-schema/src/ucs2-length.cjs +0 -28
- package/dist/util/@cfworker/json-schema/src/ucs2-length.d.ts +0 -6
- package/dist/util/@cfworker/json-schema/src/ucs2-length.js +0 -24
- package/dist/util/@cfworker/json-schema/src/validate.cjs +0 -808
- package/dist/util/@cfworker/json-schema/src/validate.d.ts +0 -3
- package/dist/util/@cfworker/json-schema/src/validate.js +0 -804
- package/dist/util/@cfworker/json-schema/src/validator.cjs +0 -44
- package/dist/util/@cfworker/json-schema/src/validator.d.ts +0 -10
- package/dist/util/@cfworker/json-schema/src/validator.js +0 -40
- package/dist/util/fast-json-patch/index.cjs +0 -49
- package/dist/util/fast-json-patch/index.d.ts +0 -22
- package/dist/util/fast-json-patch/index.js +0 -16
- package/dist/util/fast-json-patch/src/core.cjs +0 -469
- package/dist/util/fast-json-patch/src/core.d.ts +0 -111
- package/dist/util/fast-json-patch/src/core.js +0 -459
- package/dist/util/fast-json-patch/src/duplex.cjs +0 -237
- package/dist/util/fast-json-patch/src/duplex.d.ts +0 -23
- package/dist/util/fast-json-patch/src/duplex.js +0 -230
- package/dist/util/fast-json-patch/src/helpers.cjs +0 -194
- package/dist/util/fast-json-patch/src/helpers.d.ts +0 -36
- package/dist/util/fast-json-patch/src/helpers.js +0 -181
- package/dist/util/js-sha1/hash.cjs +0 -358
- package/dist/util/js-sha1/hash.d.ts +0 -1
- package/dist/util/js-sha1/hash.js +0 -355
package/dist/chat_models/base.d.ts
CHANGED

@@ -1,122 +1 @@
-
-import { BaseLanguageModel, BaseLanguageModelCallOptions, BaseLanguageModelInput, BaseLanguageModelParams } from "../base_language/index.js";
-import { CallbackManagerForLLMRun, Callbacks } from "../callbacks/manager.js";
-import { RunnableConfig } from "../schema/runnable/config.js";
-/**
- * Represents a serialized chat model.
- */
-export type SerializedChatModel = {
-    _model: string;
-    _type: string;
-} & Record<string, any>;
-/**
- * Represents a serialized large language model.
- */
-export type SerializedLLM = {
-    _model: string;
-    _type: string;
-} & Record<string, any>;
-/**
- * Represents the parameters for a base chat model.
- */
-export type BaseChatModelParams = BaseLanguageModelParams;
-/**
- * Represents the call options for a base chat model.
- */
-export type BaseChatModelCallOptions = BaseLanguageModelCallOptions;
-/**
- * Creates a transform stream for encoding chat message chunks.
- * @deprecated Use {@link BytesOutputParser} instead
- * @returns A TransformStream instance that encodes chat message chunks.
- */
-export declare function createChatMessageChunkEncoderStream(): TransformStream<BaseMessageChunk, any>;
-/**
- * Base class for chat models. It extends the BaseLanguageModel class and
- * provides methods for generating chat based on input messages.
- */
-export declare abstract class BaseChatModel<CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions> extends BaseLanguageModel<BaseMessageChunk, CallOptions> {
-    ParsedCallOptions: Omit<CallOptions, keyof RunnableConfig & "timeout">;
-    lc_namespace: string[];
-    constructor(fields: BaseChatModelParams);
-    abstract _combineLLMOutput?(...llmOutputs: LLMResult["llmOutput"][]): LLMResult["llmOutput"];
-    protected _separateRunnableConfigFromCallOptions(options?: Partial<CallOptions>): [RunnableConfig, this["ParsedCallOptions"]];
-    /**
-     * Invokes the chat model with a single input.
-     * @param input The input for the language model.
-     * @param options The call options.
-     * @returns A Promise that resolves to a BaseMessageChunk.
-     */
-    invoke(input: BaseLanguageModelInput, options?: CallOptions): Promise<BaseMessageChunk>;
-    _streamResponseChunks(_messages: BaseMessage[], _options: this["ParsedCallOptions"], _runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
-    _streamIterator(input: BaseLanguageModelInput, options?: CallOptions): AsyncGenerator<BaseMessageChunk>;
-    /** @ignore */
-    _generateUncached(messages: BaseMessageLike[][], parsedOptions: this["ParsedCallOptions"], handledOptions: RunnableConfig): Promise<LLMResult>;
-    /**
-     * Generates chat based on the input messages.
-     * @param messages An array of arrays of BaseMessage instances.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to an LLMResult.
-     */
-    generate(messages: BaseMessageLike[][], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;
-    /**
-     * Get the parameters used to invoke the model
-     */
-    invocationParams(_options?: this["ParsedCallOptions"]): any;
-    _modelType(): string;
-    abstract _llmType(): string;
-    /**
-     * @deprecated
-     * Return a json-like object representing this LLM.
-     */
-    serialize(): SerializedLLM;
-    /**
-     * Generates a prompt based on the input prompt values.
-     * @param promptValues An array of BasePromptValue instances.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to an LLMResult.
-     */
-    generatePrompt(promptValues: BasePromptValue[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<LLMResult>;
-    abstract _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
-    /**
-     * Makes a single call to the chat model.
-     * @param messages An array of BaseMessage instances.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to a BaseMessage.
-     */
-    call(messages: BaseMessageLike[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<BaseMessage>;
-    /**
-     * Makes a single call to the chat model with a prompt value.
-     * @param promptValue The value of the prompt.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to a BaseMessage.
-     */
-    callPrompt(promptValue: BasePromptValue, options?: string[] | CallOptions, callbacks?: Callbacks): Promise<BaseMessage>;
-    /**
-     * Predicts the next message based on the input messages.
-     * @param messages An array of BaseMessage instances.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to a BaseMessage.
-     */
-    predictMessages(messages: BaseMessage[], options?: string[] | CallOptions, callbacks?: Callbacks): Promise<BaseMessage>;
-    /**
-     * Predicts the next message based on a text input.
-     * @param text The text input.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to a string.
-     */
-    predict(text: string, options?: string[] | CallOptions, callbacks?: Callbacks): Promise<string>;
-}
-/**
- * An abstract class that extends BaseChatModel and provides a simple
- * implementation of _generate.
- */
-export declare abstract class SimpleChatModel<CallOptions extends BaseChatModelCallOptions = BaseChatModelCallOptions> extends BaseChatModel<CallOptions> {
-    abstract _call(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<string>;
-    _generate(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): Promise<ChatResult>;
-}
+export * from "langchain-core/language_models/chat_models";
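The removed declarations above are the full BaseChatModel/SimpleChatModel surface that langchain now re-exports from langchain-core, so existing subclasses should keep compiling unchanged. Below is a minimal, hypothetical sketch of a custom chat model written against that re-exported surface; the EchoChatModel class and the import paths are illustrative assumptions, not part of this diff.

// Hypothetical sketch only: a trivial chat model built on the re-exported base classes.
import { SimpleChatModel, type BaseChatModelParams } from "langchain/chat_models/base";
import type { BaseMessage } from "langchain/schema";

class EchoChatModel extends SimpleChatModel {
  constructor(fields: BaseChatModelParams = {}) {
    super(fields);
  }

  _llmType(): string {
    return "echo";
  }

  _combineLLMOutput() {
    return {};
  }

  // SimpleChatModel only requires _call; _generate and the Runnable plumbing
  // come from the base classes whose declarations are shown above.
  async _call(messages: BaseMessage[]): Promise<string> {
    const last = messages[messages.length - 1];
    return typeof last.content === "string" ? last.content : JSON.stringify(last.content);
  }
}

Per the declarations, only _call, _llmType, and _combineLLMOutput are abstract; everything else (invoke, generate, call, predict, streaming fallbacks) is inherited.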
package/dist/chat_models/base.js
CHANGED
@@ -1,292 +1 @@
-
-import { BaseLanguageModel, } from "../base_language/index.js";
-import { CallbackManager, } from "../callbacks/manager.js";
-/**
- * Creates a transform stream for encoding chat message chunks.
- * @deprecated Use {@link BytesOutputParser} instead
- * @returns A TransformStream instance that encodes chat message chunks.
- */
-export function createChatMessageChunkEncoderStream() {
-    const textEncoder = new TextEncoder();
-    return new TransformStream({
-        transform(chunk, controller) {
-            controller.enqueue(textEncoder.encode(typeof chunk.content === "string"
-                ? chunk.content
-                : JSON.stringify(chunk.content)));
-        },
-    });
-}
-/**
- * Base class for chat models. It extends the BaseLanguageModel class and
- * provides methods for generating chat based on input messages.
- */
-export class BaseChatModel extends BaseLanguageModel {
-    constructor(fields) {
-        super(fields);
-        Object.defineProperty(this, "lc_namespace", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: ["langchain", "chat_models", this._llmType()]
-        });
-    }
-    _separateRunnableConfigFromCallOptions(options) {
-        const [runnableConfig, callOptions] = super._separateRunnableConfigFromCallOptions(options);
-        if (callOptions?.timeout && !callOptions.signal) {
-            callOptions.signal = AbortSignal.timeout(callOptions.timeout);
-        }
-        return [runnableConfig, callOptions];
-    }
-    /**
-     * Invokes the chat model with a single input.
-     * @param input The input for the language model.
-     * @param options The call options.
-     * @returns A Promise that resolves to a BaseMessageChunk.
-     */
-    async invoke(input, options) {
-        const promptValue = BaseChatModel._convertInputToPromptValue(input);
-        const result = await this.generatePrompt([promptValue], options, options?.callbacks);
-        const chatGeneration = result.generations[0][0];
-        // TODO: Remove cast after figuring out inheritance
-        return chatGeneration.message;
-    }
-    // eslint-disable-next-line require-yield
-    async *_streamResponseChunks(_messages, _options, _runManager) {
-        throw new Error("Not implemented.");
-    }
-    async *_streamIterator(input, options) {
-        // Subclass check required to avoid double callbacks with default implementation
-        if (this._streamResponseChunks ===
-            BaseChatModel.prototype._streamResponseChunks) {
-            yield this.invoke(input, options);
-        }
-        else {
-            const prompt = BaseChatModel._convertInputToPromptValue(input);
-            const messages = prompt.toChatMessages();
-            const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(options);
-            const callbackManager_ = await CallbackManager.configure(runnableConfig.callbacks, this.callbacks, runnableConfig.tags, this.tags, runnableConfig.metadata, this.metadata, { verbose: this.verbose });
-            const extra = {
-                options: callOptions,
-                invocation_params: this?.invocationParams(callOptions),
-                batch_size: 1,
-            };
-            const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), [messages], undefined, undefined, extra, undefined, undefined, runnableConfig.runName);
-            let generationChunk;
-            try {
-                for await (const chunk of this._streamResponseChunks(messages, callOptions, runManagers?.[0])) {
-                    yield chunk.message;
-                    if (!generationChunk) {
-                        generationChunk = chunk;
-                    }
-                    else {
-                        generationChunk = generationChunk.concat(chunk);
-                    }
-                }
-            }
-            catch (err) {
-                await Promise.all((runManagers ?? []).map((runManager) => runManager?.handleLLMError(err)));
-                throw err;
-            }
-            await Promise.all((runManagers ?? []).map((runManager) => runManager?.handleLLMEnd({
-                // TODO: Remove cast after figuring out inheritance
-                generations: [[generationChunk]],
-            })));
-        }
-    }
-    /** @ignore */
-    async _generateUncached(messages, parsedOptions, handledOptions) {
-        const baseMessages = messages.map((messageList) => messageList.map(coerceMessageLikeToMessage));
-        // create callback manager and start run
-        const callbackManager_ = await CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, this.metadata, { verbose: this.verbose });
-        const extra = {
-            options: parsedOptions,
-            invocation_params: this?.invocationParams(parsedOptions),
-            batch_size: 1,
-        };
-        const runManagers = await callbackManager_?.handleChatModelStart(this.toJSON(), baseMessages, undefined, undefined, extra, undefined, undefined, handledOptions.runName);
-        // generate results
-        const results = await Promise.allSettled(baseMessages.map((messageList, i) => this._generate(messageList, { ...parsedOptions, promptIndex: i }, runManagers?.[i])));
-        // handle results
-        const generations = [];
-        const llmOutputs = [];
-        await Promise.all(results.map(async (pResult, i) => {
-            if (pResult.status === "fulfilled") {
-                const result = pResult.value;
-                generations[i] = result.generations;
-                llmOutputs[i] = result.llmOutput;
-                return runManagers?.[i]?.handleLLMEnd({
-                    generations: [result.generations],
-                    llmOutput: result.llmOutput,
-                });
-            }
-            else {
-                // status === "rejected"
-                await runManagers?.[i]?.handleLLMError(pResult.reason);
-                return Promise.reject(pResult.reason);
-            }
-        }));
-        // create combined output
-        const output = {
-            generations,
-            llmOutput: llmOutputs.length
-                ? this._combineLLMOutput?.(...llmOutputs)
-                : undefined,
-        };
-        Object.defineProperty(output, RUN_KEY, {
-            value: runManagers
-                ? { runIds: runManagers?.map((manager) => manager.runId) }
-                : undefined,
-            configurable: true,
-        });
-        return output;
-    }
-    /**
-     * Generates chat based on the input messages.
-     * @param messages An array of arrays of BaseMessage instances.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to an LLMResult.
-     */
-    async generate(messages, options, callbacks) {
-        // parse call options
-        let parsedOptions;
-        if (Array.isArray(options)) {
-            parsedOptions = { stop: options };
-        }
-        else {
-            parsedOptions = options;
-        }
-        const baseMessages = messages.map((messageList) => messageList.map(coerceMessageLikeToMessage));
-        const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(parsedOptions);
-        runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;
-        if (!this.cache) {
-            return this._generateUncached(baseMessages, callOptions, runnableConfig);
-        }
-        const { cache } = this;
-        const llmStringKey = this._getSerializedCacheKeyParametersForCall(callOptions);
-        const missingPromptIndices = [];
-        const generations = await Promise.all(baseMessages.map(async (baseMessage, index) => {
-            // Join all content into one string for the prompt index
-            const prompt = BaseChatModel._convertInputToPromptValue(baseMessage).toString();
-            const result = await cache.lookup(prompt, llmStringKey);
-            if (!result) {
-                missingPromptIndices.push(index);
-            }
-            return result;
-        }));
-        let llmOutput = {};
-        if (missingPromptIndices.length > 0) {
-            const results = await this._generateUncached(missingPromptIndices.map((i) => baseMessages[i]), callOptions, runnableConfig);
-            await Promise.all(results.generations.map(async (generation, index) => {
-                const promptIndex = missingPromptIndices[index];
-                generations[promptIndex] = generation;
-                // Join all content into one string for the prompt index
-                const prompt = BaseChatModel._convertInputToPromptValue(baseMessages[promptIndex]).toString();
-                return cache.update(prompt, llmStringKey, generation);
-            }));
-            llmOutput = results.llmOutput ?? {};
-        }
-        return { generations, llmOutput };
-    }
-    /**
-     * Get the parameters used to invoke the model
-     */
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    invocationParams(_options) {
-        return {};
-    }
-    _modelType() {
-        return "base_chat_model";
-    }
-    /**
-     * @deprecated
-     * Return a json-like object representing this LLM.
-     */
-    serialize() {
-        return {
-            ...this.invocationParams(),
-            _type: this._llmType(),
-            _model: this._modelType(),
-        };
-    }
-    /**
-     * Generates a prompt based on the input prompt values.
-     * @param promptValues An array of BasePromptValue instances.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to an LLMResult.
-     */
-    async generatePrompt(promptValues, options, callbacks) {
-        const promptMessages = promptValues.map((promptValue) => promptValue.toChatMessages());
-        return this.generate(promptMessages, options, callbacks);
-    }
-    /**
-     * Makes a single call to the chat model.
-     * @param messages An array of BaseMessage instances.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to a BaseMessage.
-     */
-    async call(messages, options, callbacks) {
-        const result = await this.generate([messages.map(coerceMessageLikeToMessage)], options, callbacks);
-        const generations = result.generations;
-        return generations[0][0].message;
-    }
-    /**
-     * Makes a single call to the chat model with a prompt value.
-     * @param promptValue The value of the prompt.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to a BaseMessage.
-     */
-    async callPrompt(promptValue, options, callbacks) {
-        const promptMessages = promptValue.toChatMessages();
-        return this.call(promptMessages, options, callbacks);
-    }
-    /**
-     * Predicts the next message based on the input messages.
-     * @param messages An array of BaseMessage instances.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to a BaseMessage.
-     */
-    async predictMessages(messages, options, callbacks) {
-        return this.call(messages, options, callbacks);
-    }
-    /**
-     * Predicts the next message based on a text input.
-     * @param text The text input.
-     * @param options The call options or an array of stop sequences.
-     * @param callbacks The callbacks for the language model.
-     * @returns A Promise that resolves to a string.
-     */
-    async predict(text, options, callbacks) {
-        const message = new HumanMessage(text);
-        const result = await this.call([message], options, callbacks);
-        if (typeof result.content !== "string") {
-            throw new Error("Cannot use predict when output is not a string.");
-        }
-        return result.content;
-    }
-}
-/**
- * An abstract class that extends BaseChatModel and provides a simple
- * implementation of _generate.
- */
-export class SimpleChatModel extends BaseChatModel {
-    async _generate(messages, options, runManager) {
-        const text = await this._call(messages, options, runManager);
-        const message = new AIMessage(text);
-        if (typeof message.content !== "string") {
-            throw new Error("Cannot generate with a simple chat model when output is not a string.");
-        }
-        return {
-            generations: [
-                {
-                    text: message.content,
-                    message,
-                },
-            ],
-        };
-    }
-}
+export * from "langchain-core/language_models/chat_models";
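Two details of the removed implementation are easy to miss once the file becomes a re-export: generate()/call() accept either a call-options object or a bare string[] of stop sequences, and _separateRunnableConfigFromCallOptions converts a timeout option into an AbortSignal. A hedged usage sketch follows; model stands for any concrete BaseChatModel subclass and is not part of this diff.

// Hypothetical sketch of the two options shapes handled by the removed generate()/call().
import { HumanMessage } from "langchain/schema";

declare const model: import("langchain/chat_models/base").BaseChatModel;

// 1) A plain string[] is interpreted as { stop: [...] }.
const viaStopArray = await model.call([new HumanMessage("Count to ten")], ["5"]);

// 2) A call-options object can carry stop, timeout, callbacks, etc.;
//    timeout is turned into an AbortSignal before the provider call.
const viaOptions = await model.call([new HumanMessage("Count to ten")], {
  stop: ["5"],
  timeout: 10_000,
});

When this.cache is set, the removed generate() also keys cached results by the stringified prompt plus the serialized call parameters, and only re-generates the prompts missing from the cache.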
package/dist/chat_models/bedrock/web.cjs
CHANGED

@@ -321,11 +321,31 @@ class BedrockChat extends base_js_1.SimpleChatModel
     }
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
     _readChunks(reader) {
+        function _concatChunks(a, b) {
+            const newBuffer = new Uint8Array(a.length + b.length);
+            newBuffer.set(a);
+            newBuffer.set(b, a.length);
+            return newBuffer;
+        }
+        function getMessageLength(buffer) {
+            if (buffer.byteLength === 0)
+                return 0;
+            const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
+            return view.getUint32(0, false);
+        }
         return {
             async *[Symbol.asyncIterator]() {
                 let readResult = await reader.read();
+                let buffer = new Uint8Array(0);
                 while (!readResult.done) {
-
+                    const chunk = readResult.value;
+                    buffer = _concatChunks(buffer, chunk);
+                    let messageLength = getMessageLength(buffer);
+                    while (buffer.byteLength > 0 && buffer.byteLength >= messageLength) {
+                        yield buffer.slice(0, messageLength);
+                        buffer = buffer.slice(messageLength);
+                        messageLength = getMessageLength(buffer);
+                    }
                     readResult = await reader.read();
                 }
             },
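The helpers added to _readChunks re-frame the raw byte stream: getMessageLength reads a 4-byte big-endian total length from the front of the buffer (the prelude layout used by the AWS event-stream encoding), and _concatChunks stitches partial reads together until at least one complete frame is buffered, so each yielded value is a whole message rather than an arbitrary read. The standalone sketch below replays the same splitting logic over fabricated frames; the function names and test data are illustrative only, and the length guard is slightly stricter than the diff's byteLength === 0 check.

// Standalone sketch of the framing logic added above (not part of the package).
function concatChunks(a: Uint8Array, b: Uint8Array): Uint8Array {
  const merged = new Uint8Array(a.length + b.length);
  merged.set(a);
  merged.set(b, a.length);
  return merged;
}

function getMessageLength(buffer: Uint8Array): number {
  // Sketch uses < 4 (stricter than the diff's === 0) so DataView never reads past the buffer.
  if (buffer.byteLength < 4) return 0;
  return new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength).getUint32(0, false);
}

function* splitFrames(reads: Uint8Array[]): Generator<Uint8Array> {
  let buffer = new Uint8Array(0);
  for (const chunk of reads) {
    buffer = concatChunks(buffer, chunk);
    let messageLength = getMessageLength(buffer);
    while (messageLength > 0 && buffer.byteLength >= messageLength) {
      yield buffer.slice(0, messageLength);     // emit one complete frame
      buffer = buffer.slice(messageLength);     // keep the remainder for the next iteration
      messageLength = getMessageLength(buffer);
    }
  }
}

// Two fabricated 8-byte frames (each declares a total length of 8), delivered across awkward read boundaries.
const frameA = new Uint8Array([0, 0, 0, 8, 1, 1, 1, 1]);
const frameB = new Uint8Array([0, 0, 0, 8, 2, 2, 2, 2]);
const reads = [frameA.slice(0, 5), concatChunks(frameA.slice(5), frameB.slice(0, 2)), frameB.slice(2)];
for (const frame of splitFrames(reads)) {
  console.log(frame); // prints the two reassembled frames, in order
}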
package/dist/chat_models/bedrock/web.d.ts
CHANGED

@@ -3,7 +3,7 @@ import { BaseBedrockInput, type CredentialType } from "../../util/bedrock.js";
 import { SimpleChatModel, BaseChatModelParams } from "../base.js";
 import { CallbackManagerForLLMRun } from "../../callbacks/manager.js";
 import { BaseMessage, ChatGenerationChunk } from "../../schema/index.js";
-import { SerializedFields } from "../../load/map_keys.js";
+import type { SerializedFields } from "../../load/map_keys.js";
 export declare function convertMessagesToPromptAnthropic(messages: BaseMessage[], humanPrompt?: string, aiPrompt?: string): string;
 /**
  * Function that converts an array of messages into a single string prompt

@@ -72,7 +72,7 @@ export declare class BedrockChat extends SimpleChatModel implements BaseBedrockI
     }): Promise<Response>;
     _streamResponseChunks(messages: BaseMessage[], options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
     _readChunks(reader: any): {
-        [Symbol.asyncIterator](): AsyncGenerator<
+        [Symbol.asyncIterator](): AsyncGenerator<Uint8Array, void, unknown>;
     };
     _combineLLMOutput(): {};
 }
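The first hunk only switches SerializedFields to a type-only import: the symbol is used purely in type positions, so TypeScript erases it at emit time and the compiled output does not need a runtime import of map_keys.js for it. A minimal illustration (the fieldNames helper is hypothetical, not from the package):

import type { SerializedFields } from "../../load/map_keys.js";

// Only the type is referenced, so nothing from map_keys.js survives into the emitted JavaScript.
export function fieldNames(fields: SerializedFields): string[] {
  return Object.keys(fields);
}

The second hunk tightens the _readChunks iterator signature to AsyncGenerator<Uint8Array, void, unknown>, matching the frame-buffering change shown above.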
package/dist/chat_models/bedrock/web.js
CHANGED

@@ -316,11 +316,31 @@ export class BedrockChat extends SimpleChatModel
     }
     // eslint-disable-next-line @typescript-eslint/no-explicit-any
     _readChunks(reader) {
+        function _concatChunks(a, b) {
+            const newBuffer = new Uint8Array(a.length + b.length);
+            newBuffer.set(a);
+            newBuffer.set(b, a.length);
+            return newBuffer;
+        }
+        function getMessageLength(buffer) {
+            if (buffer.byteLength === 0)
+                return 0;
+            const view = new DataView(buffer.buffer, buffer.byteOffset, buffer.byteLength);
+            return view.getUint32(0, false);
+        }
         return {
             async *[Symbol.asyncIterator]() {
                 let readResult = await reader.read();
+                let buffer = new Uint8Array(0);
                 while (!readResult.done) {
-
+                    const chunk = readResult.value;
+                    buffer = _concatChunks(buffer, chunk);
+                    let messageLength = getMessageLength(buffer);
+                    while (buffer.byteLength > 0 && buffer.byteLength >= messageLength) {
+                        yield buffer.slice(0, messageLength);
+                        buffer = buffer.slice(messageLength);
+                        messageLength = getMessageLength(buffer);
+                    }
                     readResult = await reader.read();
                 }
             },
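web.js receives the identical buffering change as web.cjs above, in ESM form. For context, here is a hypothetical end-to-end sketch of the streaming path these frames feed; the constructor fields (model, region, credentials) follow the BaseBedrockInput interface referenced in the d.ts diff, the model id is a placeholder, and none of it is asserted by this diff.

// Hypothetical usage sketch only; field names and model id are assumptions for illustration.
import { BedrockChat } from "langchain/chat_models/bedrock/web";
import { HumanMessage } from "langchain/schema";

const chat = new BedrockChat({
  model: "anthropic.claude-v2",
  region: "us-east-1",
  credentials: {
    accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID!,
    secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY!,
  },
});

// Each frame reassembled by _readChunks ultimately surfaces here as a streamed chunk.
const stream = await chat.stream([new HumanMessage("Tell me a short joke")]);
for await (const chunk of stream) {
  process.stdout.write(typeof chunk.content === "string" ? chunk.content : "");
}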
package/dist/chat_models/fireworks.d.ts
CHANGED

@@ -36,7 +36,7 @@ export declare class ChatFireworks extends ChatOpenAI<ChatFireworksCallOptions>
     constructor(fields?: Partial<Omit<OpenAIChatInput, "openAIApiKey" | FireworksUnsupportedArgs>> & BaseChatModelParams & {
         fireworksApiKey?: string;
     });
-    toJSON(): import("
+    toJSON(): import("langchain-core/load/serializable").Serialized;
     completionWithRetry(request: OpenAIClient.Chat.ChatCompletionCreateParamsStreaming, options?: OpenAICoreRequestOptions): Promise<AsyncIterable<OpenAIClient.Chat.Completions.ChatCompletionChunk>>;
     completionWithRetry(request: OpenAIClient.Chat.ChatCompletionCreateParamsNonStreaming, options?: OpenAICoreRequestOptions): Promise<OpenAIClient.Chat.Completions.ChatCompletion>;
 }
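The only change here is the declared return type of toJSON(), which now points at the Serialized type from langchain-core/load/serializable. A hypothetical construction sketch based on the constructor signature in the context lines above; the entry-point path and model name are assumptions for illustration.

// Hypothetical sketch; the model name is a placeholder, not taken from this diff.
import { ChatFireworks } from "langchain/chat_models/fireworks";

const model = new ChatFireworks({
  fireworksApiKey: process.env.FIREWORKS_API_KEY, // assumed to come from an env var here
  modelName: "accounts/fireworks/models/llama-v2-13b-chat",
  temperature: 0,
});

// toJSON() is now typed as the Serialized shape from langchain-core/load/serializable.
console.log(JSON.stringify(model.toJSON(), null, 2));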
package/dist/document.cjs
CHANGED
@@ -1,27 +1,5 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.Document = void 0;
-
-
- */
-class Document {
-    constructor(fields) {
-        Object.defineProperty(this, "pageContent", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "metadata", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.pageContent = fields.pageContent
-            ? fields.pageContent.toString()
-            : this.pageContent;
-        this.metadata = fields.metadata ?? {};
-    }
-}
-exports.Document = Document;
+var documents_1 = require("langchain-core/documents");
+Object.defineProperty(exports, "Document", { enumerable: true, get: function () { return documents_1.Document; } });
package/dist/document.d.ts
CHANGED
@@ -1,12 +1 @@
-export
-    pageContent: string;
-    metadata?: Metadata;
-}
-/**
- * Interface for interacting with a document.
- */
-export declare class Document<Metadata extends Record<string, any> = Record<string, any>> implements DocumentInput {
-    pageContent: string;
-    metadata: Metadata;
-    constructor(fields: DocumentInput<Metadata>);
-}
+export { type DocumentInput, Document } from "langchain-core/documents";
package/dist/document.js
CHANGED
@@ -1,23 +1 @@
-
- * Interface for interacting with a document.
- */
-export class Document {
-    constructor(fields) {
-        Object.defineProperty(this, "pageContent", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "metadata", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.pageContent = fields.pageContent
-            ? fields.pageContent.toString()
-            : this.pageContent;
-        this.metadata = fields.metadata ?? {};
-    }
-}
+export { Document } from "langchain-core/documents";
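All three document entry points (document.cjs, document.d.ts, document.js) are now thin re-exports of the langchain-core Document. The removed constructor shows the behavior callers rely on: pageContent is coerced to a string and metadata defaults to an empty object, which the re-exported class is expected to preserve. A small sketch:

// Sketch of the constructor behavior shown in the removed code.
import { Document } from "langchain/document";

const doc = new Document({
  pageContent: "Document is now re-exported from langchain-core.",
  metadata: { source: "example.txt", line: 1 },
});

const bare = new Document({ pageContent: "no metadata supplied" });
console.log(doc.metadata.source); // "example.txt"
console.log(bare.metadata);       // {} (metadata defaults to an empty object)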
package/dist/document_loaders/web/azure_blob_storage_file.d.ts
CHANGED

@@ -47,6 +47,6 @@ export declare class AzureBlobStorageFileLoader extends BaseDocumentLoader {
      * are returned, and the temporary directory is deleted.
      * @returns An array of documents loaded from the file in Azure Blob Storage.
      */
-    load(): Promise<import("
+    load(): Promise<import("langchain-core/documents").Document<Record<string, any>>[]>;
 }
 export {};