langchain 0.0.194 → 0.0.196
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents/toolkits/conversational_retrieval/token_buffer_memory.d.ts +1 -1
- package/dist/base_language/count_tokens.cjs +5 -70
- package/dist/base_language/count_tokens.d.ts +1 -10
- package/dist/base_language/count_tokens.js +1 -65
- package/dist/base_language/index.cjs +6 -196
- package/dist/base_language/index.d.ts +1 -111
- package/dist/base_language/index.js +1 -191
- package/dist/cache/base.cjs +15 -37
- package/dist/cache/base.d.ts +1 -20
- package/dist/cache/base.js +1 -33
- package/dist/cache/index.cjs +2 -46
- package/dist/cache/index.d.ts +1 -29
- package/dist/cache/index.js +1 -45
- package/dist/callbacks/base.cjs +3 -139
- package/dist/callbacks/base.d.ts +1 -266
- package/dist/callbacks/base.js +1 -126
- package/dist/callbacks/handlers/console.cjs +14 -221
- package/dist/callbacks/handlers/console.d.ts +1 -117
- package/dist/callbacks/handlers/console.js +1 -217
- package/dist/callbacks/handlers/initialize.cjs +15 -30
- package/dist/callbacks/handlers/initialize.d.ts +1 -16
- package/dist/callbacks/handlers/initialize.js +1 -27
- package/dist/callbacks/handlers/log_stream.cjs +15 -293
- package/dist/callbacks/handlers/log_stream.d.ts +1 -100
- package/dist/callbacks/handlers/log_stream.js +1 -289
- package/dist/callbacks/handlers/run_collector.cjs +15 -48
- package/dist/callbacks/handlers/run_collector.d.ts +1 -26
- package/dist/callbacks/handlers/run_collector.js +1 -46
- package/dist/callbacks/handlers/tracer.cjs +15 -375
- package/dist/callbacks/handlers/tracer.d.ts +1 -70
- package/dist/callbacks/handlers/tracer.js +1 -373
- package/dist/callbacks/handlers/tracer_langchain.cjs +15 -104
- package/dist/callbacks/handlers/tracer_langchain.d.ts +1 -41
- package/dist/callbacks/handlers/tracer_langchain.js +1 -102
- package/dist/callbacks/handlers/tracer_langchain_v1.cjs +15 -197
- package/dist/callbacks/handlers/tracer_langchain_v1.d.ts +1 -57
- package/dist/callbacks/handlers/tracer_langchain_v1.js +1 -195
- package/dist/callbacks/manager.cjs +15 -676
- package/dist/callbacks/manager.d.ts +1 -180
- package/dist/callbacks/manager.js +1 -666
- package/dist/callbacks/promises.cjs +14 -42
- package/dist/callbacks/promises.d.ts +1 -11
- package/dist/callbacks/promises.js +1 -37
- package/dist/chains/graph_qa/prompts.d.ts +1 -1
- package/dist/chains/openai_functions/structured_output.cjs +2 -2
- package/dist/chains/openai_functions/structured_output.d.ts +1 -1
- package/dist/chains/openai_functions/structured_output.js +1 -1
- package/dist/chat_models/baiduwenxin.d.ts +1 -1
- package/dist/chat_models/base.cjs +15 -296
- package/dist/chat_models/base.d.ts +1 -122
- package/dist/chat_models/base.js +1 -292
- package/dist/chat_models/bedrock/web.d.ts +1 -1
- package/dist/chat_models/fireworks.d.ts +1 -1
- package/dist/document.cjs +2 -24
- package/dist/document.d.ts +1 -12
- package/dist/document.js +1 -23
- package/dist/document_loaders/web/azure_blob_storage_file.d.ts +1 -1
- package/dist/document_loaders/web/s3.d.ts +1 -1
- package/dist/embeddings/base.cjs +15 -22
- package/dist/embeddings/base.d.ts +1 -33
- package/dist/embeddings/base.js +1 -20
- package/dist/embeddings/cache_backed.cjs +2 -2
- package/dist/embeddings/cache_backed.js +1 -1
- package/dist/embeddings/hf.cjs +1 -2
- package/dist/embeddings/hf.js +1 -2
- package/dist/evaluation/agents/trajectory.d.ts +1 -1
- package/dist/evaluation/criteria/prompt.d.ts +2 -2
- package/dist/evaluation/qa/prompt.d.ts +2 -2
- package/dist/experimental/hubs/makersuite/googlemakersuitehub.d.ts +1 -1
- package/dist/experimental/openai_assistant/index.cjs +2 -1
- package/dist/experimental/openai_assistant/index.d.ts +2 -1
- package/dist/experimental/openai_assistant/index.js +2 -1
- package/dist/experimental/openai_files/index.cjs +88 -0
- package/dist/experimental/openai_files/index.d.ts +79 -0
- package/dist/experimental/openai_files/index.js +84 -0
- package/dist/experimental/plan_and_execute/prompt.d.ts +1 -1
- package/dist/llms/base.cjs +15 -278
- package/dist/llms/base.d.ts +1 -115
- package/dist/llms/base.js +1 -275
- package/dist/llms/bedrock/web.d.ts +1 -1
- package/dist/llms/fireworks.d.ts +1 -1
- package/dist/load/import_constants.cjs +1 -0
- package/dist/load/import_constants.js +1 -0
- package/dist/load/import_map.cjs +4 -2
- package/dist/load/import_map.d.ts +2 -0
- package/dist/load/import_map.js +2 -0
- package/dist/load/index.cjs +7 -148
- package/dist/load/index.js +7 -148
- package/dist/load/map_keys.cjs +0 -24
- package/dist/load/map_keys.d.ts +0 -6
- package/dist/load/map_keys.js +1 -17
- package/dist/load/serializable.cjs +15 -178
- package/dist/load/serializable.d.ts +1 -66
- package/dist/load/serializable.js +1 -175
- package/dist/memory/base.cjs +17 -92
- package/dist/memory/base.d.ts +2 -68
- package/dist/memory/base.js +2 -87
- package/dist/output_parsers/openai_functions.cjs +2 -2
- package/dist/output_parsers/openai_functions.d.ts +1 -1
- package/dist/output_parsers/openai_functions.js +1 -1
- package/dist/output_parsers/regex.d.ts +1 -1
- package/dist/output_parsers/structured.d.ts +1 -1
- package/dist/prompts/base.cjs +8 -183
- package/dist/prompts/base.d.ts +3 -132
- package/dist/prompts/base.js +3 -178
- package/dist/prompts/chat.cjs +15 -477
- package/dist/prompts/chat.d.ts +1 -219
- package/dist/prompts/chat.js +1 -466
- package/dist/prompts/few_shot.cjs +15 -353
- package/dist/prompts/few_shot.d.ts +1 -192
- package/dist/prompts/few_shot.js +1 -350
- package/dist/prompts/index.cjs +3 -2
- package/dist/prompts/index.d.ts +2 -1
- package/dist/prompts/index.js +2 -1
- package/dist/prompts/pipeline.cjs +15 -142
- package/dist/prompts/pipeline.d.ts +1 -98
- package/dist/prompts/pipeline.js +1 -140
- package/dist/prompts/prompt.cjs +15 -146
- package/dist/prompts/prompt.d.ts +1 -92
- package/dist/prompts/prompt.js +1 -144
- package/dist/prompts/selectors/LengthBasedExampleSelector.cjs +15 -148
- package/dist/prompts/selectors/LengthBasedExampleSelector.d.ts +1 -89
- package/dist/prompts/selectors/LengthBasedExampleSelector.js +1 -146
- package/dist/prompts/selectors/SemanticSimilarityExampleSelector.cjs +15 -137
- package/dist/prompts/selectors/SemanticSimilarityExampleSelector.d.ts +1 -91
- package/dist/prompts/selectors/SemanticSimilarityExampleSelector.js +1 -135
- package/dist/prompts/selectors/conditional.cjs +15 -74
- package/dist/prompts/selectors/conditional.d.ts +1 -63
- package/dist/prompts/selectors/conditional.js +1 -69
- package/dist/prompts/serde.cjs +15 -0
- package/dist/prompts/serde.d.ts +1 -43
- package/dist/prompts/serde.js +1 -1
- package/dist/prompts/template.cjs +14 -88
- package/dist/prompts/template.d.ts +1 -36
- package/dist/prompts/template.js +1 -83
- package/dist/retrievers/chaindesk.cjs +9 -1
- package/dist/retrievers/chaindesk.d.ts +3 -1
- package/dist/retrievers/chaindesk.js +9 -1
- package/dist/retrievers/self_query/vectara.cjs +138 -0
- package/dist/retrievers/self_query/vectara.d.ts +41 -0
- package/dist/retrievers/self_query/vectara.js +134 -0
- package/dist/{util/@cfworker/json-schema → runnables}/index.cjs +1 -1
- package/dist/runnables/index.d.ts +1 -0
- package/dist/runnables/index.js +1 -0
- package/dist/schema/document.cjs +3 -34
- package/dist/schema/document.d.ts +2 -29
- package/dist/schema/document.js +2 -32
- package/dist/schema/index.cjs +37 -612
- package/dist/schema/index.d.ts +11 -311
- package/dist/schema/index.js +8 -583
- package/dist/schema/output_parser.cjs +15 -309
- package/dist/schema/output_parser.d.ts +1 -173
- package/dist/schema/output_parser.js +1 -301
- package/dist/schema/retriever.cjs +15 -77
- package/dist/schema/retriever.d.ts +1 -43
- package/dist/schema/retriever.js +1 -75
- package/dist/schema/runnable/base.cjs +10 -1072
- package/dist/schema/runnable/base.d.ts +1 -356
- package/dist/schema/runnable/base.js +1 -1060
- package/dist/schema/runnable/branch.cjs +2 -131
- package/dist/schema/runnable/branch.d.ts +1 -94
- package/dist/schema/runnable/branch.js +1 -130
- package/dist/schema/runnable/config.cjs +0 -6
- package/dist/schema/runnable/config.d.ts +1 -3
- package/dist/schema/runnable/config.js +1 -4
- package/dist/schema/runnable/index.cjs +15 -16
- package/dist/schema/runnable/index.d.ts +1 -5
- package/dist/schema/runnable/index.js +1 -4
- package/dist/schema/runnable/passthrough.cjs +3 -113
- package/dist/schema/runnable/passthrough.d.ts +1 -72
- package/dist/schema/runnable/passthrough.js +1 -111
- package/dist/schema/runnable/router.cjs +2 -71
- package/dist/schema/runnable/router.d.ts +1 -29
- package/dist/schema/runnable/router.js +1 -70
- package/dist/schema/storage.cjs +15 -8
- package/dist/schema/storage.d.ts +1 -57
- package/dist/schema/storage.js +1 -6
- package/dist/tools/bingserpapi.d.ts +1 -1
- package/dist/tools/searchapi.d.ts +1 -1
- package/dist/tools/serpapi.d.ts +1 -1
- package/dist/tools/serper.d.ts +1 -1
- package/dist/util/async_caller.cjs +14 -128
- package/dist/util/async_caller.d.ts +1 -45
- package/dist/util/async_caller.js +1 -124
- package/dist/vectorstores/vectara.cjs +77 -7
- package/dist/vectorstores/vectara.d.ts +9 -3
- package/dist/vectorstores/vectara.js +54 -7
- package/experimental/openai_files.cjs +1 -0
- package/experimental/openai_files.d.ts +1 -0
- package/experimental/openai_files.js +1 -0
- package/package.json +27 -5
- package/retrievers/self_query/vectara.cjs +1 -0
- package/retrievers/self_query/vectara.d.ts +1 -0
- package/retrievers/self_query/vectara.js +1 -0
- package/runnables.cjs +1 -0
- package/runnables.d.ts +1 -0
- package/runnables.js +1 -0
- package/dist/util/@cfworker/json-schema/index.d.ts +0 -1
- package/dist/util/@cfworker/json-schema/index.js +0 -1
- package/dist/util/@cfworker/json-schema/src/deep-compare-strict.cjs +0 -43
- package/dist/util/@cfworker/json-schema/src/deep-compare-strict.d.ts +0 -1
- package/dist/util/@cfworker/json-schema/src/deep-compare-strict.js +0 -39
- package/dist/util/@cfworker/json-schema/src/dereference.cjs +0 -169
- package/dist/util/@cfworker/json-schema/src/dereference.d.ts +0 -12
- package/dist/util/@cfworker/json-schema/src/dereference.js +0 -165
- package/dist/util/@cfworker/json-schema/src/format.cjs +0 -139
- package/dist/util/@cfworker/json-schema/src/format.d.ts +0 -2
- package/dist/util/@cfworker/json-schema/src/format.js +0 -136
- package/dist/util/@cfworker/json-schema/src/index.cjs +0 -24
- package/dist/util/@cfworker/json-schema/src/index.d.ts +0 -8
- package/dist/util/@cfworker/json-schema/src/index.js +0 -8
- package/dist/util/@cfworker/json-schema/src/pointer.cjs +0 -11
- package/dist/util/@cfworker/json-schema/src/pointer.d.ts +0 -2
- package/dist/util/@cfworker/json-schema/src/pointer.js +0 -6
- package/dist/util/@cfworker/json-schema/src/types.cjs +0 -2
- package/dist/util/@cfworker/json-schema/src/types.d.ts +0 -72
- package/dist/util/@cfworker/json-schema/src/types.js +0 -1
- package/dist/util/@cfworker/json-schema/src/ucs2-length.cjs +0 -28
- package/dist/util/@cfworker/json-schema/src/ucs2-length.d.ts +0 -6
- package/dist/util/@cfworker/json-schema/src/ucs2-length.js +0 -24
- package/dist/util/@cfworker/json-schema/src/validate.cjs +0 -808
- package/dist/util/@cfworker/json-schema/src/validate.d.ts +0 -3
- package/dist/util/@cfworker/json-schema/src/validate.js +0 -804
- package/dist/util/@cfworker/json-schema/src/validator.cjs +0 -44
- package/dist/util/@cfworker/json-schema/src/validator.d.ts +0 -10
- package/dist/util/@cfworker/json-schema/src/validator.js +0 -40
- package/dist/util/fast-json-patch/index.cjs +0 -49
- package/dist/util/fast-json-patch/index.d.ts +0 -22
- package/dist/util/fast-json-patch/index.js +0 -16
- package/dist/util/fast-json-patch/src/core.cjs +0 -469
- package/dist/util/fast-json-patch/src/core.d.ts +0 -111
- package/dist/util/fast-json-patch/src/core.js +0 -459
- package/dist/util/fast-json-patch/src/duplex.cjs +0 -237
- package/dist/util/fast-json-patch/src/duplex.d.ts +0 -23
- package/dist/util/fast-json-patch/src/duplex.js +0 -230
- package/dist/util/fast-json-patch/src/helpers.cjs +0 -194
- package/dist/util/fast-json-patch/src/helpers.d.ts +0 -36
- package/dist/util/fast-json-patch/src/helpers.js +0 -181
- package/dist/util/js-sha1/hash.cjs +0 -358
- package/dist/util/js-sha1/hash.d.ts +0 -1
- package/dist/util/js-sha1/hash.js +0 -355
package/dist/llms/base.js
CHANGED

@@ -1,275 +1 @@
-
-import { CallbackManager, } from "../callbacks/manager.js";
-import { BaseLanguageModel, } from "../base_language/index.js";
-import { getBufferString } from "../memory/base.js";
-/**
- * LLM Wrapper. Provides an {@link call} (an {@link generate}) function that takes in a prompt (or prompts) and returns a string.
- */
-export class BaseLLM extends BaseLanguageModel {
-    constructor({ concurrency, ...rest }) {
-        super(concurrency ? { maxConcurrency: concurrency, ...rest } : rest);
-        Object.defineProperty(this, "lc_namespace", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: ["langchain", "llms", this._llmType()]
-        });
-    }
-    /**
-     * This method takes an input and options, and returns a string. It
-     * converts the input to a prompt value and generates a result based on
-     * the prompt.
-     * @param input Input for the LLM.
-     * @param options Options for the LLM call.
-     * @returns A string result based on the prompt.
-     */
-    async invoke(input, options) {
-        const promptValue = BaseLLM._convertInputToPromptValue(input);
-        const result = await this.generatePrompt([promptValue], options, options?.callbacks);
-        return result.generations[0][0].text;
-    }
-    // eslint-disable-next-line require-yield
-    async *_streamResponseChunks(_input, _options, _runManager) {
-        throw new Error("Not implemented.");
-    }
-    _separateRunnableConfigFromCallOptions(options) {
-        const [runnableConfig, callOptions] = super._separateRunnableConfigFromCallOptions(options);
-        if (callOptions?.timeout && !callOptions.signal) {
-            callOptions.signal = AbortSignal.timeout(callOptions.timeout);
-        }
-        return [runnableConfig, callOptions];
-    }
-    async *_streamIterator(input, options) {
-        // Subclass check required to avoid double callbacks with default implementation
-        if (this._streamResponseChunks === BaseLLM.prototype._streamResponseChunks) {
-            yield this.invoke(input, options);
-        }
-        else {
-            const prompt = BaseLLM._convertInputToPromptValue(input);
-            const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(options);
-            const callbackManager_ = await CallbackManager.configure(runnableConfig.callbacks, this.callbacks, runnableConfig.tags, this.tags, runnableConfig.metadata, this.metadata, { verbose: this.verbose });
-            const extra = {
-                options: callOptions,
-                invocation_params: this?.invocationParams(callOptions),
-                batch_size: 1,
-            };
-            const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), [prompt.toString()], undefined, undefined, extra, undefined, undefined, runnableConfig.runName);
-            let generation = new GenerationChunk({
-                text: "",
-            });
-            try {
-                for await (const chunk of this._streamResponseChunks(input.toString(), callOptions, runManagers?.[0])) {
-                    if (!generation) {
-                        generation = chunk;
-                    }
-                    else {
-                        generation = generation.concat(chunk);
-                    }
-                    if (typeof chunk.text === "string") {
-                        yield chunk.text;
-                    }
-                }
-            }
-            catch (err) {
-                await Promise.all((runManagers ?? []).map((runManager) => runManager?.handleLLMError(err)));
-                throw err;
-            }
-            await Promise.all((runManagers ?? []).map((runManager) => runManager?.handleLLMEnd({
-                generations: [[generation]],
-            })));
-        }
-    }
-    /**
-     * This method takes prompt values, options, and callbacks, and generates
-     * a result based on the prompts.
-     * @param promptValues Prompt values for the LLM.
-     * @param options Options for the LLM call.
-     * @param callbacks Callbacks for the LLM call.
-     * @returns An LLMResult based on the prompts.
-     */
-    async generatePrompt(promptValues, options, callbacks) {
-        const prompts = promptValues.map((promptValue) => promptValue.toString());
-        return this.generate(prompts, options, callbacks);
-    }
-    /**
-     * Get the parameters used to invoke the model
-     */
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    invocationParams(_options) {
-        return {};
-    }
-    _flattenLLMResult(llmResult) {
-        const llmResults = [];
-        for (let i = 0; i < llmResult.generations.length; i += 1) {
-            const genList = llmResult.generations[i];
-            if (i === 0) {
-                llmResults.push({
-                    generations: [genList],
-                    llmOutput: llmResult.llmOutput,
-                });
-            }
-            else {
-                const llmOutput = llmResult.llmOutput
-                    ? { ...llmResult.llmOutput, tokenUsage: {} }
-                    : undefined;
-                llmResults.push({
-                    generations: [genList],
-                    llmOutput,
-                });
-            }
-        }
-        return llmResults;
-    }
-    /** @ignore */
-    async _generateUncached(prompts, parsedOptions, handledOptions) {
-        const callbackManager_ = await CallbackManager.configure(handledOptions.callbacks, this.callbacks, handledOptions.tags, this.tags, handledOptions.metadata, this.metadata, { verbose: this.verbose });
-        const extra = {
-            options: parsedOptions,
-            invocation_params: this?.invocationParams(parsedOptions),
-            batch_size: prompts.length,
-        };
-        const runManagers = await callbackManager_?.handleLLMStart(this.toJSON(), prompts, undefined, undefined, extra, undefined, undefined, handledOptions?.runName);
-        let output;
-        try {
-            output = await this._generate(prompts, parsedOptions, runManagers?.[0]);
-        }
-        catch (err) {
-            await Promise.all((runManagers ?? []).map((runManager) => runManager?.handleLLMError(err)));
-            throw err;
-        }
-        const flattenedOutputs = this._flattenLLMResult(output);
-        await Promise.all((runManagers ?? []).map((runManager, i) => runManager?.handleLLMEnd(flattenedOutputs[i])));
-        const runIds = runManagers?.map((manager) => manager.runId) || undefined;
-        // This defines RUN_KEY as a non-enumerable property on the output object
-        // so that it is not serialized when the output is stringified, and so that
-        // it isnt included when listing the keys of the output object.
-        Object.defineProperty(output, RUN_KEY, {
-            value: runIds ? { runIds } : undefined,
-            configurable: true,
-        });
-        return output;
-    }
-    /**
-     * Run the LLM on the given prompts and input, handling caching.
-     */
-    async generate(prompts, options, callbacks) {
-        if (!Array.isArray(prompts)) {
-            throw new Error("Argument 'prompts' is expected to be a string[]");
-        }
-        let parsedOptions;
-        if (Array.isArray(options)) {
-            parsedOptions = { stop: options };
-        }
-        else {
-            parsedOptions = options;
-        }
-        const [runnableConfig, callOptions] = this._separateRunnableConfigFromCallOptions(parsedOptions);
-        runnableConfig.callbacks = runnableConfig.callbacks ?? callbacks;
-        if (!this.cache) {
-            return this._generateUncached(prompts, callOptions, runnableConfig);
-        }
-        const { cache } = this;
-        const llmStringKey = this._getSerializedCacheKeyParametersForCall(callOptions);
-        const missingPromptIndices = [];
-        const generations = await Promise.all(prompts.map(async (prompt, index) => {
-            const result = await cache.lookup(prompt, llmStringKey);
-            if (!result) {
-                missingPromptIndices.push(index);
-            }
-            return result;
-        }));
-        let llmOutput = {};
-        if (missingPromptIndices.length > 0) {
-            const results = await this._generateUncached(missingPromptIndices.map((i) => prompts[i]), callOptions, runnableConfig);
-            await Promise.all(results.generations.map(async (generation, index) => {
-                const promptIndex = missingPromptIndices[index];
-                generations[promptIndex] = generation;
-                return cache.update(prompts[promptIndex], llmStringKey, generation);
-            }));
-            llmOutput = results.llmOutput ?? {};
-        }
-        return { generations, llmOutput };
-    }
-    /**
-     * Convenience wrapper for {@link generate} that takes in a single string prompt and returns a single string output.
-     */
-    async call(prompt, options, callbacks) {
-        const { generations } = await this.generate([prompt], options, callbacks);
-        return generations[0][0].text;
-    }
-    /**
-     * This method is similar to `call`, but it's used for making predictions
-     * based on the input text.
-     * @param text Input text for the prediction.
-     * @param options Options for the LLM call.
-     * @param callbacks Callbacks for the LLM call.
-     * @returns A prediction based on the input text.
-     */
-    async predict(text, options, callbacks) {
-        return this.call(text, options, callbacks);
-    }
-    /**
-     * This method takes a list of messages, options, and callbacks, and
-     * returns a predicted message.
-     * @param messages A list of messages for the prediction.
-     * @param options Options for the LLM call.
-     * @param callbacks Callbacks for the LLM call.
-     * @returns A predicted message based on the list of messages.
-     */
-    async predictMessages(messages, options, callbacks) {
-        const text = getBufferString(messages);
-        const prediction = await this.call(text, options, callbacks);
-        return new AIMessage(prediction);
-    }
-    /**
-     * Get the identifying parameters of the LLM.
-     */
-    // eslint-disable-next-line @typescript-eslint/no-explicit-any
-    _identifyingParams() {
-        return {};
-    }
-    /**
-     * @deprecated
-     * Return a json-like object representing this LLM.
-     */
-    serialize() {
-        return {
-            ...this._identifyingParams(),
-            _type: this._llmType(),
-            _model: this._modelType(),
-        };
-    }
-    _modelType() {
-        return "base_llm";
-    }
-    /**
-     * @deprecated
-     * Load an LLM from a json-like object describing it.
-     */
-    static async deserialize(data) {
-        const { _type, _model, ...rest } = data;
-        if (_model && _model !== "base_llm") {
-            throw new Error(`Cannot load LLM with model ${_model}`);
-        }
-        const Cls = {
-            openai: (await import("./openai.js")).OpenAI,
-        }[_type];
-        if (Cls === undefined) {
-            throw new Error(`Cannot load LLM with type ${_type}`);
-        }
-        return new Cls(rest);
-    }
-}
-/**
- * LLM class that provides a simpler interface to subclass than {@link BaseLLM}.
- *
- * Requires only implementing a simpler {@link _call} method instead of {@link _generate}.
- *
- * @augments BaseLLM
- */
-export class LLM extends BaseLLM {
-    async _generate(prompts, options, runManager) {
-        const generations = await Promise.all(prompts.map((prompt, promptIndex) => this._call(prompt, { ...options, promptIndex }, runManager).then((text) => [{ text }])));
-        return { generations };
-    }
-}
+export * from "langchain-core/llm";
package/dist/llms/bedrock/web.d.ts
CHANGED

@@ -3,7 +3,7 @@ import { BaseBedrockInput, type CredentialType } from "../../util/bedrock.js";
 import { LLM, BaseLLMParams } from "../base.js";
 import { CallbackManagerForLLMRun } from "../../callbacks/manager.js";
 import { GenerationChunk } from "../../schema/index.js";
-import { SerializedFields } from "../../load/map_keys.js";
+import type { SerializedFields } from "../../load/map_keys.js";
 /**
  * A type of Large Language Model (LLM) that interacts with the Bedrock
  * service. It extends the base `LLM` class and implements the
package/dist/llms/fireworks.d.ts
CHANGED

@@ -26,7 +26,7 @@ export declare class Fireworks extends OpenAI<FireworksCallOptions> {
     constructor(fields?: Partial<Omit<OpenAIInput, "openAIApiKey" | FireworksUnsupportedArgs>> & BaseLLMParams & {
         fireworksApiKey?: string;
     });
-    toJSON(): import("
+    toJSON(): import("langchain-core/load/serializable").Serialized;
     completionWithRetry(request: OpenAIClient.CompletionCreateParamsStreaming, options?: OpenAICoreRequestOptions): Promise<AsyncIterable<OpenAIClient.Completion>>;
     completionWithRetry(request: OpenAIClient.CompletionCreateParamsNonStreaming, options?: OpenAICoreRequestOptions): Promise<OpenAIClient.Completions.Completion>;
 }
package/dist/load/import_constants.cjs
CHANGED

@@ -133,6 +133,7 @@ exports.optionalImportEntrypoints = [
     "langchain/retrievers/self_query/pinecone",
     "langchain/retrievers/self_query/supabase",
     "langchain/retrievers/self_query/weaviate",
+    "langchain/retrievers/self_query/vectara",
     "langchain/cache/cloudflare_kv",
     "langchain/cache/momento",
     "langchain/cache/redis",

package/dist/load/import_constants.js
CHANGED

@@ -130,6 +130,7 @@ export const optionalImportEntrypoints = [
     "langchain/retrievers/self_query/pinecone",
     "langchain/retrievers/self_query/supabase",
     "langchain/retrievers/self_query/weaviate",
+    "langchain/retrievers/self_query/vectara",
     "langchain/cache/cloudflare_kv",
     "langchain/cache/momento",
     "langchain/cache/redis",
package/dist/load/import_map.cjs
CHANGED

@@ -25,8 +25,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.chat_models__anthropic = exports.chat_models__openai = exports.chat_models__base = exports.document_transformers__openai_functions = exports.document_loaders__web__sort_xyz_blockchain = exports.document_loaders__web__serpapi = exports.document_loaders__web__searchapi = exports.document_loaders__base = exports.document = exports.memory = exports.text_splitter = exports.vectorstores__xata = exports.vectorstores__vectara = exports.vectorstores__prisma = exports.vectorstores__memory = exports.vectorstores__base = exports.prompts = exports.llms__fake = exports.llms__yandex = exports.llms__fireworks = exports.llms__ollama = exports.llms__cloudflare_workersai = exports.llms__aleph_alpha = exports.llms__ai21 = exports.llms__openai = exports.llms__base = exports.embeddings__voyage = exports.embeddings__minimax = exports.embeddings__openai = exports.embeddings__ollama = exports.embeddings__fake = exports.embeddings__cache_backed = exports.embeddings__base = exports.chains__openai_functions = exports.chains__combine_documents__reduce = exports.chains = exports.tools__render = exports.tools = exports.base_language = exports.agents__openai__output_parser = exports.agents__xml__output_parser = exports.agents__react__output_parser = exports.agents__format_scratchpad__log_to_message = exports.agents__format_scratchpad__xml = exports.agents__format_scratchpad__log = exports.agents__format_scratchpad__openai_tools = exports.agents__format_scratchpad = exports.agents__toolkits = exports.agents = exports.load__serializable = void 0;
-exports.
-exports.runnables__remote = void 0;
+exports.experimental__chains__violation_of_expectations = exports.experimental__chat_models__ollama_functions = exports.experimental__chat_models__bittensor = exports.experimental__plan_and_execute = exports.experimental__generative_agents = exports.experimental__babyagi = exports.experimental__openai_files = exports.experimental__openai_assistant = exports.experimental__autogpt = exports.util__time = exports.util__math = exports.util__document = exports.storage__in_memory = exports.storage__encoder_backed = exports.stores__message__in_memory = exports.stores__file__in_memory = exports.stores__doc__in_memory = exports.cache = exports.retrievers__vespa = exports.retrievers__score_threshold = exports.retrievers__hyde = exports.retrievers__document_compressors__embeddings_filter = exports.retrievers__document_compressors__chain_extract = exports.retrievers__time_weighted = exports.retrievers__tavily_search_api = exports.retrievers__parent_document = exports.retrievers__multi_vector = exports.retrievers__multi_query = exports.retrievers__document_compressors = exports.retrievers__contextual_compression = exports.retrievers__databerry = exports.retrievers__chaindesk = exports.retrievers__remote = exports.output_parsers = exports.callbacks = exports.schema__storage = exports.schema__runnable = exports.schema__retriever = exports.schema__query_constructor = exports.schema__prompt_template = exports.schema__output_parser = exports.schema__document = exports.schema = exports.chat_models__fake = exports.chat_models__yandex = exports.chat_models__minimax = exports.chat_models__ollama = exports.chat_models__baiduwenxin = exports.chat_models__fireworks = exports.chat_models__cloudflare_workersai = void 0;
+exports.runnables__remote = exports.runnables = exports.evaluation = void 0;
 exports.load__serializable = __importStar(require("../load/serializable.cjs"));
 exports.agents = __importStar(require("../agents/index.cjs"));
 exports.agents__toolkits = __importStar(require("../agents/toolkits/index.cjs"));
@@ -120,6 +120,7 @@ exports.util__math = __importStar(require("../util/math.cjs"));
 exports.util__time = __importStar(require("../util/time.cjs"));
 exports.experimental__autogpt = __importStar(require("../experimental/autogpt/index.cjs"));
 exports.experimental__openai_assistant = __importStar(require("../experimental/openai_assistant/index.cjs"));
+exports.experimental__openai_files = __importStar(require("../experimental/openai_files/index.cjs"));
 exports.experimental__babyagi = __importStar(require("../experimental/babyagi/index.cjs"));
 exports.experimental__generative_agents = __importStar(require("../experimental/generative_agents/index.cjs"));
 exports.experimental__plan_and_execute = __importStar(require("../experimental/plan_and_execute/index.cjs"));
@@ -127,4 +128,5 @@ exports.experimental__chat_models__bittensor = __importStar(require("../experime
 exports.experimental__chat_models__ollama_functions = __importStar(require("../experimental/chat_models/ollama_functions.cjs"));
 exports.experimental__chains__violation_of_expectations = __importStar(require("../experimental/chains/violation_of_expectations/index.cjs"));
 exports.evaluation = __importStar(require("../evaluation/index.cjs"));
+exports.runnables = __importStar(require("../runnables/index.cjs"));
 exports.runnables__remote = __importStar(require("../runnables/remote.cjs"));
package/dist/load/import_map.d.ts
CHANGED

@@ -91,6 +91,7 @@ export * as util__math from "../util/math.js";
 export * as util__time from "../util/time.js";
 export * as experimental__autogpt from "../experimental/autogpt/index.js";
 export * as experimental__openai_assistant from "../experimental/openai_assistant/index.js";
+export * as experimental__openai_files from "../experimental/openai_files/index.js";
 export * as experimental__babyagi from "../experimental/babyagi/index.js";
 export * as experimental__generative_agents from "../experimental/generative_agents/index.js";
 export * as experimental__plan_and_execute from "../experimental/plan_and_execute/index.js";
@@ -98,4 +99,5 @@ export * as experimental__chat_models__bittensor from "../experimental/chat_mode
 export * as experimental__chat_models__ollama_functions from "../experimental/chat_models/ollama_functions.js";
 export * as experimental__chains__violation_of_expectations from "../experimental/chains/violation_of_expectations/index.js";
 export * as evaluation from "../evaluation/index.js";
+export * as runnables from "../runnables/index.js";
 export * as runnables__remote from "../runnables/remote.js";
package/dist/load/import_map.js
CHANGED

@@ -92,6 +92,7 @@ export * as util__math from "../util/math.js";
 export * as util__time from "../util/time.js";
 export * as experimental__autogpt from "../experimental/autogpt/index.js";
 export * as experimental__openai_assistant from "../experimental/openai_assistant/index.js";
+export * as experimental__openai_files from "../experimental/openai_files/index.js";
 export * as experimental__babyagi from "../experimental/babyagi/index.js";
 export * as experimental__generative_agents from "../experimental/generative_agents/index.js";
 export * as experimental__plan_and_execute from "../experimental/plan_and_execute/index.js";
@@ -99,4 +100,5 @@ export * as experimental__chat_models__bittensor from "../experimental/chat_mode
 export * as experimental__chat_models__ollama_functions from "../experimental/chat_models/ollama_functions.js";
 export * as experimental__chains__violation_of_expectations from "../experimental/chains/violation_of_expectations/index.js";
 export * as evaluation from "../evaluation/index.js";
+export * as runnables from "../runnables/index.js";
 export * as runnables__remote from "../runnables/remote.js";
package/dist/load/index.cjs
CHANGED

@@ -24,156 +24,15 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.load = void 0;
-const
+const load_1 = require("langchain-core/load");
 const import_constants_js_1 = require("./import_constants.cjs");
 const importMap = __importStar(require("./import_map.cjs"));
-const map_keys_js_1 = require("./map_keys.cjs");
-const env_js_1 = require("../util/env.cjs");
-function combineAliasesAndInvert(constructor) {
-    const aliases = {};
-    for (
-    // eslint-disable-next-line @typescript-eslint/no-this-alias
-    let current = constructor; current && current.prototype; current = Object.getPrototypeOf(current)) {
-        Object.assign(aliases, Reflect.get(current.prototype, "lc_aliases"));
-    }
-    return Object.entries(aliases).reduce((acc, [key, value]) => {
-        acc[value] = key;
-        return acc;
-    }, {});
-}
-async function reviver(value) {
-    const { optionalImportsMap, secretsMap, path = ["$"] } = this;
-    const pathStr = path.join(".");
-    if (typeof value === "object" &&
-        value !== null &&
-        !Array.isArray(value) &&
-        "lc" in value &&
-        "type" in value &&
-        "id" in value &&
-        value.lc === 1 &&
-        value.type === "secret") {
-        const serialized = value;
-        const [key] = serialized.id;
-        if (key in secretsMap) {
-            return secretsMap[key];
-        }
-        else {
-            const secretValueInEnv = (0, env_js_1.getEnvironmentVariable)(key);
-            if (secretValueInEnv) {
-                return secretValueInEnv;
-            }
-            else {
-                throw new Error(`Missing key "${key}" for ${pathStr} in load(secretsMap={})`);
-            }
-        }
-    }
-    else if (typeof value === "object" &&
-        value !== null &&
-        !Array.isArray(value) &&
-        "lc" in value &&
-        "type" in value &&
-        "id" in value &&
-        value.lc === 1 &&
-        value.type === "not_implemented") {
-        const serialized = value;
-        const str = JSON.stringify(serialized);
-        throw new Error(`Trying to load an object that doesn't implement serialization: ${pathStr} -> ${str}`);
-    }
-    else if (typeof value === "object" &&
-        value !== null &&
-        !Array.isArray(value) &&
-        "lc" in value &&
-        "type" in value &&
-        "id" in value &&
-        "kwargs" in value &&
-        value.lc === 1) {
-        const serialized = value;
-        const str = JSON.stringify(serialized);
-        const [name, ...namespaceReverse] = serialized.id.slice().reverse();
-        const namespace = namespaceReverse.reverse();
-        let module;
-        if (import_constants_js_1.optionalImportEntrypoints.includes(namespace.join("/")) ||
-            namespace.join("/") in optionalImportsMap) {
-            if (namespace.join("/") in optionalImportsMap) {
-                module = await optionalImportsMap[namespace.join("/")];
-            }
-            else {
-                throw new Error(`Missing key "${namespace.join("/")}" for ${pathStr} in load(optionalImportsMap={})`);
-            }
-        }
-        else {
-            // Currently, we only support langchain imports.
-            if (namespace[0] === "langchain") {
-                namespace.shift();
-            }
-            else {
-                throw new Error(`Invalid namespace: ${pathStr} -> ${str}`);
-            }
-            // The root namespace "langchain" is not a valid import.
-            if (namespace.length === 0) {
-                throw new Error(`Invalid namespace: ${pathStr} -> ${str}`);
-            }
-            // Find the longest matching namespace.
-            let importMapKey;
-            do {
-                importMapKey = namespace.join("__");
-                if (importMapKey in importMap) {
-                    break;
-                }
-                else {
-                    namespace.pop();
-                }
-            } while (namespace.length > 0);
-            // If no matching namespace is found, throw an error.
-            if (importMapKey in importMap) {
-                module = importMap[importMapKey];
-            }
-        }
-        if (typeof module !== "object" || module === null) {
-            throw new Error(`Invalid namespace: ${pathStr} -> ${str}`);
-        }
-        // Extract the builder from the import map.
-        const builder =
-        // look for a named export with the same name as the class
-        module[name] ??
-            // look for an export with a lc_name property matching the class name
-            // this is necessary for classes that are minified
-            Object.values(module).find((v) => typeof v === "function" &&
-                (0, serializable_js_1.get_lc_unique_name)(v) === name);
-        if (typeof builder !== "function") {
-            throw new Error(`Invalid identifer: ${pathStr} -> ${str}`);
-        }
-        // Recurse on the arguments, which may be serialized objects themselves
-        const kwargs = await reviver.call({ ...this, path: [...path, "kwargs"] }, serialized.kwargs);
-        // Construct the object
-        if (serialized.type === "constructor") {
-            // eslint-disable-next-line new-cap, @typescript-eslint/no-explicit-any
-            const instance = new builder((0, map_keys_js_1.mapKeys)(kwargs, map_keys_js_1.keyFromJson, combineAliasesAndInvert(builder)));
-            // Minification in severless/edge runtimes will mange the
-            // name of classes presented in traces. As the names in import map
-            // are present as-is even with minification, use these names instead
-            Object.defineProperty(instance.constructor, "name", { value: name });
-            return instance;
-        }
-        else {
-            throw new Error(`Invalid type: ${pathStr} -> ${str}`);
-        }
-    }
-    else if (typeof value === "object" && value !== null) {
-        if (Array.isArray(value)) {
-            return Promise.all(value.map((v, i) => reviver.call({ ...this, path: [...path, `${i}`] }, v)));
-        }
-        else {
-            return Object.fromEntries(await Promise.all(Object.entries(value).map(async ([key, value]) => [
-                key,
-                await reviver.call({ ...this, path: [...path, key] }, value),
-            ])));
-        }
-    }
-    return value;
-}
 async function load(text, secretsMap = {}, optionalImportsMap = {}) {
-
-
+    return (0, load_1.load)(text, {
+        secretsMap,
+        optionalImportsMap,
+        optionalImportEntrypoints: import_constants_js_1.optionalImportEntrypoints,
+        importMap,
+    });
 }
 exports.load = load;
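Not part of the published diff: a minimal TypeScript sketch of consuming the entrypoints this release adds (langchain/runnables, langchain/experimental/openai_files, and langchain/retrievers/self_query/vectara, taken from the new package files and import-map entries listed above). The diff does not show which symbols these modules export, so the sketch uses namespace imports only and is illustrative rather than documented API.

// Illustrative only; assumes langchain@0.0.196 is installed.
// Entrypoint paths come from the file list above; no named exports are assumed.
import * as runnables from "langchain/runnables";
import * as openaiFiles from "langchain/experimental/openai_files";
import * as vectaraSelfQuery from "langchain/retrievers/self_query/vectara";

// Inspect what each newly added module exposes at runtime.
console.log(Object.keys(runnables));
console.log(Object.keys(openaiFiles));
console.log(Object.keys(vectaraSelfQuery));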