@cellaware/utils 3.0.13 → 3.2.13
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -37,12 +37,12 @@ export declare class ChainStore {
|
|
|
37
37
|
private getPrompt;
|
|
38
38
|
/**
|
|
39
39
|
* - `name`: Chain name
|
|
40
|
+
* - `template`: Template chain name to copy prompt from
|
|
40
41
|
* - `modelName`: OpenAI model name
|
|
41
42
|
* - `temperature`: OpenAI model temperature
|
|
42
43
|
* - `verbose`: OpenAI verbose parameter
|
|
43
|
-
* - `templateName`: Template chain name to copy prompt from
|
|
44
44
|
*/
|
|
45
|
-
addChain(name: string, modelName: ModelName, temperature?: number, verbose?: boolean): void;
|
|
45
|
+
addChain(name: string, template: string | null, modelName: ModelName, temperature?: number, verbose?: boolean): void;
|
|
46
46
|
addExistingChain(chain: SingleActionChain): void;
|
|
47
47
|
callChain(name: string, args: any, tokenUsages: any[]): Promise<import("langchain/dist/schema/index.js").ChainValues>;
|
|
48
48
|
translate(statement: string, language: string, tokenUsages?: any[]): Promise<string>;
|
package/dist/llm/chain-store.js
CHANGED
|
@@ -184,13 +184,13 @@ Your translation here:
|
|
|
184
184
|
}
|
|
185
185
|
/**
|
|
186
186
|
* - `name`: Chain name
|
|
187
|
+
* - `template`: Template chain name to copy prompt from
|
|
187
188
|
* - `modelName`: OpenAI model name
|
|
188
189
|
* - `temperature`: OpenAI model temperature
|
|
189
190
|
* - `verbose`: OpenAI verbose parameter
|
|
190
|
-
* - `templateName`: Template chain name to copy prompt from
|
|
191
191
|
*/
|
|
192
|
-
addChain(name, modelName, temperature, verbose) {
|
|
193
|
-
const prompt = this.getPrompt(name,
|
|
192
|
+
addChain(name, template, modelName, temperature, verbose) {
|
|
193
|
+
const prompt = this.getPrompt(name, template);
|
|
194
194
|
const llm = new ChatOpenAI({
|
|
195
195
|
modelName, temperature: temperature ?? 0, configuration: {
|
|
196
196
|
organization: process.env.OPENAI_ORGANIZATION,
|