@promptbook/remote-server 0.104.0-3 → 0.104.0-5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/index.es.js +23 -2
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/collection/agent-collection/constructors/agent-collection-in-supabase/AgentsDatabaseSchema.d.ts +18 -15
- package/esm/typings/src/llm-providers/_multiple/MultipleLlmExecutionTools.d.ts +6 -2
- package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +1 -0
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +23 -2
- package/umd/index.umd.js.map +1 -1
package/esm/index.es.js
CHANGED
@@ -33,7 +33,7 @@ const BOOK_LANGUAGE_VERSION = '2.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.104.0-3';
+const PROMPTBOOK_ENGINE_VERSION = '0.104.0-5';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -3830,6 +3830,15 @@ function countUsage(llmTools) {
             return promptResult;
         };
     }
+    if (llmTools.callImageGenerationModel !== undefined) {
+        proxyTools.callImageGenerationModel = async (prompt) => {
+            // console.info('[🚕] callImageGenerationModel through countTotalUsage');
+            const promptResult = await llmTools.callImageGenerationModel(prompt);
+            totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
+            return promptResult;
+        };
+    }
     // <- Note: [🤖]
     return proxyTools;
 }
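For orientation, here is a minimal standalone sketch of the pattern this hunk adds, not the package's actual implementation: `addUsage` and the usage shape are simplified stand-ins, and the `spending` observable is omitted. The point is that the usage-counting proxy only exposes `callImageGenerationModel` when the wrapped tools provide it, and accumulates the returned usage before passing the result through.

// Simplified sketch of a usage-counting proxy (assumed usage shape: { price }).
function countUsageSketch(llmTools) {
    let totalUsage = { price: 0 };
    const addUsage = (a, b) => ({ price: a.price + (b?.price ?? 0) });

    const proxyTools = {
        title: llmTools.title,
        getTotalUsage: () => totalUsage,
    };

    // Expose image generation only when the wrapped tools support it, so
    // feature detection via `callImageGenerationModel !== undefined` still works on the proxy.
    if (llmTools.callImageGenerationModel !== undefined) {
        proxyTools.callImageGenerationModel = async (prompt) => {
            const promptResult = await llmTools.callImageGenerationModel(prompt);
            totalUsage = addUsage(totalUsage, promptResult.usage);
            return promptResult;
        };
    }

    return proxyTools;
}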
@@ -3939,6 +3948,12 @@ class MultipleLlmExecutionTools {
     callEmbeddingModel(prompt) {
         return this.callCommonModel(prompt);
     }
+    /**
+     * Calls the best available embedding model
+     */
+    callImageGenerationModel(prompt) {
+        return this.callCommonModel(prompt);
+    }
     // <- Note: [🤖]
     /**
      * Calls the best available model
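As a rough usage sketch (the instance name and prompt fields below are illustrative assumptions, not taken from this diff), image generation is now reachable through the same aggregate interface as the other model variants:

// Hypothetical call through an already constructed MultipleLlmExecutionTools instance.
const imageResult = await multipleLlmExecutionTools.callImageGenerationModel({
    title: 'Example image prompt',
    content: 'A lighthouse at dawn, in watercolor style',
    modelRequirements: { modelVariant: 'IMAGE_GENERATION' },
});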
@@ -3965,6 +3980,11 @@ class MultipleLlmExecutionTools {
                     continue llm;
                 }
                 return await llmExecutionTools.callEmbeddingModel(prompt);
+            case 'IMAGE_GENERATION':
+                if (llmExecutionTools.callImageGenerationModel === undefined) {
+                    continue llm;
+                }
+                return await llmExecutionTools.callImageGenerationModel(prompt);
             // <- case [🤖]:
             default:
                 throw new UnexpectedError(`Unknown model variant "${prompt.modelRequirements.modelVariant}" in ${llmExecutionTools.title}`);
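The dispatch above follows the existing fallback pattern: providers are tried in order and a labeled `continue` skips any provider that does not implement the requested variant. A simplified sketch of that pattern, using plain `Error`s instead of Promptbook's error classes:

// Simplified fallback dispatch over a list of providers (only the new case shown).
async function callCommonModelSketch(providers, prompt) {
    llm: for (const llmExecutionTools of providers) {
        switch (prompt.modelRequirements.modelVariant) {
            case 'IMAGE_GENERATION':
                if (llmExecutionTools.callImageGenerationModel === undefined) {
                    // This provider cannot generate images; try the next provider.
                    continue llm;
                }
                return await llmExecutionTools.callImageGenerationModel(prompt);
            default:
                throw new Error(`Unknown model variant "${prompt.modelRequirements.modelVariant}"`);
        }
    }
    throw new Error('No provider was able to handle the prompt');
}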
@@ -6511,8 +6531,9 @@ async function executeAttempts(options) {
                 $ongoingTaskResult.$resultString = $ongoingTaskResult.$completionResult.content;
                 break variant;
             case 'EMBEDDING':
+            case 'IMAGE_GENERATION':
                 throw new PipelineExecutionError(spaceTrim$1((block) => `
-
+                    ${modelRequirements.modelVariant} model can not be used in pipeline

                     This should be catched during parsing
