@promptbook/documents 0.104.0-2 → 0.104.0-4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/index.es.js +23 -2
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/types.index.d.ts +2 -0
- package/esm/typings/src/book-components/Chat/types/ChatMessage.d.ts +7 -11
- package/esm/typings/src/llm-providers/_multiple/MultipleLlmExecutionTools.d.ts +6 -2
- package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +1 -0
- package/esm/typings/src/types/Message.d.ts +49 -0
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +23 -2
- package/umd/index.umd.js.map +1 -1
package/esm/index.es.js
CHANGED
@@ -26,7 +26,7 @@ const BOOK_LANGUAGE_VERSION = '2.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.104.0-2';
+const PROMPTBOOK_ENGINE_VERSION = '0.104.0-4';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -4046,6 +4046,15 @@ function countUsage(llmTools) {
             return promptResult;
         };
     }
+    if (llmTools.callImageGenerationModel !== undefined) {
+        proxyTools.callImageGenerationModel = async (prompt) => {
+            // console.info('[🚕] callImageGenerationModel through countTotalUsage');
+            const promptResult = await llmTools.callImageGenerationModel(prompt);
+            totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
+            return promptResult;
+        };
+    }
     // <- Note: [🤖]
     return proxyTools;
 }
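The hunk above extends the usage-metering proxy so that `callImageGenerationModel` is wrapped the same way as the chat, completion, and embedding calls. Below is a minimal TypeScript sketch of that proxy pattern; the types and the `addUsage` helper are simplified stand-ins for illustration, not the actual Promptbook exports.

```ts
// Simplified stand-ins for illustration; not the actual Promptbook types or helpers.
type Usage = { price: number };
type PromptResult = { content: string; usage: Usage };
type LlmTools = {
    callImageGenerationModel?: (prompt: unknown) => Promise<PromptResult>;
};

const addUsage = (a: Usage, b: Usage): Usage => ({ price: a.price + b.price });

// Wraps the given tools in a proxy that accumulates usage across every call,
// mirroring what the diff adds for image generation.
function countUsageSketch(llmTools: LlmTools) {
    let totalUsage: Usage = { price: 0 };
    const proxyTools: LlmTools = {};

    if (llmTools.callImageGenerationModel !== undefined) {
        proxyTools.callImageGenerationModel = async (prompt) => {
            const promptResult = await llmTools.callImageGenerationModel!(prompt);
            totalUsage = addUsage(totalUsage, promptResult.usage); // image calls are now metered too
            return promptResult;
        };
    }

    return { proxyTools, getTotalUsage: () => totalUsage };
}
```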
@@ -4155,6 +4164,12 @@ class MultipleLlmExecutionTools {
     callEmbeddingModel(prompt) {
         return this.callCommonModel(prompt);
     }
+    /**
+     * Calls the best available embedding model
+     */
+    callImageGenerationModel(prompt) {
+        return this.callCommonModel(prompt);
+    }
     // <- Note: [🤖]
     /**
      * Calls the best available model
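The new `callImageGenerationModel` method on `MultipleLlmExecutionTools` forwards to the shared `callCommonModel` router, the same way `callEmbeddingModel` does. A minimal sketch of that delegation shape follows; the class and type names are simplified for illustration, not the real Promptbook signatures.

```ts
// Illustrative only; the real MultipleLlmExecutionTools has a richer Prompt/Result shape.
type Variant = 'CHAT' | 'COMPLETION' | 'EMBEDDING' | 'IMAGE_GENERATION';
type Prompt = { content: string; modelRequirements: { modelVariant: Variant } };
type Result = { content: string };

class MultipleToolsSketch {
    // Each public method is a thin forwarder, so supporting a new variant
    // only needs one forwarding method plus a case in the common router.
    callEmbeddingModel(prompt: Prompt): Promise<Result> {
        return this.callCommonModel(prompt);
    }

    callImageGenerationModel(prompt: Prompt): Promise<Result> {
        return this.callCommonModel(prompt);
    }

    private async callCommonModel(prompt: Prompt): Promise<Result> {
        // The real router tries each wrapped provider in order; see the next hunk.
        return { content: `routed ${prompt.modelRequirements.modelVariant}` };
    }
}
```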
@@ -4181,6 +4196,11 @@ class MultipleLlmExecutionTools {
                         continue llm;
                     }
                     return await llmExecutionTools.callEmbeddingModel(prompt);
+                case 'IMAGE_GENERATION':
+                    if (llmExecutionTools.callImageGenerationModel === undefined) {
+                        continue llm;
+                    }
+                    return await llmExecutionTools.callImageGenerationModel(prompt);
                 // <- case [🤖]:
                 default:
                     throw new UnexpectedError(`Unknown model variant "${prompt.modelRequirements.modelVariant}" in ${llmExecutionTools.title}`);
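Inside `callCommonModel`, the labeled `llm:` loop tries each wrapped provider in order; `continue llm` skips a provider that does not implement the requested capability, and an unknown variant raises `UnexpectedError`. A self-contained sketch of that fallback routing, with simplified types standing in for the Promptbook ones:

```ts
// Illustrative sketch of the fallback routing; not the actual Promptbook implementation.
type Variant = 'EMBEDDING' | 'IMAGE_GENERATION';
type Prompt = { modelRequirements: { modelVariant: Variant } };
type Result = { content: string };
type Provider = {
    title: string;
    callEmbeddingModel?: (prompt: Prompt) => Promise<Result>;
    callImageGenerationModel?: (prompt: Prompt) => Promise<Result>;
};

async function routeAcrossProviders(providers: Provider[], prompt: Prompt): Promise<Result> {
    // Labeled loop mirrors the `continue llm` pattern in the diff:
    // a provider lacking the capability is skipped and the next one is tried.
    llm: for (const provider of providers) {
        switch (prompt.modelRequirements.modelVariant) {
            case 'EMBEDDING':
                if (provider.callEmbeddingModel === undefined) {
                    continue llm;
                }
                return await provider.callEmbeddingModel(prompt);
            case 'IMAGE_GENERATION':
                if (provider.callImageGenerationModel === undefined) {
                    continue llm;
                }
                return await provider.callImageGenerationModel(prompt);
            default:
                throw new Error(`Unknown model variant in ${provider.title}`);
        }
    }
    throw new Error('No provider supports the requested model variant');
}
```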
@@ -6305,8 +6325,9 @@ async function executeAttempts(options) {
                     $ongoingTaskResult.$resultString = $ongoingTaskResult.$completionResult.content;
                     break variant;
                 case 'EMBEDDING':
+                case 'IMAGE_GENERATION':
                     throw new PipelineExecutionError(spaceTrim$1((block) => `
-
+                        ${modelRequirements.modelVariant} model can not be used in pipeline
 
                         This should be catched during parsing
 