@promptbook/documents 0.104.0-3 → 0.104.0-4
This diff compares the publicly released contents of the two package versions as they appear in their public registries. It is provided for informational purposes only.
- package/esm/index.es.js +23 -2
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/llm-providers/_multiple/MultipleLlmExecutionTools.d.ts +6 -2
- package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +1 -0
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +23 -2
- package/umd/index.umd.js.map +1 -1
package/esm/typings/src/llm-providers/_multiple/MultipleLlmExecutionTools.d.ts
CHANGED
@@ -1,8 +1,8 @@
 import type { ChatParticipant } from '../../book-components/Chat/types/ChatParticipant';
 import type { AvailableModel } from '../../execution/AvailableModel';
 import type { LlmExecutionTools } from '../../execution/LlmExecutionTools';
-import type { ChatPromptResult, CompletionPromptResult, EmbeddingPromptResult, PromptResult } from '../../execution/PromptResult';
-import type { ChatPrompt, CompletionPrompt, EmbeddingPrompt, Prompt } from '../../types/Prompt';
+import type { ChatPromptResult, CompletionPromptResult, EmbeddingPromptResult, ImagePromptResult, PromptResult } from '../../execution/PromptResult';
+import type { ChatPrompt, CompletionPrompt, EmbeddingPrompt, ImagePrompt, Prompt } from '../../types/Prompt';
 import type { string_markdown, string_markdown_text, string_title } from '../../types/typeAliases';
 /**
  * Multiple LLM Execution Tools is a proxy server that uses multiple execution tools internally and exposes the executor interface externally.
@@ -43,6 +43,10 @@ export declare class MultipleLlmExecutionTools implements LlmExecutionTools {
      * Calls the best available embedding model
      */
     callEmbeddingModel(prompt: EmbeddingPrompt): Promise<EmbeddingPromptResult>;
+    /**
+     * Calls the best available embedding model
+     */
+    callImageGenerationModel(prompt: ImagePrompt): Promise<ImagePromptResult>;
     /**
      * Calls the best available model
      *
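The hunk above adds image generation to the MultipleLlmExecutionTools declaration: ImagePrompt and ImagePromptResult are imported and a callImageGenerationModel method is declared next to the existing chat, completion and embedding calls. Individual providers expose this capability optionally, which is why the bundled code later in this diff probes for the method before using it. Below is a minimal, self-contained TypeScript sketch of that guard-and-call pattern; the field names in the sketch types are invented stand-ins, since the real ImagePrompt / ImagePromptResult shapes are not part of this diff.

// Self-contained sketch; the real ImagePrompt / ImagePromptResult shapes are not
// shown in this diff, so the fields below are invented stand-ins.
interface ImagePromptSketch {
    content: string;
    modelRequirements: { modelVariant: 'IMAGE_GENERATION' };
}

interface ImagePromptResultSketch {
    imageUrl: string;
    usage: unknown;
}

// Individual providers may or may not support image generation, which is why the
// bundled code later in this diff probes the method with an `!== undefined` check.
interface ProviderSketch {
    title: string;
    callImageGenerationModel?(prompt: ImagePromptSketch): Promise<ImagePromptResultSketch>;
}

async function generateImage(provider: ProviderSketch, prompt: ImagePromptSketch): Promise<ImagePromptResultSketch> {
    // Probe for the optional capability before calling it.
    if (provider.callImageGenerationModel === undefined) {
        throw new Error(`${provider.title} does not support image generation`);
    }
    return provider.callImageGenerationModel(prompt);
}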
package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts
CHANGED
@@ -46,6 +46,7 @@ export declare class RemoteLlmExecutionTools<TCustomOptions = undefined> impleme
     private callCommonModel;
 }
 /**
+ * TODO: !!!! Deprecate pipeline server and all of its components
  * TODO: Maybe use `$exportJson`
  * TODO: [🧠][🛍] Maybe not `isAnonymous: boolean` BUT `mode: 'ANONYMOUS'|'COLLECTION'`
  * TODO: [🍓] Allow to list compatible models with each variant
package/esm/typings/src/version.d.ts
CHANGED
@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
 export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
 /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.104.0-
+ * It follows semantic versioning (e.g., `0.104.0-3`).
  *
  * @generated
  */
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@promptbook/documents",
-    "version": "0.104.0-3",
+    "version": "0.104.0-4",
     "description": "Promptbook: Turn your company's scattered knowledge into AI ready books",
     "private": false,
     "sideEffects": false,
@@ -95,7 +95,7 @@
     "module": "./esm/index.es.js",
     "typings": "./esm/typings/src/_packages/documents.index.d.ts",
     "peerDependencies": {
-        "@promptbook/core": "0.104.0-3"
+        "@promptbook/core": "0.104.0-4"
     },
     "dependencies": {
         "colors": "1.4.0",
package/umd/index.umd.js
CHANGED
@@ -25,7 +25,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.104.0-3';
+const PROMPTBOOK_ENGINE_VERSION = '0.104.0-4';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -4045,6 +4045,15 @@
             return promptResult;
         };
     }
+    if (llmTools.callImageGenerationModel !== undefined) {
+        proxyTools.callImageGenerationModel = async (prompt) => {
+            // console.info('[🚕] callImageGenerationModel through countTotalUsage');
+            const promptResult = await llmTools.callImageGenerationModel(prompt);
+            totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
+            return promptResult;
+        };
+    }
     // <- Note: [🤖]
     return proxyTools;
 }
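This hunk extends the usage-counting proxy to image generation: when the wrapped tools expose callImageGenerationModel, the proxy forwards the call, adds the result's usage into the running total, and reports it on the spending stream. A simplified, self-contained sketch of that accumulation pattern follows; the Usage shape and addUsage helper below are stand-ins, not the real @promptbook/core implementations.

// Simplified stand-ins; the real Usage type and addUsage() in @promptbook/core are richer.
type Usage = { price: number; inputTokens: number; outputTokens: number };

const addUsage = (a: Usage, b: Usage): Usage => ({
    price: a.price + b.price,
    inputTokens: a.inputTokens + b.inputTokens,
    outputTokens: a.outputTokens + b.outputTokens,
});

type ImageCall = (prompt: { content: string }) => Promise<{ imageUrl: string; usage: Usage }>;

// Wrap an optional image-generation call so every invocation adds its usage
// into a running total, mirroring the proxy in the hunk above.
function withUsageCounting(
    callImageGenerationModel: ImageCall | undefined,
    onTotal: (total: Usage) => void,
): ImageCall | undefined {
    if (callImageGenerationModel === undefined) {
        return undefined; // capability not present -> the proxy does not expose it either
    }
    let totalUsage: Usage = { price: 0, inputTokens: 0, outputTokens: 0 };
    return async (prompt) => {
        const promptResult = await callImageGenerationModel(prompt);
        totalUsage = addUsage(totalUsage, promptResult.usage);
        onTotal(totalUsage);
        return promptResult;
    };
}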
@@ -4154,6 +4163,12 @@
     callEmbeddingModel(prompt) {
         return this.callCommonModel(prompt);
     }
+    /**
+     * Calls the best available embedding model
+     */
+    callImageGenerationModel(prompt) {
+        return this.callCommonModel(prompt);
+    }
     // <- Note: [🤖]
     /**
      * Calls the best available model
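In RemoteLlmExecutionTools the new method simply reuses the existing transport: callImageGenerationModel delegates to the same private callCommonModel that already serves chat, completion and embedding prompts. A rough sketch of that delegation pattern, with placeholder types and a placeholder transport rather than the real remote call:

// Placeholder types for the sketch; the real prompt/result types live in @promptbook/core.
type AnyPrompt = { content: string };

class RemoteToolsSketch {
    callEmbeddingModel(prompt: AnyPrompt): Promise<unknown> {
        return this.callCommonModel(prompt);
    }

    callImageGenerationModel(prompt: AnyPrompt): Promise<unknown> {
        return this.callCommonModel(prompt);
    }

    private async callCommonModel(prompt: AnyPrompt): Promise<unknown> {
        // The real implementation sends the prompt to the remote Promptbook server;
        // this placeholder just echoes it back.
        return { echo: prompt.content };
    }
}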
@@ -4180,6 +4195,11 @@
                     continue llm;
                 }
                 return await llmExecutionTools.callEmbeddingModel(prompt);
+            case 'IMAGE_GENERATION':
+                if (llmExecutionTools.callImageGenerationModel === undefined) {
+                    continue llm;
+                }
+                return await llmExecutionTools.callImageGenerationModel(prompt);
             // <- case [🤖]:
             default:
                 throw new UnexpectedError(`Unknown model variant "${prompt.modelRequirements.modelVariant}" in ${llmExecutionTools.title}`);
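This hunk is the dispatch side in MultipleLlmExecutionTools: the labelled loop walks the configured providers and, for an IMAGE_GENERATION prompt, skips any provider that does not implement callImageGenerationModel (continue llm), returning the result from the first one that does. A compact sketch of that fallback loop, written with invented stand-in types rather than the real Promptbook interfaces:

// Local stand-ins for the sketch; not the real Promptbook interfaces.
type ModelVariant = 'CHAT' | 'COMPLETION' | 'EMBEDDING' | 'IMAGE_GENERATION';

interface PromptSketch {
    content: string;
    modelRequirements: { modelVariant: ModelVariant };
}

interface ProviderLike {
    title: string;
    callChatModel?(prompt: PromptSketch): Promise<unknown>;
    callImageGenerationModel?(prompt: PromptSketch): Promise<unknown>;
}

// Try providers in order and skip the ones that lack the capability required by
// the prompt's model variant, as the hunk above does with `continue llm`.
async function callCommon(providers: ReadonlyArray<ProviderLike>, prompt: PromptSketch): Promise<unknown> {
    llm: for (const provider of providers) {
        switch (prompt.modelRequirements.modelVariant) {
            case 'CHAT':
                if (provider.callChatModel === undefined) {
                    continue llm;
                }
                return await provider.callChatModel(prompt);
            case 'IMAGE_GENERATION':
                if (provider.callImageGenerationModel === undefined) {
                    continue llm;
                }
                return await provider.callImageGenerationModel(prompt);
            default:
                continue llm; // other variants omitted in this sketch
        }
    }
    throw new Error(`No provider can handle ${prompt.modelRequirements.modelVariant}`);
}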
@@ -6304,8 +6324,9 @@
             $ongoingTaskResult.$resultString = $ongoingTaskResult.$completionResult.content;
             break variant;
         case 'EMBEDDING':
+        case 'IMAGE_GENERATION':
             throw new PipelineExecutionError(spaceTrim$1.spaceTrim((block) => `
-
+                ${modelRequirements.modelVariant} model can not be used in pipeline

                 This should be catched during parsing
