@promptbook/core 0.92.0-33 → 0.92.0-34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/index.es.js +27 -10
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/core.index.d.ts +4 -4
- package/esm/typings/src/llm-providers/_common/register/$provideLlmToolsFromEnv.d.ts +1 -1
- package/esm/typings/src/llm-providers/_common/register/createLlmToolsFromConfiguration.d.ts +1 -1
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +1 -1
- package/umd/index.umd.js +27 -10
- package/umd/index.umd.js.map +1 -1
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/MultipleLlmExecutionTools.d.ts +0 -0
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/joinLlmExecutionTools.d.ts +0 -0
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/playground/playground.d.ts +0 -0
package/esm/index.es.js
CHANGED
|
@@ -27,7 +27,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
|
|
|
27
27
|
* @generated
|
|
28
28
|
* @see https://github.com/webgptorg/promptbook
|
|
29
29
|
*/
|
|
30
|
-
const PROMPTBOOK_ENGINE_VERSION = '0.92.0-
|
|
30
|
+
const PROMPTBOOK_ENGINE_VERSION = '0.92.0-34';
|
|
31
31
|
/**
|
|
32
32
|
* TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
|
|
33
33
|
* Note: [💞] Ignore a discrepancy between file name and entity name
|
|
@@ -3204,7 +3204,14 @@ class MultipleLlmExecutionTools {
|
|
|
3204
3204
|
return 'Multiple LLM Providers';
|
|
3205
3205
|
}
|
|
3206
3206
|
get description() {
|
|
3207
|
-
|
|
3207
|
+
const innerModelsTitlesAndDescriptions = this.llmExecutionTools
|
|
3208
|
+
.map(({ title, description }, index) => `${index + 1}) \`${title}\`\n${description}`)
|
|
3209
|
+
.join('\n\n');
|
|
3210
|
+
return spaceTrim((block) => `
|
|
3211
|
+
Multiple LLM Providers:
|
|
3212
|
+
|
|
3213
|
+
${block(innerModelsTitlesAndDescriptions)}
|
|
3214
|
+
`);
|
|
3208
3215
|
}
|
|
3209
3216
|
/**
|
|
3210
3217
|
* Check the configuration of all execution tools
|
|
@@ -4558,6 +4565,7 @@ function knowledgePiecesToString(knowledgePieces) {
|
|
|
4558
4565
|
*/
|
|
4559
4566
|
async function getKnowledgeForTask(options) {
|
|
4560
4567
|
const { tools, preparedPipeline, task, parameters } = options;
|
|
4568
|
+
console.log('!!! getKnowledgeForTask', options);
|
|
4561
4569
|
const firstKnowlegePiece = preparedPipeline.knowledgePieces[0];
|
|
4562
4570
|
const firstKnowlegeIndex = firstKnowlegePiece === null || firstKnowlegePiece === void 0 ? void 0 : firstKnowlegePiece.index[0];
|
|
4563
4571
|
// <- TODO: Do not use just first knowledge piece and first index to determine embedding model, use also keyword search
|
|
@@ -5200,12 +5208,14 @@ function countUsage(llmTools) {
|
|
|
5200
5208
|
const spending = new Subject();
|
|
5201
5209
|
const proxyTools = {
|
|
5202
5210
|
get title() {
|
|
5203
|
-
|
|
5204
|
-
|
|
5211
|
+
return `${llmTools.title} (+usage)`;
|
|
5212
|
+
const PROMPTBOOK_ENGINE_VERSION = '0.92.0-34';
|
|
5213
|
+
// <- TODO: [🧈][🧠] Does it make sence to suffix "(+usage)"?
|
|
5205
5214
|
},
|
|
5206
5215
|
get description() {
|
|
5207
|
-
|
|
5208
|
-
|
|
5216
|
+
return `${llmTools.description} (+usage)`;
|
|
5217
|
+
// <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
|
|
5218
|
+
// <- TODO: [🧈][🧠] Does it make sence to suffix "(+usage)"?
|
|
5209
5219
|
},
|
|
5210
5220
|
checkConfiguration() {
|
|
5211
5221
|
return /* not await */ llmTools.checkConfiguration();
|
|
@@ -10350,8 +10360,13 @@ function filterModels(llmTools, modelFilter) {
|
|
|
10350
10360
|
const filteredTools = {
|
|
10351
10361
|
// Keep all properties from the original llmTools
|
|
10352
10362
|
...llmTools,
|
|
10363
|
+
get title() {
|
|
10364
|
+
return `${llmTools.title} (filtered)`;
|
|
10365
|
+
// <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
|
|
10366
|
+
},
|
|
10353
10367
|
get description() {
|
|
10354
10368
|
return `${llmTools.description} (filtered)`;
|
|
10369
|
+
// <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
|
|
10355
10370
|
},
|
|
10356
10371
|
// Override listModels to filter the models
|
|
10357
10372
|
async listModels() {
|
|
@@ -10712,12 +10727,14 @@ function cacheLlmTools(llmTools, options = {}) {
|
|
|
10712
10727
|
...llmTools,
|
|
10713
10728
|
// <- Note: [🥫]
|
|
10714
10729
|
get title() {
|
|
10715
|
-
|
|
10716
|
-
|
|
10730
|
+
return `${llmTools.title} (cached)`;
|
|
10731
|
+
// <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
|
|
10732
|
+
// <- TODO: [🧈][🧠] Does it make sence to suffix "(cached)"?
|
|
10717
10733
|
},
|
|
10718
10734
|
get description() {
|
|
10719
|
-
|
|
10720
|
-
|
|
10735
|
+
return `${llmTools.description} (cached)`;
|
|
10736
|
+
// <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
|
|
10737
|
+
// <- TODO: [🧈][🧠] Does it make sence to suffix "(cached)"?
|
|
10721
10738
|
},
|
|
10722
10739
|
listModels() {
|
|
10723
10740
|
// TODO: [🧠] Should be model listing also cached?
|