@promptbook/markdown-utils 0.92.0-33 → 0.92.0-34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/index.es.js +22 -6
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/core.index.d.ts +4 -4
- package/esm/typings/src/llm-providers/_common/register/$provideLlmToolsFromEnv.d.ts +1 -1
- package/esm/typings/src/llm-providers/_common/register/createLlmToolsFromConfiguration.d.ts +1 -1
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +1 -1
- package/umd/index.umd.js +22 -6
- package/umd/index.umd.js.map +1 -1
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/MultipleLlmExecutionTools.d.ts +0 -0
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/joinLlmExecutionTools.d.ts +0 -0
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/playground/playground.d.ts +0 -0
package/esm/index.es.js
CHANGED
@@ -25,7 +25,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.92.0-33';
+const PROMPTBOOK_ENGINE_VERSION = '0.92.0-34';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -2459,12 +2459,14 @@ function countUsage(llmTools) {
     const spending = new Subject();
     const proxyTools = {
         get title() {
-
-
+            return `${llmTools.title} (+usage)`;
+            // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
+            // <- TODO: [🧈][🧠] Does it make sence to suffix "(+usage)"?
         },
         get description() {
-
-
+            return `${llmTools.description} (+usage)`;
+            // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
+            // <- TODO: [🧈][🧠] Does it make sence to suffix "(+usage)"?
         },
         checkConfiguration() {
             return /* not await */ llmTools.checkConfiguration();
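For context, the `countUsage` hunk above fills in the proxy's `title` and `description` getters so that the wrapped tools report themselves with a `(+usage)` suffix. Below is a minimal standalone sketch of that wrapper pattern; it is not the promptbook API, and the `LlmLike` interface, the `makeCountingProxy` name, and the simple call counter are illustrative assumptions (the real `countUsage` tracks spending through an RxJS `Subject`, as the first context line of the hunk shows).

```typescript
// Illustrative shapes only; promptbook's real LlmExecutionTools interface is richer.
interface LlmLike {
    title: string;
    description: string;
    callChatModel(prompt: string): Promise<string>;
}

// Hypothetical helper mirroring the pattern in the hunk: forward every call to the
// wrapped tools, but decorate `title` and `description` with a "(+usage)" suffix.
function makeCountingProxy(inner: LlmLike): LlmLike & { readonly calls: number } {
    let calls = 0;
    return {
        get title() {
            return `${inner.title} (+usage)`;
        },
        get description() {
            return `${inner.description} (+usage)`;
        },
        get calls() {
            return calls;
        },
        async callChatModel(prompt: string) {
            calls += 1; // <- simplified stand-in for the spending tracking in countUsage
            return inner.callChatModel(prompt);
        },
    };
}
```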
@@ -2535,7 +2537,14 @@ class MultipleLlmExecutionTools {
         return 'Multiple LLM Providers';
     }
     get description() {
-
+        const innerModelsTitlesAndDescriptions = this.llmExecutionTools
+            .map(({ title, description }, index) => `${index + 1}) \`${title}\`\n${description}`)
+            .join('\n\n');
+        return spaceTrim((block) => `
+            Multiple LLM Providers:
+
+            ${block(innerModelsTitlesAndDescriptions)}
+        `);
     }
     /**
      * Check the configuration of all execution tools
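The new `description` getter builds a numbered, blank-line-separated list of the inner tools' titles and descriptions and then indents the whole thing with `spaceTrim`. A small sketch of what the `.map(...).join('\n\n')` part produces, using two made-up inner providers (the tool objects here are illustrative, not real promptbook tool instances):

```typescript
// Hypothetical inner tools; only `title` and `description` matter for this sketch.
const innerTools = [
    { title: 'Provider A', description: 'Chat and embedding models from vendor A' },
    { title: 'Provider B', description: 'Chat models from vendor B' },
];

const innerModelsTitlesAndDescriptions = innerTools
    .map(({ title, description }, index) => `${index + 1}) \`${title}\`\n${description}`)
    .join('\n\n');

console.log(innerModelsTitlesAndDescriptions);
// 1) `Provider A`
// Chat and embedding models from vendor A
//
// 2) `Provider B`
// Chat models from vendor B
```

In the actual getter, this string is then interpolated into the `spaceTrim` template under the "Multiple LLM Providers:" heading.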
@@ -5309,6 +5318,7 @@ function knowledgePiecesToString(knowledgePieces) {
  */
 async function getKnowledgeForTask(options) {
     const { tools, preparedPipeline, task, parameters } = options;
+    console.log('!!! getKnowledgeForTask', options);
     const firstKnowlegePiece = preparedPipeline.knowledgePieces[0];
     const firstKnowlegeIndex = firstKnowlegePiece === null || firstKnowlegePiece === void 0 ? void 0 : firstKnowlegePiece.index[0];
     // <- TODO: Do not use just first knowledge piece and first index to determine embedding model, use also keyword search
@@ -6030,6 +6040,12 @@ class MarkdownScraper {
     }
     // ---
     if (!llmTools.callEmbeddingModel) {
+        console.log('!!! No callEmbeddingModel function provided', {
+            'llmTools.title': llmTools.title,
+            'llmTools.description': llmTools.description,
+            'llmTools.callEmbeddingModel': llmTools.callEmbeddingModel,
+            llmTools,
+        });
         // TODO: [🟥] Detect browser / node and make it colorfull
         console.error('No callEmbeddingModel function provided');
     }
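The last two hunks add temporary `!!!`-prefixed debug logging: one dumps the `getKnowledgeForTask` options, the other dumps identifying fields of `llmTools` just before the existing `console.error` when no `callEmbeddingModel` is available. A minimal sketch of that guard-with-diagnostics pattern, assuming an illustrative `ToolsLike` shape and function name (not promptbook's):

```typescript
// Illustrative shape; promptbook's LlmExecutionTools has more members.
interface ToolsLike {
    title: string;
    description: string;
    callEmbeddingModel?: (content: string) => Promise<number[]>;
}

// Hypothetical guard mirroring the hunk: log identifying context first,
// then emit the user-facing error when the capability is missing.
function warnIfNoEmbeddingModel(llmTools: ToolsLike): boolean {
    if (!llmTools.callEmbeddingModel) {
        console.log('No callEmbeddingModel function provided', {
            title: llmTools.title,
            description: llmTools.description,
        });
        console.error('No callEmbeddingModel function provided');
        return false;
    }
    return true;
}
```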
|