@promptbook/node 0.92.0-33 → 0.92.0-34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/index.es.js +16 -6
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/core.index.d.ts +4 -4
- package/esm/typings/src/llm-providers/_common/register/$provideLlmToolsFromEnv.d.ts +1 -1
- package/esm/typings/src/llm-providers/_common/register/createLlmToolsFromConfiguration.d.ts +1 -1
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +16 -6
- package/umd/index.umd.js.map +1 -1
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/MultipleLlmExecutionTools.d.ts +0 -0
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/joinLlmExecutionTools.d.ts +0 -0
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/playground/playground.d.ts +0 -0
package/esm/index.es.js
CHANGED
@@ -30,7 +30,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.92.0-33';
+const PROMPTBOOK_ENGINE_VERSION = '0.92.0-34';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -2922,7 +2922,14 @@ class MultipleLlmExecutionTools {
         return 'Multiple LLM Providers';
     }
     get description() {
-
+        const innerModelsTitlesAndDescriptions = this.llmExecutionTools
+            .map(({ title, description }, index) => `${index + 1}) \`${title}\`\n${description}`)
+            .join('\n\n');
+        return spaceTrim((block) => `
+            Multiple LLM Providers:
+
+            ${block(innerModelsTitlesAndDescriptions)}
+        `);
     }
     /**
      * Check the configuration of all execution tools
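For context, the added `description` getter renders the wrapped providers as a numbered list. Below is a minimal standalone sketch of the same formatting logic; it assumes the named `spaceTrim` export from the `spacetrim` package, and the two provider entries are hypothetical stand-ins for real `LlmExecutionTools` instances:

```ts
import { spaceTrim } from 'spacetrim';

// Hypothetical inner providers; in Promptbook these are the wrapped `LlmExecutionTools`.
const llmExecutionTools = [
    { title: 'OpenAI', description: 'Models provided by OpenAI' },
    { title: 'Anthropic Claude', description: 'Models provided by Anthropic' },
];

// Same formatting as the new getter: "1) `Title`", then the description, entries separated by blank lines.
const innerModelsTitlesAndDescriptions = llmExecutionTools
    .map(({ title, description }, index) => `${index + 1}) \`${title}\`\n${description}`)
    .join('\n\n');

// `spaceTrim` strips the template indentation; `block(...)` re-indents multi-line content to fit.
const description = spaceTrim(
    (block) => `
        Multiple LLM Providers:

        ${block(innerModelsTitlesAndDescriptions)}
    `,
);

console.log(description);
// Multiple LLM Providers:
//
// 1) `OpenAI`
// Models provided by OpenAI
//
// 2) `Anthropic Claude`
// Models provided by Anthropic
```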
@@ -4254,6 +4261,7 @@ function knowledgePiecesToString(knowledgePieces) {
  */
 async function getKnowledgeForTask(options) {
     const { tools, preparedPipeline, task, parameters } = options;
+    console.log('!!! getKnowledgeForTask', options);
     const firstKnowlegePiece = preparedPipeline.knowledgePieces[0];
     const firstKnowlegeIndex = firstKnowlegePiece === null || firstKnowlegePiece === void 0 ? void 0 : firstKnowlegePiece.index[0];
     // <- TODO: Do not use just first knowledge piece and first index to determine embedding model, use also keyword search
@@ -4896,12 +4904,14 @@ function countUsage(llmTools) {
     const spending = new Subject();
     const proxyTools = {
         get title() {
-
-
+            return `${llmTools.title} (+usage)`;
+            // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
+            // <- TODO: [🧈][🧠] Does it make sence to suffix "(+usage)"?
         },
         get description() {
-
-
+            return `${llmTools.description} (+usage)`;
+            // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
+            // <- TODO: [🧈][🧠] Does it make sence to suffix "(+usage)"?
         },
         checkConfiguration() {
             return /* not await */ llmTools.checkConfiguration();
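For illustration, here is a minimal sketch of the wrapping pattern this hunk introduces: the proxy keeps delegating to the wrapped tools but appends a "(+usage)" suffix to the reported title and description. The `MinimalLlmTools` shape and `withUsageSuffix` helper are hypothetical simplifications; the real `countUsage` also proxies the call methods and emits spending events through the `spending` subject.

```ts
type MinimalLlmTools = { title: string; description: string };

// Simplified stand-in for the proxy built inside `countUsage`:
// forwards to the wrapped tools and suffixes the metadata with "(+usage)".
function withUsageSuffix(llmTools: MinimalLlmTools): MinimalLlmTools {
    return {
        get title() {
            return `${llmTools.title} (+usage)`;
        },
        get description() {
            return `${llmTools.description} (+usage)`;
        },
    };
}

const wrapped = withUsageSuffix({ title: 'OpenAI', description: 'Models provided by OpenAI' });
console.log(wrapped.title); // "OpenAI (+usage)"
console.log(wrapped.description); // "Models provided by OpenAI (+usage)"
```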