@promptbook/pdf 0.92.0-33 → 0.92.0-34
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/index.es.js +22 -6
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/core.index.d.ts +4 -4
- package/esm/typings/src/llm-providers/_common/register/$provideLlmToolsFromEnv.d.ts +1 -1
- package/esm/typings/src/llm-providers/_common/register/createLlmToolsFromConfiguration.d.ts +1 -1
- package/esm/typings/src/version.d.ts +1 -1
- package/package.json +2 -2
- package/umd/index.umd.js +22 -6
- package/umd/index.umd.js.map +1 -1
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/MultipleLlmExecutionTools.d.ts +0 -0
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/joinLlmExecutionTools.d.ts +0 -0
- /package/esm/typings/src/llm-providers/{multiple → _multiple}/playground/playground.d.ts +0 -0
package/esm/typings/src/_packages/core.index.d.ts
CHANGED

@@ -102,12 +102,12 @@ import { createLlmToolsFromConfiguration } from '../llm-providers/_common/register/createLlmToolsFromConfiguration';
 import { cacheLlmTools } from '../llm-providers/_common/utils/cache/cacheLlmTools';
 import { countUsage } from '../llm-providers/_common/utils/count-total-usage/countUsage';
 import { limitTotalUsage } from '../llm-providers/_common/utils/count-total-usage/limitTotalUsage';
+import { joinLlmExecutionTools } from '../llm-providers/_multiple/joinLlmExecutionTools';
+import { MultipleLlmExecutionTools } from '../llm-providers/_multiple/MultipleLlmExecutionTools';
 import { _AnthropicClaudeMetadataRegistration } from '../llm-providers/anthropic-claude/register-configuration';
 import { _AzureOpenAiMetadataRegistration } from '../llm-providers/azure-openai/register-configuration';
 import { _DeepseekMetadataRegistration } from '../llm-providers/deepseek/register-configuration';
 import { _GoogleMetadataRegistration } from '../llm-providers/google/register-configuration';
-import { joinLlmExecutionTools } from '../llm-providers/multiple/joinLlmExecutionTools';
-import { MultipleLlmExecutionTools } from '../llm-providers/multiple/MultipleLlmExecutionTools';
 import { _OpenAiMetadataRegistration } from '../llm-providers/openai/register-configuration';
 import { _OpenAiAssistantMetadataRegistration } from '../llm-providers/openai/register-configuration';
 import { migratePipeline } from '../migrations/migratePipeline';
@@ -249,12 +249,12 @@ export { createLlmToolsFromConfiguration };
 export { cacheLlmTools };
 export { countUsage };
 export { limitTotalUsage };
+export { joinLlmExecutionTools };
+export { MultipleLlmExecutionTools };
 export { _AnthropicClaudeMetadataRegistration };
 export { _AzureOpenAiMetadataRegistration };
 export { _DeepseekMetadataRegistration };
 export { _GoogleMetadataRegistration };
-export { joinLlmExecutionTools };
-export { MultipleLlmExecutionTools };
 export { _OpenAiMetadataRegistration };
 export { _OpenAiAssistantMetadataRegistration };
 export { migratePipeline };
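Note (not part of the published diff): the core index keeps exporting `joinLlmExecutionTools` and `MultipleLlmExecutionTools`; only their source folder moved from `multiple` to `_multiple`. The sketch below shows one presumed way to use the joined tools. It assumes `joinLlmExecutionTools` takes the individual execution tools as variadic arguments and that the provider classes and their `apiKey` options exist as written; none of those signatures appear in this diff.

// Hypothetical usage sketch only — the provider constructors and the exact
// joinLlmExecutionTools signature are assumptions, not confirmed by this diff.
import { joinLlmExecutionTools, MultipleLlmExecutionTools } from '@promptbook/core';
import { OpenAiExecutionTools } from '@promptbook/openai';
import { AnthropicClaudeExecutionTools } from '@promptbook/anthropic-claude';

const llmTools: MultipleLlmExecutionTools = joinLlmExecutionTools(
    new OpenAiExecutionTools({ apiKey: process.env.OPENAI_API_KEY! }),
    new AnthropicClaudeExecutionTools({ apiKey: process.env.ANTHROPIC_CLAUDE_API_KEY! }),
);

console.info(llmTools.title); // -> 'Multiple LLM Providers'
console.info(llmTools.description); // -> a numbered list of the wrapped providers (see the description getter in the umd diff below)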
package/esm/typings/src/llm-providers/_common/register/$provideLlmToolsFromEnv.d.ts
CHANGED

@@ -1,4 +1,4 @@
-import { MultipleLlmExecutionTools } from '../../multiple/MultipleLlmExecutionTools';
+import { MultipleLlmExecutionTools } from '../../_multiple/MultipleLlmExecutionTools';
 import type { CreateLlmToolsFromConfigurationOptions } from './createLlmToolsFromConfiguration';
 /**
  * Automatically configures LLM tools from environment variables in Node.js
package/esm/typings/src/llm-providers/_common/register/createLlmToolsFromConfiguration.d.ts
CHANGED

@@ -1,5 +1,5 @@
 import type { string_user_id } from '../../../types/typeAliases';
-import { MultipleLlmExecutionTools } from '../../multiple/MultipleLlmExecutionTools';
+import { MultipleLlmExecutionTools } from '../../_multiple/MultipleLlmExecutionTools';
 import type { LlmToolsConfiguration } from './LlmToolsConfiguration';
 /**
  * Options for `$provideLlmToolsFromEnv`
package/esm/typings/src/version.d.ts
CHANGED

@@ -15,7 +15,7 @@ export declare const BOOK_LANGUAGE_VERSION: string_semantic_version;
 export declare const PROMPTBOOK_ENGINE_VERSION: string_promptbook_version;
 /**
  * Represents the version string of the Promptbook engine.
- * It follows semantic versioning (e.g., `0.92.0-…
+ * It follows semantic versioning (e.g., `0.92.0-33`).
  *
  * @generated
  */
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@promptbook/pdf",
-    "version": "0.92.0-33",
+    "version": "0.92.0-34",
     "description": "It's time for a paradigm shift. The future of software in plain English, French or Latin",
     "private": false,
     "sideEffects": false,
@@ -51,7 +51,7 @@
     "module": "./esm/index.es.js",
     "typings": "./esm/typings/src/_packages/pdf.index.d.ts",
     "peerDependencies": {
-        "@promptbook/core": "0.92.0-33"
+        "@promptbook/core": "0.92.0-34"
     },
     "dependencies": {
         "crypto": "1.0.1",
package/umd/index.umd.js
CHANGED
@@ -25,7 +25,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.92.0-33';
+const PROMPTBOOK_ENGINE_VERSION = '0.92.0-34';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -2772,12 +2772,12 @@
 const spending = new rxjs.Subject();
 const proxyTools = {
     get title() {
-        …
-        …
+        return `${llmTools.title} (+usage)`;
+        // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
+        // <- TODO: [🧈][🧠] Does it make sence to suffix "(+usage)"?
     },
     get description() {
-        …
-        …
+        return `${llmTools.description} (+usage)`;
+        // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
+        // <- TODO: [🧈][🧠] Does it make sence to suffix "(+usage)"?
     },
     checkConfiguration() {
         return /* not await */ llmTools.checkConfiguration();
@@ -2848,7 +2850,14 @@
         return 'Multiple LLM Providers';
     }
     get description() {
-        …
+        const innerModelsTitlesAndDescriptions = this.llmExecutionTools
+            .map(({ title, description }, index) => `${index + 1}) \`${title}\`\n${description}`)
+            .join('\n\n');
+        return spaceTrim__default["default"]((block) => `
+            Multiple LLM Providers:
+
+            ${block(innerModelsTitlesAndDescriptions)}
+        `);
     }
     /**
      * Check the configuration of all execution tools
@@ -5337,6 +5346,7 @@
  */
 async function getKnowledgeForTask(options) {
     const { tools, preparedPipeline, task, parameters } = options;
+    console.log('!!! getKnowledgeForTask', options);
     const firstKnowlegePiece = preparedPipeline.knowledgePieces[0];
     const firstKnowlegeIndex = firstKnowlegePiece === null || firstKnowlegePiece === void 0 ? void 0 : firstKnowlegePiece.index[0];
     // <- TODO: Do not use just first knowledge piece and first index to determine embedding model, use also keyword search
@@ -6058,6 +6068,12 @@
     }
     // ---
     if (!llmTools.callEmbeddingModel) {
+        console.log('!!! No callEmbeddingModel function provided', {
+            'llmTools.title': llmTools.title,
+            'llmTools.description': llmTools.description,
+            'llmTools.callEmbeddingModel': llmTools.callEmbeddingModel,
+            llmTools,
+        });
         // TODO: [🟥] Detect browser / node and make it colorfull
         console.error('No callEmbeddingModel function provided');
     }