@promptbook/node 0.88.0 → 0.89.0-1
This diff shows the content of publicly released versions of this package as they appear in their respective public registries; it is provided for informational purposes only.
- package/README.md +4 -0
- package/esm/index.es.js +11 -4
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/core.index.d.ts +2 -2
- package/esm/typings/src/_packages/types.index.d.ts +10 -0
- package/esm/typings/src/llm-providers/_common/utils/count-total-usage/LlmExecutionToolsWithTotalUsage.d.ts +7 -0
- package/esm/typings/src/llm-providers/_common/utils/count-total-usage/{countTotalUsage.d.ts → countUsage.d.ts} +1 -1
- package/esm/typings/src/playground/BrjappConnector.d.ts +64 -0
- package/esm/typings/src/playground/brjapp-api-schema.d.ts +12879 -0
- package/esm/typings/src/playground/playground.d.ts +5 -0
- package/esm/typings/src/remote-server/socket-types/_subtypes/PromptbookServer_Identification.d.ts +2 -1
- package/esm/typings/src/remote-server/types/RemoteServerOptions.d.ts +15 -3
- package/esm/typings/src/types/typeAliases.d.ts +2 -2
- package/package.json +2 -2
- package/umd/index.umd.js +11 -4
- package/umd/index.umd.js.map +1 -1
package/README.md
CHANGED
@@ -23,6 +23,10 @@
 
 
 
+<blockquote style="color: #ff8811">
+<b>⚠ Warning:</b> This is a pre-release version of the library. It is not yet ready for production use. Please look at <a href="https://www.npmjs.com/package/@promptbook/core?activeTab=versions">latest stable release</a>.
+</blockquote>
+
 ## 📦 Package `@promptbook/node`
 
 - Promptbooks are [divided into several](#-packages) packages, all are published from [single monorepo](https://github.com/webgptorg/promptbook).
package/esm/index.es.js
CHANGED
@@ -30,7 +30,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.88.0';
+const PROMPTBOOK_ENGINE_VERSION = '0.89.0-1';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -4476,8 +4476,9 @@ async function forEachAsync(array, options, callbackfunction) {
  * @returns LLM tools with same functionality with added total cost counting
  * @public exported from `@promptbook/core`
  */
-function countTotalUsage(llmTools) {
+function countUsage(llmTools) {
     let totalUsage = ZERO_USAGE;
+    const spending = new Subject();
     const proxyTools = {
         get title() {
             // TODO: [🧠] Maybe put here some suffix
@@ -4487,12 +4488,15 @@ function countTotalUsage(llmTools) {
             // TODO: [🧠] Maybe put here some suffix
             return llmTools.description;
         },
-
+        checkConfiguration() {
             return /* not await */ llmTools.checkConfiguration();
         },
         listModels() {
             return /* not await */ llmTools.listModels();
         },
+        spending() {
+            return spending.asObservable();
+        },
         getTotalUsage() {
             // <- Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
             return totalUsage;
@@ -4503,6 +4507,7 @@ function countTotalUsage(llmTools) {
             // console.info('[🚕] callChatModel through countTotalUsage');
             const promptResult = await llmTools.callChatModel(prompt);
             totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
             return promptResult;
         };
     }
@@ -4511,6 +4516,7 @@ function countTotalUsage(llmTools) {
             // console.info('[🚕] callCompletionModel through countTotalUsage');
             const promptResult = await llmTools.callCompletionModel(prompt);
             totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
             return promptResult;
         };
     }
@@ -4519,6 +4525,7 @@ function countTotalUsage(llmTools) {
             // console.info('[🚕] callEmbeddingModel through countTotalUsage');
             const promptResult = await llmTools.callEmbeddingModel(prompt);
             totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
             return promptResult;
         };
     }
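Taken together, these hunks rename `countTotalUsage` to `countUsage` and extend the returned proxy with a `spending()` method: each `callChatModel`, `callCompletionModel`, and `callEmbeddingModel` call still adds its usage to the running total, and now also pushes it through an RxJS `Subject` exposed as an observable. A minimal consumer-side sketch of the new API; the import path follows the `@public exported from @promptbook/core` annotation above, while the `OpenAiExecutionTools` provider and its options are illustrative assumptions:

```ts
import { countUsage } from '@promptbook/core';
import { OpenAiExecutionTools } from '@promptbook/openai'; // illustrative provider choice

// Wrap any LlmExecutionTools; the proxy forwards every call unchanged
// while metering the usage each result reports.
const llmTools = countUsage(
    new OpenAiExecutionTools({ apiKey: process.env.OPENAI_API_KEY! }),
);

// New in 0.89.0-1: every model call also emits its usage object on the
// `spending()` observable as soon as the call resolves.
const subscription = llmTools.spending().subscribe((usage) => {
    console.log('LLM call metered:', usage);
});

// ...run prompts through llmTools here...

// The synchronous running total works as before:
console.log('Total usage so far:', llmTools.getTotalUsage());
subscription.unsubscribe();
```

Note that `spending()` returns `spending.asObservable()` rather than the `Subject` itself, so consumers can only listen; only the proxy's own call wrappers can emit.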
@@ -5331,7 +5338,7 @@ async function preparePipeline(pipeline, tools, options) {
     // TODO: [🚐] Make arrayable LLMs -> single LLM DRY
     const _llms = arrayableToArray(tools.llm);
     const llmTools = _llms.length === 1 ? _llms[0] : joinLlmExecutionTools(..._llms);
-    const llmToolsWithUsage = countTotalUsage(llmTools);
+    const llmToolsWithUsage = countUsage(llmTools);
     // <- TODO: [🌯]
     /*
         TODO: [🧠][🪑][🔃] Should this be done or not