@promptbook/remote-server 0.88.0 → 0.89.0-1
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published.
- package/README.md +4 -0
- package/esm/index.es.js +11 -4
- package/esm/index.es.js.map +1 -1
- package/esm/typings/src/_packages/core.index.d.ts +2 -2
- package/esm/typings/src/_packages/types.index.d.ts +10 -0
- package/esm/typings/src/llm-providers/_common/utils/count-total-usage/LlmExecutionToolsWithTotalUsage.d.ts +7 -0
- package/esm/typings/src/llm-providers/_common/utils/count-total-usage/{countTotalUsage.d.ts → countUsage.d.ts} +1 -1
- package/esm/typings/src/playground/BrjappConnector.d.ts +64 -0
- package/esm/typings/src/playground/brjapp-api-schema.d.ts +12879 -0
- package/esm/typings/src/playground/playground.d.ts +5 -0
- package/esm/typings/src/remote-server/socket-types/_subtypes/PromptbookServer_Identification.d.ts +2 -1
- package/esm/typings/src/remote-server/types/RemoteServerOptions.d.ts +15 -3
- package/esm/typings/src/types/typeAliases.d.ts +2 -2
- package/package.json +2 -2
- package/umd/index.umd.js +11 -4
- package/umd/index.umd.js.map +1 -1
package/README.md
CHANGED
@@ -23,6 +23,10 @@
 
 
 
+<blockquote style="color: #ff8811">
+<b>⚠ Warning:</b> This is a pre-release version of the library. It is not yet ready for production use. Please look at <a href="https://www.npmjs.com/package/@promptbook/core?activeTab=versions">latest stable release</a>.
+</blockquote>
+
 ## 📦 Package `@promptbook/remote-server`
 
 - Promptbooks are [divided into several](#-packages) packages, all are published from [single monorepo](https://github.com/webgptorg/promptbook).
package/esm/index.es.js
CHANGED
@@ -31,7 +31,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.88.0';
+const PROMPTBOOK_ENGINE_VERSION = '0.89.0-1';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -2530,8 +2530,9 @@ function addUsage(...usageItems) {
  * @returns LLM tools with same functionality with added total cost counting
  * @public exported from `@promptbook/core`
  */
-function countTotalUsage(llmTools) {
+function countUsage(llmTools) {
     let totalUsage = ZERO_USAGE;
+    const spending = new Subject();
     const proxyTools = {
         get title() {
             // TODO: [🧠] Maybe put here some suffix
@@ -2541,12 +2542,15 @@ function countTotalUsage(llmTools) {
             // TODO: [🧠] Maybe put here some suffix
             return llmTools.description;
         },
-
+        checkConfiguration() {
             return /* not await */ llmTools.checkConfiguration();
         },
         listModels() {
             return /* not await */ llmTools.listModels();
         },
+        spending() {
+            return spending.asObservable();
+        },
         getTotalUsage() {
             // <- Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
             return totalUsage;
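
The notable addition in this hunk is the new `spending()` method, which exposes the private `Subject` as a read-only stream of per-call usage. Below is a minimal consumer sketch in TypeScript — assuming `Subject` comes from RxJS (its import is not visible in this diff), that `countUsage` is imported from `@promptbook/core` as its JSDoc states, and a hypothetical preconfigured `llmTools` instance:

```ts
import { countUsage } from '@promptbook/core';

// Hypothetical stand-in for any configured LLM provider adapter:
declare const llmTools: any; // e.g. an OpenAI execution tools instance

const llmToolsWithUsage = countUsage(llmTools);

// `spending()` hands out the private Subject via `asObservable()`,
// so subscribers see each call's usage but cannot emit into the stream.
const subscription = llmToolsWithUsage.spending().subscribe((usage) => {
    console.info('LLM call finished, usage:', usage);
});

// ... run some prompts through llmToolsWithUsage ...

subscription.unsubscribe(); // stop listening; the usage counter keeps working
```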
@@ -2557,6 +2561,7 @@ function countTotalUsage(llmTools) {
             // console.info('[🚕] callChatModel through countTotalUsage');
             const promptResult = await llmTools.callChatModel(prompt);
             totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
             return promptResult;
         };
     }
@@ -2565,6 +2570,7 @@ function countTotalUsage(llmTools) {
             // console.info('[🚕] callCompletionModel through countTotalUsage');
             const promptResult = await llmTools.callCompletionModel(prompt);
             totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
             return promptResult;
         };
     }
@@ -2573,6 +2579,7 @@ function countTotalUsage(llmTools) {
             // console.info('[🚕] callEmbeddingModel through countTotalUsage');
             const promptResult = await llmTools.callEmbeddingModel(prompt);
             totalUsage = addUsage(totalUsage, promptResult.usage);
+            spending.next(promptResult.usage);
             return promptResult;
         };
     }
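
All three call paths (`callChatModel`, `callCompletionModel`, `callEmbeddingModel`) now push the per-call usage onto the same stream immediately after folding it into `totalUsage`. One consequence, sketched below as a continuation of the earlier example and assuming `addUsage` and `ZERO_USAGE` are importable from `@promptbook/core` (their export status is not confirmed by this diff): reducing the stream with `addUsage` should reproduce `getTotalUsage()` for calls made after subscribing.

```ts
import { addUsage, ZERO_USAGE } from '@promptbook/core'; // assumed exports

// Re-derive a running total from the spending stream:
let observedTotal = ZERO_USAGE;
llmToolsWithUsage.spending().subscribe((usage) => {
    observedTotal = addUsage(observedTotal, usage);
    // For calls made after this subscription was created, observedTotal
    // should track llmToolsWithUsage.getTotalUsage() step for step.
});
```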
@@ -3855,7 +3862,7 @@ async function preparePipeline(pipeline, tools, options) {
     // TODO: [🚐] Make arrayable LLMs -> single LLM DRY
     const _llms = arrayableToArray(tools.llm);
     const llmTools = _llms.length === 1 ? _llms[0] : joinLlmExecutionTools(..._llms);
-    const llmToolsWithUsage = countTotalUsage(llmTools);
+    const llmToolsWithUsage = countUsage(llmTools);
     // <- TODO: [🌯]
     /*
     TODO: [🧠][🪑][🔃] Should this be done or not