@promptbook/documents 0.88.0 → 0.89.0-1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -23,6 +23,10 @@
 
 
 
+ <blockquote style="color: #ff8811">
+     <b>⚠ Warning:</b> This is a pre-release version of the library. It is not yet ready for production use. Please look at <a href="https://www.npmjs.com/package/@promptbook/core?activeTab=versions">latest stable release</a>.
+ </blockquote>
+
  ## 📦 Package `@promptbook/documents`
 
  - Promptbooks are [divided into several](#-packages) packages, all are published from [single monorepo](https://github.com/webgptorg/promptbook).
package/esm/index.es.js CHANGED
@@ -28,7 +28,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
   * @generated
   * @see https://github.com/webgptorg/promptbook
   */
- const PROMPTBOOK_ENGINE_VERSION = '0.88.0';
+ const PROMPTBOOK_ENGINE_VERSION = '0.89.0-1';
  /**
   * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
   * Note: [💞] Ignore a discrepancy between file name and entity name
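
Apart from the `BOOK_LANGUAGE_VERSION` constant visible in the hunk header, this hunk only bumps the generated engine version string to the pre-release `0.89.0-1`. A minimal sketch of a runtime check, assuming both constants are re-exported from the package entry point (the export itself is not shown in this diff):

    import { BOOK_LANGUAGE_VERSION, PROMPTBOOK_ENGINE_VERSION } from '@promptbook/documents';

    // Log which engine build and book language version are actually installed
    console.info(`Promptbook engine ${PROMPTBOOK_ENGINE_VERSION}, book language ${BOOK_LANGUAGE_VERSION}`);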
@@ -2755,8 +2755,9 @@ function addUsage(...usageItems) {
   * @returns LLM tools with same functionality with added total cost counting
   * @public exported from `@promptbook/core`
   */
- function countTotalUsage(llmTools) {
+ function countUsage(llmTools) {
      let totalUsage = ZERO_USAGE;
+     const spending = new Subject();
      const proxyTools = {
          get title() {
              // TODO: [🧠] Maybe put here some suffix
@@ -2766,12 +2767,15 @@ function countTotalUsage(llmTools) {
              // TODO: [🧠] Maybe put here some suffix
              return llmTools.description;
          },
-         async checkConfiguration() {
+         checkConfiguration() {
              return /* not await */ llmTools.checkConfiguration();
          },
          listModels() {
              return /* not await */ llmTools.listModels();
          },
+         spending() {
+             return spending.asObservable();
+         },
          getTotalUsage() {
              // <- Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
              return totalUsage;
@@ -2782,6 +2786,7 @@ function countTotalUsage(llmTools) {
              // console.info('[🚕] callChatModel through countTotalUsage');
              const promptResult = await llmTools.callChatModel(prompt);
              totalUsage = addUsage(totalUsage, promptResult.usage);
+             spending.next(promptResult.usage);
              return promptResult;
          };
      }
@@ -2790,6 +2795,7 @@ function countTotalUsage(llmTools) {
              // console.info('[🚕] callCompletionModel through countTotalUsage');
              const promptResult = await llmTools.callCompletionModel(prompt);
              totalUsage = addUsage(totalUsage, promptResult.usage);
+             spending.next(promptResult.usage);
              return promptResult;
          };
      }
@@ -2798,6 +2804,7 @@ function countTotalUsage(llmTools) {
              // console.info('[🚕] callEmbeddingModel through countTotalUsage');
              const promptResult = await llmTools.callEmbeddingModel(prompt);
              totalUsage = addUsage(totalUsage, promptResult.usage);
+             spending.next(promptResult.usage);
              return promptResult;
          };
      }
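
Together, the hunks above rename `countTotalUsage` to `countUsage` and add a `spending()` method backed by an RxJS `Subject`: after every chat, completion, and embedding call, the result's usage is pushed to subscribers, while `getTotalUsage()` still returns the accumulated total. A minimal consumer-side sketch, assuming the helper is imported from `@promptbook/core` (as its JSDoc states) and that `myLlmTools` is a stand-in for any existing LLM execution tools instance:

    import { countUsage } from '@promptbook/core';

    // Wrap existing LLM tools so that usage is counted call by call
    const llmToolsWithUsage = countUsage(myLlmTools);

    // New in 0.89.0-1: each call's usage is emitted on an observable stream
    const subscription = llmToolsWithUsage.spending().subscribe((usage) => {
        console.info('One call consumed:', usage);
    });

    // The running total is still available as before
    console.info('Total so far:', llmToolsWithUsage.getTotalUsage());

    // Stop listening once per-call reporting is no longer needed
    subscription.unsubscribe();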
@@ -3684,7 +3691,7 @@ async function preparePipeline(pipeline, tools, options) {
      // TODO: [🚐] Make arrayable LLMs -> single LLM DRY
      const _llms = arrayableToArray(tools.llm);
      const llmTools = _llms.length === 1 ? _llms[0] : joinLlmExecutionTools(..._llms);
-     const llmToolsWithUsage = countTotalUsage(llmTools);
+     const llmToolsWithUsage = countUsage(llmTools);
      // <- TODO: [🌯]
      /*
      TODO: [🧠][🪑][🔃] Should this be done or not