@promptbook/documents 0.104.0-12 → 0.104.0-14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. package/esm/index.es.js +25 -17
  2. package/esm/index.es.js.map +1 -1
  3. package/esm/typings/src/_packages/components.index.d.ts +0 -6
  4. package/esm/typings/src/book-components/Chat/save/_common/string_chat_format_name.d.ts +1 -1
  5. package/esm/typings/src/book-components/Chat/types/ChatMessage.d.ts +4 -1
  6. package/esm/typings/src/book-components/_common/Dropdown/Dropdown.d.ts +5 -1
  7. package/esm/typings/src/book-components/_common/HamburgerMenu/HamburgerMenu.d.ts +4 -0
  8. package/esm/typings/src/book-components/icons/AboutIcon.d.ts +5 -1
  9. package/esm/typings/src/book-components/icons/AttachmentIcon.d.ts +6 -2
  10. package/esm/typings/src/book-components/icons/CameraIcon.d.ts +6 -2
  11. package/esm/typings/src/book-components/icons/DownloadIcon.d.ts +5 -1
  12. package/esm/typings/src/book-components/icons/MenuIcon.d.ts +5 -1
  13. package/esm/typings/src/book-components/icons/SaveIcon.d.ts +6 -2
  14. package/esm/typings/src/collection/agent-collection/constructors/agent-collection-in-supabase/AgentCollectionInSupabase.d.ts +7 -5
  15. package/esm/typings/src/commands/_common/types/Command.d.ts +1 -1
  16. package/esm/typings/src/commitments/_base/BookCommitment.d.ts +1 -1
  17. package/esm/typings/src/formfactors/_common/FormfactorDefinition.d.ts +1 -1
  18. package/esm/typings/src/llm-providers/_common/utils/count-total-usage/countUsage.d.ts +7 -3
  19. package/esm/typings/src/llm-providers/_multiple/joinLlmExecutionTools.d.ts +11 -7
  20. package/esm/typings/src/remote-server/ui/ServerApp.d.ts +5 -1
  21. package/esm/typings/src/types/typeAliasEmoji.d.ts +2 -2
  22. package/esm/typings/src/utils/random/$randomAgentPersona.d.ts +4 -0
  23. package/esm/typings/src/utils/random/$randomItem.d.ts +1 -1
  24. package/esm/typings/src/utils/random/$randomSeed.d.ts +1 -1
  25. package/esm/typings/src/version.d.ts +1 -1
  26. package/package.json +2 -2
  27. package/umd/index.umd.js +25 -17
  28. package/umd/index.umd.js.map +1 -1
package/umd/index.umd.js CHANGED
@@ -25,7 +25,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.104.0-12';
+ const PROMPTBOOK_ENGINE_VERSION = '0.104.0-14';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -1755,8 +1755,8 @@
  */
  function removeDiacritics(input) {
  /*eslint no-control-regex: "off"*/
- return input.replace(/[^\u0000-\u007E]/g, (a) => {
- return DIACRITIC_VARIANTS_LETTERS[a] || a;
+ return input.replace(/[^\u0000-\u007E]/g, (character) => {
+ return DIACRITIC_VARIANTS_LETTERS[character] || character;
  });
  }
  /**
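
The change above only renames the callback parameter, but the surrounding function shows the package's diacritics-stripping approach: every character outside the ASCII range is looked up in a substitution table and replaced by its plain-letter variant when one is known. A minimal, self-contained sketch of that pattern in TypeScript (the tiny map below is illustrative only; the package ships a much larger DIACRITIC_VARIANTS_LETTERS table):

// Minimal sketch of the diacritics-stripping pattern shown in the hunk above.
// The real DIACRITIC_VARIANTS_LETTERS table in the package is much larger.
const DIACRITIC_VARIANTS_LETTERS: Record<string, string> = {
    'á': 'a',
    'č': 'c',
    'í': 'i',
    'ř': 'r',
    'š': 's',
};

function removeDiacritics(input: string): string {
    // Replace every character outside the ASCII range with its plain variant, if known
    return input.replace(/[^\u0000-\u007E]/g, (character) => {
        return DIACRITIC_VARIANTS_LETTERS[character] || character;
    });
}

console.info(removeDiacritics('Příliš')); // -> 'Prilis'
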
@@ -3927,7 +3927,7 @@
  tasks.push(task);
  runningTasks.push(task);
  /* not await */ Promise.resolve(task).then(() => {
- runningTasks = runningTasks.filter((t) => t !== task);
+ runningTasks = runningTasks.filter((runningTask) => runningTask !== task);
  });
  if (maxParallelCount < runningTasks.length) {
  await Promise.race(runningTasks);
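
Again only a parameter rename, but the hunk exposes a common concurrency-limiting pattern: each started task is tracked in runningTasks, removes itself from the list when it fulfills, and once maxParallelCount is exceeded the loop awaits Promise.race so a slot frees up before the next task starts. A generic, self-contained sketch of that pattern, independent of Promptbook internals (all names below are illustrative, not Promptbook API):

// Generic sketch of the "limit parallel work with Promise.race" pattern above.
async function forEachWithLimit<TItem>(
    items: ReadonlyArray<TItem>,
    maxParallelCount: number,
    worker: (item: TItem) => Promise<void>,
): Promise<void> {
    let runningTasks: Array<Promise<void>> = [];

    for (const item of items) {
        const task = worker(item);
        runningTasks.push(task);

        // Drop the task from the running list once it fulfills; rejected tasks are kept
        // so that their errors still surface through `Promise.race`/`Promise.all` below.
        /* not await */ task.then(
            () => {
                runningTasks = runningTasks.filter((runningTask) => runningTask !== task);
            },
            () => {
                /* keep rejected tasks in the list */
            },
        );

        // Too many tasks in flight -> wait until at least one of them settles
        if (maxParallelCount < runningTasks.length) {
            await Promise.race(runningTasks);
        }
    }

    // Wait for whatever is still running
    await Promise.all(runningTasks);
}
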
@@ -3984,10 +3984,14 @@
  }
 
  /**
- * Intercepts LLM tools and counts total usage of the tools
+ * Intercepts LLM tools and counts total usage of the tools.
  *
- * @param llmTools LLM tools to be intercepted with usage counting
- * @returns LLM tools with same functionality with added total cost counting
+ * This function wraps the provided `LlmExecutionTools` with a proxy that tracks the cumulative
+ * usage (tokens, cost, etc.) across all model calls. It provides a way to monitor spending
+ * in real-time through an observable.
+ *
+ * @param llmTools - The LLM tools to be intercepted and tracked
+ * @returns An augmented version of the tools that includes usage tracking capabilities
  * @public exported from `@promptbook/core`
  */
  function countUsage(llmTools) {
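
The rewritten JSDoc describes countUsage as a proxy that accumulates usage (tokens, cost, etc.) across all model calls and exposes the running total through an observable. A rough usage sketch under stated assumptions: the OpenAiExecutionTools provider from @promptbook/openai, its constructor options, and a spending() observable accessor on the returned tools are assumptions here; verify the exact names against the published typings.

import { countUsage } from '@promptbook/core';
import { OpenAiExecutionTools } from '@promptbook/openai'; // <- assumed provider package, for illustration only

// Wrap the provider so that every model call is counted
const llmTools = countUsage(
    new OpenAiExecutionTools({ apiKey: process.env.OPENAI_API_KEY! }),
    //                         <- assumed constructor options; check the provider typings
);

// Hypothetical observable accessor; the real member name on the returned tools may differ
llmTools.spending().subscribe((usage) => {
    console.info('Cumulative usage so far:', usage);
});

// From here on, `llmTools` is used like any other LlmExecutionTools instance
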
@@ -4252,17 +4256,21 @@
  */
 
  /**
- * Joins multiple LLM Execution Tools into one
+ * Joins multiple LLM Execution Tools into one.
  *
- * @returns {LlmExecutionTools} Single wrapper for multiple LlmExecutionTools
+ * This function takes a list of `LlmExecutionTools` and returns a single unified
+ * `MultipleLlmExecutionTools` object. It provides failover and aggregation logic:
  *
- * 0) If there is no LlmExecutionTools, it warns and returns valid but empty LlmExecutionTools
- * 1) If there is only one LlmExecutionTools, it returns it wrapped in a proxy object
- * 2) If there are multiple LlmExecutionTools, first will be used first, second will be used if the first hasn`t defined model variant or fails, etc.
- * 3) When all LlmExecutionTools fail, it throws an error with a list of all errors merged into one
+ * 1. **Failover**: When a model call is made, it tries providers in the order they were provided.
+ * If the first provider doesn't support the requested model or fails, it tries the next one.
+ * 2. **Aggregation**: `listModels` returns a combined list of all models available from all providers.
+ * 3. **Empty case**: If no tools are provided, it logs a warning (as Promptbook requires LLMs to function).
  *
+ * @param title - A descriptive title for this collection of joined tools
+ * @param llmExecutionTools - An array of execution tools to be joined
+ * @returns A single unified execution tool wrapper
  *
- * Tip: You don't have to use this function directly, just pass an array of LlmExecutionTools to the `ExecutionTools`
+ * Tip: You don't have to use this function directly, just pass an array of LlmExecutionTools to the `ExecutionTools`.
  *
  * @public exported from `@promptbook/core`
  */
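
According to the rewritten JSDoc, joinLlmExecutionTools merges several providers into one LlmExecutionTools wrapper with ordered failover and an aggregated listModels. A hedged sketch of how it might be called; the provider packages and the exact signature (rest parameters vs. a single array after title) are assumptions to verify against the typings.

import { joinLlmExecutionTools } from '@promptbook/core';
import { OpenAiExecutionTools } from '@promptbook/openai'; // <- assumed provider packages, for illustration only
import { AnthropicClaudeExecutionTools } from '@promptbook/anthropic-claude';

const openAi = new OpenAiExecutionTools({ apiKey: process.env.OPENAI_API_KEY! });
const claude = new AnthropicClaudeExecutionTools({ apiKey: process.env.ANTHROPIC_API_KEY! });

// Tries `openAi` first and falls back to `claude` if the model variant is not
// supported or the call fails; `listModels()` aggregates both providers.
// NOTE: Whether the tools are passed as rest parameters (shown here) or as a
//       single array should be verified against the published typings.
const llm = joinLlmExecutionTools('My joined providers', openAi, claude);

// As the Tip in the JSDoc says, you usually don't call this directly - passing
// an array of tools as the `llm` field of `ExecutionTools` has the same effect:
// const tools = { llm: [openAi, claude] };
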
@@ -6786,7 +6794,7 @@
  const taskEmbeddingResult = await llmTools.callEmbeddingModel(taskEmbeddingPrompt);
  const knowledgePiecesWithRelevance = preparedPipeline.knowledgePieces.map((knowledgePiece) => {
  const { index } = knowledgePiece;
- const knowledgePieceIndex = index.find((i) => i.modelName === firstKnowledgeIndex.modelName);
+ const knowledgePieceIndex = index.find((knowledgePieceIndex) => knowledgePieceIndex.modelName === firstKnowledgeIndex.modelName);
  // <- TODO: Do not use just first knowledge piece and first index to determine embedding model
  if (knowledgePieceIndex === undefined) {
  return {
@@ -7234,7 +7242,7 @@
  resovedParameterNames = [...resovedParameterNames, currentTask.resultingParameterName];
  })
  .then(() => {
- resolving = resolving.filter((w) => w !== work);
+ resolving = resolving.filter((workItem) => workItem !== work);
  });
  // <- Note: Errors are catched here [3]
  // TODO: BUT if in multiple tasks are errors, only the first one is catched so maybe we should catch errors here and save them to errors array here
@@ -7400,7 +7408,7 @@
  // Calculate and update tldr based on pipeline progress
  const cv = newOngoingResult;
  // Calculate progress based on parameters resolved vs total parameters
- const totalParameters = pipeline.parameters.filter((p) => !p.isInput).length;
+ const totalParameters = pipeline.parameters.filter((parameter) => !parameter.isInput).length;
  let resolvedParameters = 0;
  let currentTaskTitle = '';
  // Get the resolved parameters from output parameters