@promptbook/website-crawler 0.105.0-1 → 0.105.0-3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/esm/index.es.js +80 -63
  2. package/esm/index.es.js.map +1 -1
  3. package/esm/typings/src/_packages/core.index.d.ts +2 -0
  4. package/esm/typings/src/_packages/types.index.d.ts +4 -0
  5. package/esm/typings/src/book-2.0/agent-source/AgentBasicInformation.d.ts +10 -3
  6. package/esm/typings/src/book-2.0/agent-source/AgentModelRequirements.d.ts +11 -1
  7. package/esm/typings/src/book-2.0/agent-source/communication-samples.test.d.ts +1 -0
  8. package/esm/typings/src/book-2.0/agent-source/createAgentModelRequirementsWithCommitments.blocks.test.d.ts +1 -0
  9. package/esm/typings/src/book-2.0/agent-source/createAgentModelRequirementsWithCommitments.import.test.d.ts +1 -0
  10. package/esm/typings/src/book-2.0/agent-source/parseAgentSource.import.test.d.ts +1 -0
  11. package/esm/typings/src/book-2.0/agent-source/parseAgentSourceWithCommitments.blocks.test.d.ts +1 -0
  12. package/esm/typings/src/commitments/USE_TIME/USE_TIME.d.ts +40 -0
  13. package/esm/typings/src/commitments/USE_TIME/USE_TIME.test.d.ts +1 -0
  14. package/esm/typings/src/commitments/_base/BaseCommitmentDefinition.d.ts +8 -0
  15. package/esm/typings/src/commitments/_base/CommitmentDefinition.d.ts +8 -0
  16. package/esm/typings/src/commitments/index.d.ts +11 -2
  17. package/esm/typings/src/config.d.ts +1 -0
  18. package/esm/typings/src/import-plugins/$fileImportPlugins.d.ts +7 -0
  19. package/esm/typings/src/import-plugins/AgentFileImportPlugin.d.ts +7 -0
  20. package/esm/typings/src/import-plugins/FileImportPlugin.d.ts +24 -0
  21. package/esm/typings/src/import-plugins/JsonFileImportPlugin.d.ts +7 -0
  22. package/esm/typings/src/import-plugins/TextFileImportPlugin.d.ts +7 -0
  23. package/esm/typings/src/llm-providers/_common/utils/cache/cacheLlmTools.d.ts +2 -1
  24. package/esm/typings/src/llm-providers/_common/utils/count-total-usage/countUsage.d.ts +2 -2
  25. package/esm/typings/src/llm-providers/agent/Agent.d.ts +9 -2
  26. package/esm/typings/src/llm-providers/agent/AgentLlmExecutionTools.d.ts +3 -1
  27. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionTools.d.ts +10 -0
  28. package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +1 -1
  29. package/esm/typings/src/scripting/javascript/JavascriptExecutionToolsOptions.d.ts +6 -1
  30. package/esm/typings/src/types/ModelRequirements.d.ts +6 -12
  31. package/esm/typings/src/utils/execCommand/$execCommandNormalizeOptions.d.ts +2 -3
  32. package/esm/typings/src/utils/execCommand/ExecCommandOptions.d.ts +7 -1
  33. package/esm/typings/src/utils/organization/keepImported.d.ts +9 -0
  34. package/esm/typings/src/utils/organization/keepTypeImported.d.ts +0 -1
  35. package/esm/typings/src/version.d.ts +1 -1
  36. package/package.json +2 -2
  37. package/umd/index.umd.js +80 -63
  38. package/umd/index.umd.js.map +1 -1
package/esm/index.es.js CHANGED
@@ -27,7 +27,7 @@ const BOOK_LANGUAGE_VERSION = '2.0.0';
27
27
  * @generated
28
28
  * @see https://github.com/webgptorg/promptbook
29
29
  */
30
- const PROMPTBOOK_ENGINE_VERSION = '0.105.0-1';
30
+ const PROMPTBOOK_ENGINE_VERSION = '0.105.0-3';
31
31
  /**
32
32
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
33
33
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -1016,6 +1016,7 @@ const PROMPTBOOK_COLOR = Color.fromString('promptbook');
1016
1016
  SEPARATOR: Color.fromHex('#cccccc'),
1017
1017
  COMMITMENT: Color.fromHex('#DA0F78'),
1018
1018
  PARAMETER: Color.fromHex('#8e44ad'),
1019
+ CODE_BLOCK: Color.fromHex('#7700ffff'),
1019
1020
  });
1020
1021
  // <- TODO: [🧠][🈵] Using `Color` here increases the package size approx 3kb, maybe remove it
1021
1022
  /**
@@ -3952,74 +3953,90 @@ function addUsage(...usageItems) {
3952
3953
  * in real-time through an observable.
3953
3954
  *
3954
3955
  * @param llmTools - The LLM tools to be intercepted and tracked
3955
- * @returns An augmented version of the tools that includes usage tracking capabilities
3956
+ * @returns Full proxy of the tools with added usage tracking capabilities
3956
3957
  * @public exported from `@promptbook/core`
3957
3958
  */
3958
3959
  function countUsage(llmTools) {
3959
3960
  let totalUsage = ZERO_USAGE;
3960
3961
  const spending = new Subject();
3961
- const proxyTools = {
3962
- get title() {
3963
- return `${llmTools.title} (+usage)`;
3964
- // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
3965
- // <- TODO: [🧈][🧠] Does it make sense to suffix "(+usage)"?
3966
- },
3967
- get description() {
3968
- return `${llmTools.description} (+usage)`;
3969
- // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
3970
- // <- TODO: [🧈][🧠] Does it make sense to suffix "(+usage)"?
3971
- },
3972
- checkConfiguration() {
3973
- return /* not await */ llmTools.checkConfiguration();
3974
- },
3975
- listModels() {
3976
- return /* not await */ llmTools.listModels();
3977
- },
3978
- spending() {
3979
- return spending.asObservable();
3980
- },
3981
- getTotalUsage() {
3982
- // <- Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
3983
- return totalUsage;
3962
+ // Create a Proxy to intercept all property access and ensure full proxying of all properties
3963
+ const proxyTools = new Proxy(llmTools, {
3964
+ get(target, prop, receiver) {
3965
+ // Handle title property
3966
+ if (prop === 'title') {
3967
+ return `${target.title} (+usage)`;
3968
+ // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
3969
+ // <- TODO: [🧈][🧠] Does it make sense to suffix "(+usage)"?
3970
+ }
3971
+ // Handle description property
3972
+ if (prop === 'description') {
3973
+ return `${target.description} (+usage)`;
3974
+ // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
3975
+ // <- TODO: [🧈][🧠] Does it make sense to suffix "(+usage)"?
3976
+ }
3977
+ // Handle spending method (new method added by this wrapper)
3978
+ if (prop === 'spending') {
3979
+ return () => {
3980
+ return spending.asObservable();
3981
+ };
3982
+ }
3983
+ // Handle getTotalUsage method (new method added by this wrapper)
3984
+ if (prop === 'getTotalUsage') {
3985
+ // <- Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
3986
+ return () => {
3987
+ return totalUsage;
3988
+ };
3989
+ }
3990
+ // Handle callChatModel method with usage counting
3991
+ if (prop === 'callChatModel' && target.callChatModel !== undefined) {
3992
+ return async (prompt) => {
3993
+ // console.info('[🚕] callChatModel through countTotalUsage');
3994
+ const promptResult = await target.callChatModel(prompt);
3995
+ totalUsage = addUsage(totalUsage, promptResult.usage);
3996
+ spending.next(promptResult.usage);
3997
+ return promptResult;
3998
+ };
3999
+ }
4000
+ // Handle callCompletionModel method with usage counting
4001
+ if (prop === 'callCompletionModel' && target.callCompletionModel !== undefined) {
4002
+ return async (prompt) => {
4003
+ // console.info('[🚕] callCompletionModel through countTotalUsage');
4004
+ const promptResult = await target.callCompletionModel(prompt);
4005
+ totalUsage = addUsage(totalUsage, promptResult.usage);
4006
+ spending.next(promptResult.usage);
4007
+ return promptResult;
4008
+ };
4009
+ }
4010
+ // Handle callEmbeddingModel method with usage counting
4011
+ if (prop === 'callEmbeddingModel' && target.callEmbeddingModel !== undefined) {
4012
+ return async (prompt) => {
4013
+ // console.info('[🚕] callEmbeddingModel through countTotalUsage');
4014
+ const promptResult = await target.callEmbeddingModel(prompt);
4015
+ totalUsage = addUsage(totalUsage, promptResult.usage);
4016
+ spending.next(promptResult.usage);
4017
+ return promptResult;
4018
+ };
4019
+ }
4020
+ // Handle callImageGenerationModel method with usage counting
4021
+ if (prop === 'callImageGenerationModel' && target.callImageGenerationModel !== undefined) {
4022
+ return async (prompt) => {
4023
+ // console.info('[🚕] callImageGenerationModel through countTotalUsage');
4024
+ const promptResult = await target.callImageGenerationModel(prompt);
4025
+ totalUsage = addUsage(totalUsage, promptResult.usage);
4026
+ spending.next(promptResult.usage);
4027
+ return promptResult;
4028
+ };
4029
+ }
4030
+ // <- Note: [🤖]
4031
+ // For all other properties and methods, delegate to the original target
4032
+ const value = Reflect.get(target, prop, receiver);
4033
+ // If it's a function, bind it to the target to preserve context
4034
+ if (typeof value === 'function') {
4035
+ return value.bind(target);
4036
+ }
4037
+ return value;
3984
4038
  },
3985
- };
3986
- if (llmTools.callChatModel !== undefined) {
3987
- proxyTools.callChatModel = async (prompt) => {
3988
- // console.info('[🚕] callChatModel through countTotalUsage');
3989
- const promptResult = await llmTools.callChatModel(prompt);
3990
- totalUsage = addUsage(totalUsage, promptResult.usage);
3991
- spending.next(promptResult.usage);
3992
- return promptResult;
3993
- };
3994
- }
3995
- if (llmTools.callCompletionModel !== undefined) {
3996
- proxyTools.callCompletionModel = async (prompt) => {
3997
- // console.info('[🚕] callCompletionModel through countTotalUsage');
3998
- const promptResult = await llmTools.callCompletionModel(prompt);
3999
- totalUsage = addUsage(totalUsage, promptResult.usage);
4000
- spending.next(promptResult.usage);
4001
- return promptResult;
4002
- };
4003
- }
4004
- if (llmTools.callEmbeddingModel !== undefined) {
4005
- proxyTools.callEmbeddingModel = async (prompt) => {
4006
- // console.info('[🚕] callEmbeddingModel through countTotalUsage');
4007
- const promptResult = await llmTools.callEmbeddingModel(prompt);
4008
- totalUsage = addUsage(totalUsage, promptResult.usage);
4009
- spending.next(promptResult.usage);
4010
- return promptResult;
4011
- };
4012
- }
4013
- if (llmTools.callImageGenerationModel !== undefined) {
4014
- proxyTools.callImageGenerationModel = async (prompt) => {
4015
- // console.info('[🚕] callImageGenerationModel through countTotalUsage');
4016
- const promptResult = await llmTools.callImageGenerationModel(prompt);
4017
- totalUsage = addUsage(totalUsage, promptResult.usage);
4018
- spending.next(promptResult.usage);
4019
- return promptResult;
4020
- };
4021
- }
4022
- // <- Note: [🤖]
4039
+ });
4023
4040
  return proxyTools;
4024
4041
  }
4025
4042
  /**