@promptbook/legacy-documents 0.105.0-1 → 0.105.0-3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/esm/index.es.js +90 -66
  2. package/esm/index.es.js.map +1 -1
  3. package/esm/typings/src/_packages/core.index.d.ts +2 -0
  4. package/esm/typings/src/_packages/types.index.d.ts +4 -0
  5. package/esm/typings/src/book-2.0/agent-source/AgentBasicInformation.d.ts +10 -3
  6. package/esm/typings/src/book-2.0/agent-source/AgentModelRequirements.d.ts +11 -1
  7. package/esm/typings/src/book-2.0/agent-source/communication-samples.test.d.ts +1 -0
  8. package/esm/typings/src/book-2.0/agent-source/createAgentModelRequirementsWithCommitments.blocks.test.d.ts +1 -0
  9. package/esm/typings/src/book-2.0/agent-source/createAgentModelRequirementsWithCommitments.import.test.d.ts +1 -0
  10. package/esm/typings/src/book-2.0/agent-source/parseAgentSource.import.test.d.ts +1 -0
  11. package/esm/typings/src/book-2.0/agent-source/parseAgentSourceWithCommitments.blocks.test.d.ts +1 -0
  12. package/esm/typings/src/commitments/USE_TIME/USE_TIME.d.ts +40 -0
  13. package/esm/typings/src/commitments/USE_TIME/USE_TIME.test.d.ts +1 -0
  14. package/esm/typings/src/commitments/_base/BaseCommitmentDefinition.d.ts +8 -0
  15. package/esm/typings/src/commitments/_base/CommitmentDefinition.d.ts +8 -0
  16. package/esm/typings/src/commitments/index.d.ts +11 -2
  17. package/esm/typings/src/config.d.ts +1 -0
  18. package/esm/typings/src/import-plugins/$fileImportPlugins.d.ts +7 -0
  19. package/esm/typings/src/import-plugins/AgentFileImportPlugin.d.ts +7 -0
  20. package/esm/typings/src/import-plugins/FileImportPlugin.d.ts +24 -0
  21. package/esm/typings/src/import-plugins/JsonFileImportPlugin.d.ts +7 -0
  22. package/esm/typings/src/import-plugins/TextFileImportPlugin.d.ts +7 -0
  23. package/esm/typings/src/llm-providers/_common/utils/cache/cacheLlmTools.d.ts +2 -1
  24. package/esm/typings/src/llm-providers/_common/utils/count-total-usage/countUsage.d.ts +2 -2
  25. package/esm/typings/src/llm-providers/agent/Agent.d.ts +9 -2
  26. package/esm/typings/src/llm-providers/agent/AgentLlmExecutionTools.d.ts +3 -1
  27. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionTools.d.ts +10 -0
  28. package/esm/typings/src/llm-providers/remote/RemoteLlmExecutionTools.d.ts +1 -1
  29. package/esm/typings/src/scripting/javascript/JavascriptExecutionToolsOptions.d.ts +6 -1
  30. package/esm/typings/src/types/ModelRequirements.d.ts +6 -12
  31. package/esm/typings/src/utils/execCommand/$execCommandNormalizeOptions.d.ts +2 -3
  32. package/esm/typings/src/utils/execCommand/ExecCommandOptions.d.ts +7 -1
  33. package/esm/typings/src/utils/organization/keepImported.d.ts +9 -0
  34. package/esm/typings/src/utils/organization/keepTypeImported.d.ts +0 -1
  35. package/esm/typings/src/version.d.ts +1 -1
  36. package/package.json +2 -2
  37. package/umd/index.umd.js +90 -66
  38. package/umd/index.umd.js.map +1 -1
package/umd/index.umd.js CHANGED
@@ -25,7 +25,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.105.0-1';
+const PROMPTBOOK_ENGINE_VERSION = '0.105.0-3';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -987,6 +987,7 @@
     SEPARATOR: Color.fromHex('#cccccc'),
     COMMITMENT: Color.fromHex('#DA0F78'),
     PARAMETER: Color.fromHex('#8e44ad'),
+    CODE_BLOCK: Color.fromHex('#7700ffff'),
 });
 // <- TODO: [🧠][🈵] Using `Color` here increases the package size approx 3kb, maybe remove it
 /**
@@ -1308,6 +1309,7 @@
     let args = [];
     let timeout;
     let isVerbose;
+    let env;
     if (typeof options === 'string') {
         // TODO: [1] DRY default values
         command = options;
@@ -1315,6 +1317,7 @@
         crashOnError = true;
         timeout = Infinity; // <- TODO: [⏳]
         isVerbose = DEFAULT_IS_VERBOSE;
+        env = undefined;
     }
     else {
         /*
@@ -1331,6 +1334,7 @@
         crashOnError = (_b = options.crashOnError) !== null && _b !== void 0 ? _b : true;
         timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : Infinity;
         isVerbose = (_d = options.isVerbose) !== null && _d !== void 0 ? _d : DEFAULT_IS_VERBOSE;
+        env = options.env;
     }
     // TODO: /(-[a-zA-Z0-9-]+\s+[^\s]*)|[^\s]*/g
     const _ = Array.from(command.matchAll(/(".*")|([^\s]*)/g))
@@ -1349,7 +1353,7 @@
     if (/^win/.test(process.platform) && ['npm', 'npx'].includes(command)) {
         command = `${command}.cmd`;
     }
-    return { command, humanReadableCommand, args, cwd, crashOnError, timeout, isVerbose };
+    return { command, humanReadableCommand, args, cwd, crashOnError, timeout, isVerbose, env };
 }
 // TODO: This should show type error> execCommandNormalizeOptions({ command: '', commands: [''] });

@@ -1370,7 +1374,7 @@
     }
     return new Promise((resolve, reject) => {
         // eslint-disable-next-line prefer-const
-        const { command, humanReadableCommand, args, cwd, crashOnError, timeout, isVerbose = DEFAULT_IS_VERBOSE, } = $execCommandNormalizeOptions(options);
+        const { command, humanReadableCommand, args, cwd, crashOnError, timeout, isVerbose = DEFAULT_IS_VERBOSE, env, } = $execCommandNormalizeOptions(options);
         if (timeout !== Infinity) {
             // TODO: In waitasecond forTime(Infinity) should be equivalent to forEver()
             waitasecond.forTime(timeout).then(() => {
@@ -1388,7 +1392,11 @@
             console.info(colors__default["default"].yellow(cwd) + ' ' + colors__default["default"].green(command) + ' ' + colors__default["default"].blue(args.join(' ')));
         }
         try {
-            const commandProcess = child_process.spawn(command, args, { cwd, shell: true });
+            const commandProcess = child_process.spawn(command, args, {
+                cwd,
+                shell: true,
+                env: env ? { ...process.env, ...env } : process.env,
+            });
             if (isVerbose) {
                 commandProcess.on('message', (message) => {
                     console.info({ message });
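
Note on the `env` additions above: `$execCommandNormalizeOptions` now carries an optional `env` field from the options through to `child_process.spawn`, where it is spread over the inherited `process.env` so the caller's variables win on conflict. A minimal standalone Node.js sketch of that merge behaviour (plain `child_process`, not the package's own helper; the variable names are illustrative):

    const child_process = require('child_process');

    const extraEnv = { EXAMPLE_FLAG: '1' }; // <- hypothetical variables supplied by the caller
    const commandProcess = child_process.spawn('node', ['--version'], {
        shell: true,
        // Spreading `extraEnv` last means it overrides anything inherited from `process.env`
        env: extraEnv ? { ...process.env, ...extraEnv } : process.env,
    });
    commandProcess.stdout.on('data', (chunk) => console.info(String(chunk)));
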
@@ -3993,74 +4001,90 @@
  * in real-time through an observable.
  *
  * @param llmTools - The LLM tools to be intercepted and tracked
- * @returns An augmented version of the tools that includes usage tracking capabilities
+ * @returns Full proxy of the tools with added usage tracking capabilities
  * @public exported from `@promptbook/core`
  */
 function countUsage(llmTools) {
     let totalUsage = ZERO_USAGE;
     const spending = new rxjs.Subject();
-    const proxyTools = {
-        get title() {
-            return `${llmTools.title} (+usage)`;
-            // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
-            // <- TODO: [🧈][🧠] Does it make sense to suffix "(+usage)"?
-        },
-        get description() {
-            return `${llmTools.description} (+usage)`;
-            // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
-            // <- TODO: [🧈][🧠] Does it make sense to suffix "(+usage)"?
-        },
-        checkConfiguration() {
-            return /* not await */ llmTools.checkConfiguration();
-        },
-        listModels() {
-            return /* not await */ llmTools.listModels();
-        },
-        spending() {
-            return spending.asObservable();
-        },
-        getTotalUsage() {
-            // <- Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
-            return totalUsage;
+    // Create a Proxy to intercept all property access and ensure full proxying of all properties
+    const proxyTools = new Proxy(llmTools, {
+        get(target, prop, receiver) {
+            // Handle title property
+            if (prop === 'title') {
+                return `${target.title} (+usage)`;
+                // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
+                // <- TODO: [🧈][🧠] Does it make sense to suffix "(+usage)"?
+            }
+            // Handle description property
+            if (prop === 'description') {
+                return `${target.description} (+usage)`;
+                // <- TODO: [🧈] Maybe standartize the suffix when wrapping `LlmExecutionTools` up
+                // <- TODO: [🧈][🧠] Does it make sense to suffix "(+usage)"?
+            }
+            // Handle spending method (new method added by this wrapper)
+            if (prop === 'spending') {
+                return () => {
+                    return spending.asObservable();
+                };
+            }
+            // Handle getTotalUsage method (new method added by this wrapper)
+            if (prop === 'getTotalUsage') {
+                // <- Note: [🥫] Not using getter `get totalUsage` but `getTotalUsage` to allow this object to be proxied
+                return () => {
+                    return totalUsage;
+                };
+            }
+            // Handle callChatModel method with usage counting
+            if (prop === 'callChatModel' && target.callChatModel !== undefined) {
+                return async (prompt) => {
+                    // console.info('[🚕] callChatModel through countTotalUsage');
+                    const promptResult = await target.callChatModel(prompt);
+                    totalUsage = addUsage(totalUsage, promptResult.usage);
+                    spending.next(promptResult.usage);
+                    return promptResult;
+                };
+            }
+            // Handle callCompletionModel method with usage counting
+            if (prop === 'callCompletionModel' && target.callCompletionModel !== undefined) {
+                return async (prompt) => {
+                    // console.info('[🚕] callCompletionModel through countTotalUsage');
+                    const promptResult = await target.callCompletionModel(prompt);
+                    totalUsage = addUsage(totalUsage, promptResult.usage);
+                    spending.next(promptResult.usage);
+                    return promptResult;
+                };
+            }
+            // Handle callEmbeddingModel method with usage counting
+            if (prop === 'callEmbeddingModel' && target.callEmbeddingModel !== undefined) {
+                return async (prompt) => {
+                    // console.info('[🚕] callEmbeddingModel through countTotalUsage');
+                    const promptResult = await target.callEmbeddingModel(prompt);
+                    totalUsage = addUsage(totalUsage, promptResult.usage);
+                    spending.next(promptResult.usage);
+                    return promptResult;
+                };
+            }
+            // Handle callImageGenerationModel method with usage counting
+            if (prop === 'callImageGenerationModel' && target.callImageGenerationModel !== undefined) {
+                return async (prompt) => {
+                    // console.info('[🚕] callImageGenerationModel through countTotalUsage');
+                    const promptResult = await target.callImageGenerationModel(prompt);
+                    totalUsage = addUsage(totalUsage, promptResult.usage);
+                    spending.next(promptResult.usage);
+                    return promptResult;
+                };
+            }
+            // <- Note: [🤖]
+            // For all other properties and methods, delegate to the original target
+            const value = Reflect.get(target, prop, receiver);
+            // If it's a function, bind it to the target to preserve context
+            if (typeof value === 'function') {
+                return value.bind(target);
+            }
+            return value;
         },
-    };
-    if (llmTools.callChatModel !== undefined) {
-        proxyTools.callChatModel = async (prompt) => {
-            // console.info('[🚕] callChatModel through countTotalUsage');
-            const promptResult = await llmTools.callChatModel(prompt);
-            totalUsage = addUsage(totalUsage, promptResult.usage);
-            spending.next(promptResult.usage);
-            return promptResult;
-        };
-    }
-    if (llmTools.callCompletionModel !== undefined) {
-        proxyTools.callCompletionModel = async (prompt) => {
-            // console.info('[🚕] callCompletionModel through countTotalUsage');
-            const promptResult = await llmTools.callCompletionModel(prompt);
-            totalUsage = addUsage(totalUsage, promptResult.usage);
-            spending.next(promptResult.usage);
-            return promptResult;
-        };
-    }
-    if (llmTools.callEmbeddingModel !== undefined) {
-        proxyTools.callEmbeddingModel = async (prompt) => {
-            // console.info('[🚕] callEmbeddingModel through countTotalUsage');
-            const promptResult = await llmTools.callEmbeddingModel(prompt);
-            totalUsage = addUsage(totalUsage, promptResult.usage);
-            spending.next(promptResult.usage);
-            return promptResult;
-        };
-    }
-    if (llmTools.callImageGenerationModel !== undefined) {
-        proxyTools.callImageGenerationModel = async (prompt) => {
-            // console.info('[🚕] callImageGenerationModel through countTotalUsage');
-            const promptResult = await llmTools.callImageGenerationModel(prompt);
-            totalUsage = addUsage(totalUsage, promptResult.usage);
-            spending.next(promptResult.usage);
-            return promptResult;
-        };
-    }
-    // <- Note: [🤖]
+    });
     return proxyTools;
 }
 /**
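
Note on the `countUsage` rewrite above: instead of building a literal wrapper object and copying the `call*Model` methods onto it, the function now returns an ES `Proxy` whose `get` trap special-cases `title`, `description`, `spending`, `getTotalUsage` and the `call*Model` methods, and delegates every other property to the wrapped tools via `Reflect.get`, binding functions to the original target. A minimal standalone sketch of the same pattern (the names `withUsageCounting`, `tools` and the numeric `usage` field are illustrative, not the package API):

    function withUsageCounting(tools) {
        let total = 0; // <- simplified: the real code accumulates a structured usage object via `addUsage`
        return new Proxy(tools, {
            get(target, prop, receiver) {
                if (prop === 'getTotalUsage') {
                    return () => total;
                }
                if (prop === 'callChatModel' && target.callChatModel !== undefined) {
                    return async (prompt) => {
                        const result = await target.callChatModel(prompt);
                        total += result.usage;
                        return result;
                    };
                }
                // Everything else passes through unchanged, so properties the wrapper
                // does not know about stay reachable without being enumerated here
                const value = Reflect.get(target, prop, receiver);
                return typeof value === 'function' ? value.bind(target) : value;
            },
        });
    }

    const counted = withUsageCounting({
        callChatModel: async () => ({ content: 'Hello', usage: 3 }),
        listModels: () => ['example-model'], // <- not special-cased, delegated by the Proxy
    });

The practical difference from the previous object-literal wrapper is that properties it did not explicitly copy are no longer lost, which is what the updated `@returns Full proxy of the tools` comment refers to.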