@promptbook/node 0.94.0-0 → 0.94.0-12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/README.md +1 -8
  2. package/esm/index.es.js +9 -7
  3. package/esm/index.es.js.map +1 -1
  4. package/esm/typings/src/_packages/cli.index.d.ts +4 -0
  5. package/esm/typings/src/_packages/core.index.d.ts +2 -0
  6. package/esm/typings/src/_packages/ollama.index.d.ts +14 -0
  7. package/esm/typings/src/_packages/openai.index.d.ts +2 -0
  8. package/esm/typings/src/_packages/types.index.d.ts +2 -0
  9. package/esm/typings/src/_packages/wizzard.index.d.ts +4 -0
  10. package/esm/typings/src/execution/AvailableModel.d.ts +9 -1
  11. package/esm/typings/src/execution/ExecutionTask.d.ts +3 -1
  12. package/esm/typings/src/llm-providers/_common/filterModels.d.ts +2 -2
  13. package/esm/typings/src/llm-providers/{openai/computeUsage.d.ts → _common/utils/pricing.d.ts} +2 -2
  14. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +1 -1
  15. package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions.d.ts +1 -1
  16. package/esm/typings/src/llm-providers/deepseek/DeepseekExecutionToolsOptions.d.ts +1 -1
  17. package/esm/typings/src/llm-providers/google/GoogleExecutionToolsOptions.d.ts +1 -1
  18. package/esm/typings/src/llm-providers/ollama/OllamaExecutionTools.d.ts +44 -0
  19. package/esm/typings/src/llm-providers/ollama/OllamaExecutionToolsOptions.d.ts +23 -0
  20. package/esm/typings/src/llm-providers/ollama/createOllamaExecutionTools.d.ts +11 -0
  21. package/esm/typings/src/llm-providers/ollama/ollama-models.d.ts +14 -0
  22. package/esm/typings/src/llm-providers/ollama/playground/playground.d.ts +6 -0
  23. package/esm/typings/src/llm-providers/ollama/register-configuration.d.ts +14 -0
  24. package/esm/typings/src/llm-providers/ollama/register-constructor.d.ts +15 -0
  25. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +1 -1
  26. package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +91 -0
  27. package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +12 -53
  28. package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +1 -1
  29. package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts +2 -0
  30. package/esm/typings/src/llm-providers/openai/openai-models.d.ts +1 -7
  31. package/esm/typings/src/version.d.ts +1 -1
  32. package/package.json +25 -2
  33. package/umd/index.umd.js +9 -7
  34. package/umd/index.umd.js.map +1 -1
  35. /package/esm/typings/src/llm-providers/{openai/computeUsage.test.d.ts → _common/utils/pricing.test.d.ts} +0 -0
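
The headline additions in this range are a new Ollama provider (items 18-24) and a new OpenAiCompatibleExecutionTools base that absorbs most of OpenAiExecutionTools (items 26-27), with the formerly OpenAI-only computeUsage helper moved to a shared _common/utils/pricing module (items 13 and 35). As rough orientation, consuming the new provider would look something like the sketch below; the '@promptbook/ollama' entry point and the option names are assumptions inferred from the typings file names, not verified against the published API.

```ts
// Sketch only: the '@promptbook/ollama' entry point and the `baseUrl` option are
// assumptions inferred from the new typings files (ollama.index.d.ts,
// OllamaExecutionToolsOptions.d.ts); consult those typings for the real shape.
import { createOllamaExecutionTools } from '@promptbook/ollama';

const ollamaTools = createOllamaExecutionTools({
    baseUrl: 'http://localhost:11434', // assumed: default local Ollama endpoint
});

// The resulting object would then be passed wherever LlmExecutionTools are
// accepted, for example as the LLM tool of a pipeline executor.
```
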
package/umd/index.umd.js CHANGED
@@ -46,7 +46,7 @@
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
-const PROMPTBOOK_ENGINE_VERSION = '0.94.0-0';
+const PROMPTBOOK_ENGINE_VERSION = '0.94.0-12';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -4463,11 +4463,6 @@
 const jokerParameterNames = currentTask.jokerParameterNames || [];
 const preparedContent = (currentTask.preparedContent || '{content}').split('{content}').join(currentTask.content);
 // <- TODO: [🍵] Use here `templateParameters` to replace {websiteContent} with option to ignore missing parameters
-await onProgress({
-    outputParameters: {
-        [currentTask.resultingParameterName]: '',
-    },
-});
 const resultString = await executeFormatSubvalues({
     jokerParameterNames,
     priority,
@@ -4566,6 +4561,13 @@
  * Note: This is a flag to prevent `onProgress` call after the pipeline execution is finished
  */
 let isReturned = false;
+// Note: Report all output parameters upfront as empty strings
+if (onProgress) {
+    const emptyOutputParameters = Object.fromEntries(preparedPipeline.parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
+    onProgress({
+        outputParameters: emptyOutputParameters,
+    });
+}
 // Note: Check that all input input parameters are defined
 for (const parameter of preparedPipeline.parameters.filter(({ isInput }) => isInput)) {
     if (inputParameters[parameter.name] === undefined) {
@@ -10583,7 +10585,7 @@
 function preserve(func) {
     // Note: NOT calling the function
     (async () => {
-        // TODO: [💩] Change to `await forEver` or something better
+        // TODO: [💩] Change to `await forEver` or `forTime(Infinity)`
        await waitasecond.forTime(100000000);
        // [1]
        try {
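
The second and third hunks above relocate the progress reporting: instead of each task emitting an empty value for its own resulting parameter when it starts, the executor now announces every non-input parameter as an empty string once, before any task runs. The generic pattern, shown here as an illustrative sketch rather than Promptbook's actual internals (the function and type names below are made up), is:

```ts
// Illustrative only; the names here are invented for this sketch.
type Parameter = { name: string; isInput: boolean };
type ProgressCallback = (progress: { outputParameters: Record<string, string> }) => void;

function reportEmptyOutputsUpfront(parameters: Parameter[], onProgress?: ProgressCallback): void {
    if (!onProgress) {
        return;
    }
    // Same shape as the added code in the diff: every non-input parameter is
    // announced once, as an empty string, before execution begins.
    const emptyOutputParameters = Object.fromEntries(
        parameters.filter((param) => !param.isInput).map((param) => [param.name, '']),
    );
    onProgress({ outputParameters: emptyOutputParameters });
}
```

This lets a progress consumer learn the full set of output parameter names from the very first callback instead of discovering them one task at a time.
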