@promptbook/remote-server 0.94.0-0 → 0.94.0-12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/README.md +1 -8
  2. package/esm/index.es.js +9 -7
  3. package/esm/index.es.js.map +1 -1
  4. package/esm/typings/src/_packages/cli.index.d.ts +4 -0
  5. package/esm/typings/src/_packages/core.index.d.ts +2 -0
  6. package/esm/typings/src/_packages/ollama.index.d.ts +14 -0
  7. package/esm/typings/src/_packages/openai.index.d.ts +2 -0
  8. package/esm/typings/src/_packages/types.index.d.ts +2 -0
  9. package/esm/typings/src/_packages/wizzard.index.d.ts +4 -0
  10. package/esm/typings/src/execution/AvailableModel.d.ts +9 -1
  11. package/esm/typings/src/execution/ExecutionTask.d.ts +3 -1
  12. package/esm/typings/src/llm-providers/_common/filterModels.d.ts +2 -2
  13. package/esm/typings/src/llm-providers/{openai/computeUsage.d.ts → _common/utils/pricing.d.ts} +2 -2
  14. package/esm/typings/src/llm-providers/anthropic-claude/AnthropicClaudeExecutionToolsOptions.d.ts +1 -1
  15. package/esm/typings/src/llm-providers/azure-openai/AzureOpenAiExecutionToolsOptions.d.ts +1 -1
  16. package/esm/typings/src/llm-providers/deepseek/DeepseekExecutionToolsOptions.d.ts +1 -1
  17. package/esm/typings/src/llm-providers/google/GoogleExecutionToolsOptions.d.ts +1 -1
  18. package/esm/typings/src/llm-providers/ollama/OllamaExecutionTools.d.ts +44 -0
  19. package/esm/typings/src/llm-providers/ollama/OllamaExecutionToolsOptions.d.ts +23 -0
  20. package/esm/typings/src/llm-providers/ollama/createOllamaExecutionTools.d.ts +11 -0
  21. package/esm/typings/src/llm-providers/ollama/ollama-models.d.ts +14 -0
  22. package/esm/typings/src/llm-providers/ollama/playground/playground.d.ts +6 -0
  23. package/esm/typings/src/llm-providers/ollama/register-configuration.d.ts +14 -0
  24. package/esm/typings/src/llm-providers/ollama/register-constructor.d.ts +15 -0
  25. package/esm/typings/src/llm-providers/openai/OpenAiAssistantExecutionToolsOptions.d.ts +1 -1
  26. package/esm/typings/src/llm-providers/openai/OpenAiCompatibleExecutionTools.d.ts +91 -0
  27. package/esm/typings/src/llm-providers/openai/OpenAiExecutionTools.d.ts +12 -53
  28. package/esm/typings/src/llm-providers/openai/OpenAiExecutionToolsOptions.d.ts +1 -1
  29. package/esm/typings/src/llm-providers/openai/createOpenAiExecutionTools.d.ts +2 -0
  30. package/esm/typings/src/llm-providers/openai/openai-models.d.ts +1 -7
  31. package/esm/typings/src/version.d.ts +1 -1
  32. package/package.json +25 -2
  33. package/umd/index.umd.js +9 -7
  34. package/umd/index.umd.js.map +1 -1
  35. /package/esm/typings/src/llm-providers/{openai/computeUsage.test.d.ts → _common/utils/pricing.test.d.ts} +0 -0
package/umd/index.umd.js CHANGED
@@ -48,7 +48,7 @@
48
48
  * @generated
49
49
  * @see https://github.com/webgptorg/promptbook
50
50
  */
51
- const PROMPTBOOK_ENGINE_VERSION = '0.94.0-0';
51
+ const PROMPTBOOK_ENGINE_VERSION = '0.94.0-12';
52
52
  /**
53
53
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
54
54
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -5890,11 +5890,6 @@
5890
5890
  const jokerParameterNames = currentTask.jokerParameterNames || [];
5891
5891
  const preparedContent = (currentTask.preparedContent || '{content}').split('{content}').join(currentTask.content);
5892
5892
  // <- TODO: [🍵] Use here `templateParameters` to replace {websiteContent} with option to ignore missing parameters
5893
- await onProgress({
5894
- outputParameters: {
5895
- [currentTask.resultingParameterName]: '',
5896
- },
5897
- });
5898
5893
  const resultString = await executeFormatSubvalues({
5899
5894
  jokerParameterNames,
5900
5895
  priority,
@@ -5993,6 +5988,13 @@
5993
5988
  * Note: This is a flag to prevent `onProgress` call after the pipeline execution is finished
5994
5989
  */
5995
5990
  let isReturned = false;
5991
+ // Note: Report all output parameters upfront as empty strings
5992
+ if (onProgress) {
5993
+ const emptyOutputParameters = Object.fromEntries(preparedPipeline.parameters.filter((param) => !param.isInput).map((param) => [param.name, '']));
5994
+ onProgress({
5995
+ outputParameters: emptyOutputParameters,
5996
+ });
5997
+ }
5996
5998
  // Note: Check that all input input parameters are defined
5997
5999
  for (const parameter of preparedPipeline.parameters.filter(({ isInput }) => isInput)) {
5998
6000
  if (inputParameters[parameter.name] === undefined) {
@@ -6929,7 +6931,7 @@
6929
6931
  function preserve(func) {
6930
6932
  // Note: NOT calling the function
6931
6933
  (async () => {
6932
- // TODO: [💩] Change to `await forEver` or something better
6934
+ // TODO: [💩] Change to `await forEver` or `forTime(Infinity)`
6933
6935
  await waitasecond.forTime(100000000);
6934
6936
  // [1]
6935
6937
  try {