@promptbook/node 0.56.0 → 0.57.0-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -448,7 +448,7 @@ function union() {
  /**
   * The version of the Promptbook library
   */
- var PROMPTBOOK_VERSION = '0.56.0-4';
+ var PROMPTBOOK_VERSION = '0.56.0';
 
  /**
   * Parses the template and returns the list of all parameter names
@@ -22,7 +22,7 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
  /**
   * Calls OpenAI API to use a chat model.
   */
- gptChat(prompt: Pick<Prompt, 'content' | 'modelRequirements'>): Promise<PromptChatResult>;
+ gptChat(prompt: Pick<Prompt, 'content' | 'modelRequirements' | 'expectFormat'>): Promise<PromptChatResult>;
  /**
   * Calls OpenAI API to use a complete model.
   */
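The widened Pick above means a caller can now forward an expectFormat hint to gptChat together with the prompt content and model requirements. Below is a minimal sketch of such a call, assuming OpenAiExecutionTools is exported from an entry point like @promptbook/openai, that its constructor options take an apiKey, that ModelRequirements accepts modelVariant: 'CHAT', and that 'JSON' is a valid ExpectFormatCommand format; none of these details are confirmed by this diff.

import { OpenAiExecutionTools } from '@promptbook/openai'; // assumed entry point

async function askForJson(): Promise<void> {
    // Constructor options are an assumption; only the gptChat signature appears in this diff.
    const tools = new OpenAiExecutionTools({ apiKey: process.env.OPENAI_API_KEY! });

    const result = await tools.gptChat({
        content: 'List three colors as a JSON array of strings.',
        modelRequirements: { modelVariant: 'CHAT' }, // assumed shape of ModelRequirements
        expectFormat: 'JSON', // assumed member of ExpectFormatCommand['format']
    });

    console.info(result.content); // PromptChatResult is assumed to expose the answer as `content`
}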
@@ -1,10 +1,11 @@
+ import type { PostprocessingFunction } from '../execution/plugins/script-execution-tools/javascript/JavascriptExecutionToolsOptions';
+ import type { ExpectFormatCommand } from './Command';
+ import type { ModelRequirements } from './ModelRequirements';
+ import type { Expectations } from './PromptbookJson/PromptTemplateJson';
  import type { string_name } from './typeAliases';
  import type { string_prompt } from './typeAliases';
  import type { string_promptbook_url_with_hashtemplate } from './typeAliases';
  import type { string_title } from './typeAliases';
- import type { PostprocessingFunction } from '../execution/plugins/script-execution-tools/javascript/JavascriptExecutionToolsOptions';
- import type { ModelRequirements } from './ModelRequirements';
- import type { Expectations } from './PromptbookJson/PromptTemplateJson';
  /**
   * Prompt in a text along with model requirements, but without any execution or templating logic.
   *
@@ -39,6 +40,13 @@ export type Prompt = {
   * If not set, nothing is expected from the answer
   */
  readonly expectations?: Expectations;
+ /**
+  * Expect this format of the answer
+  *
+  * Note: Expectations are performed after all postprocessing steps
+  * @deprecated [💝]
+  */
+ readonly expectFormat?: ExpectFormatCommand['format'];
  /**
   * Unique identifier of the promptbook with specific template name as hash
   *
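The new optional field is marked deprecated on arrival, and its note says the format expectation is checked after all postprocessing steps. As a rough illustration of how a consumer might honour the hint, here is a self-contained, hypothetical helper; it mirrors only the expectFormat slice of Prompt, and treating 'JSON' as a member of ExpectFormatCommand['format'] is an assumption.

// Local structural sketch of the relevant slice of Prompt; the field name comes from the
// diff above, but the 'JSON' format value is an assumption about ExpectFormatCommand['format'].
type PromptFormatSlice = {
    readonly expectFormat?: 'JSON';
};

// Hypothetical consumer-side check, not part of Promptbook: validate a model answer
// against the (deprecated) expectFormat hint after all postprocessing has run.
function checkExpectFormat(prompt: PromptFormatSlice, answer: string): void {
    if (prompt.expectFormat === 'JSON') {
        JSON.parse(answer); // throws if the answer is not valid JSON
    }
}

// Usage sketch:
checkExpectFormat({ expectFormat: 'JSON' }, '["red", "green", "blue"]');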
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@promptbook/node",
-   "version": "0.56.0",
+   "version": "0.57.0-0",
    "description": "Library to supercharge your use of large language models",
    "private": false,
    "sideEffects": false,
@@ -48,7 +48,7 @@
    }
  ],
  "peerDependencies": {
-   "@promptbook/core": "0.56.0"
+   "@promptbook/core": "0.57.0-0"
  },
  "main": "./umd/index.umd.js",
  "module": "./esm/index.es.js",
package/umd/index.umd.js CHANGED
@@ -453,7 +453,7 @@
  /**
   * The version of the Promptbook library
   */
- var PROMPTBOOK_VERSION = '0.56.0-4';
+ var PROMPTBOOK_VERSION = '0.56.0';
 
  /**
   * Parses the template and returns the list of all parameter names
@@ -22,7 +22,7 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
  /**
   * Calls OpenAI API to use a chat model.
   */
- gptChat(prompt: Pick<Prompt, 'content' | 'modelRequirements'>): Promise<PromptChatResult>;
+ gptChat(prompt: Pick<Prompt, 'content' | 'modelRequirements' | 'expectFormat'>): Promise<PromptChatResult>;
  /**
   * Calls OpenAI API to use a complete model.
   */
@@ -1,10 +1,11 @@
+ import type { PostprocessingFunction } from '../execution/plugins/script-execution-tools/javascript/JavascriptExecutionToolsOptions';
+ import type { ExpectFormatCommand } from './Command';
+ import type { ModelRequirements } from './ModelRequirements';
+ import type { Expectations } from './PromptbookJson/PromptTemplateJson';
  import type { string_name } from './typeAliases';
  import type { string_prompt } from './typeAliases';
  import type { string_promptbook_url_with_hashtemplate } from './typeAliases';
  import type { string_title } from './typeAliases';
- import type { PostprocessingFunction } from '../execution/plugins/script-execution-tools/javascript/JavascriptExecutionToolsOptions';
- import type { ModelRequirements } from './ModelRequirements';
- import type { Expectations } from './PromptbookJson/PromptTemplateJson';
  /**
   * Prompt in a text along with model requirements, but without any execution or templating logic.
   *
@@ -39,6 +40,13 @@ export type Prompt = {
   * If not set, nothing is expected from the answer
   */
  readonly expectations?: Expectations;
+ /**
+  * Expect this format of the answer
+  *
+  * Note: Expectations are performed after all postprocessing steps
+  * @deprecated [💝]
+  */
+ readonly expectFormat?: ExpectFormatCommand['format'];
  /**
   * Unique identifier of the promptbook with specific template name as hash
   *