@promptbook/remote-client 0.56.0 → 0.57.0-0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
```diff
@@ -156,7 +156,7 @@ var RemoteLlmExecutionTools = /** @class */ (function () {
 /**
  * The version of the Promptbook library
  */
-var PROMPTBOOK_VERSION = '0.56.0-4';
+var PROMPTBOOK_VERSION = '0.56.0';
 
 export { PROMPTBOOK_VERSION, RemoteLlmExecutionTools };
 //# sourceMappingURL=index.es.js.map
```
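Because `PROMPTBOOK_VERSION` is a named export, the bumped constant is directly observable from consuming code. A minimal check, using only the two exports shown above:

```ts
import { PROMPTBOOK_VERSION, RemoteLlmExecutionTools } from '@promptbook/remote-client';

// For the build shown in this diff the constant is '0.56.0'.
console.info(`Promptbook remote client library version: ${PROMPTBOOK_VERSION}`);

// RemoteLlmExecutionTools is the client entry point; its constructor options are
// not part of this diff, so it is only referenced here.
void RemoteLlmExecutionTools;
```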
```diff
@@ -22,7 +22,7 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
     /**
      * Calls OpenAI API to use a chat model.
      */
-    gptChat(prompt: Pick<Prompt, 'content' | 'modelRequirements'>): Promise<PromptChatResult>;
+    gptChat(prompt: Pick<Prompt, 'content' | 'modelRequirements' | 'expectFormat'>): Promise<PromptChatResult>;
     /**
      * Calls OpenAI API to use a complete model.
      */
```
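With the widened `Pick`, callers of `gptChat` can forward the format expectation alongside the prompt content and model requirements. A hedged sketch of the accepted argument shape; the `Prompt` import path, the `modelVariant` field, and the `'JSON'` literal are assumptions, not values confirmed by this diff:

```ts
import type { Prompt } from '@promptbook/types'; // assumed home of the Prompt type

// The exact subset of Prompt that gptChat accepts after this change.
type GptChatPrompt = Pick<Prompt, 'content' | 'modelRequirements' | 'expectFormat'>;

const chatPrompt: GptChatPrompt = {
    content: 'List three colors as a JSON array of strings.',
    modelRequirements: { modelVariant: 'CHAT' }, // assumed ModelRequirements field
    expectFormat: 'JSON', // assumed ExpectFormatCommand['format'] value
};
```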
```diff
@@ -1,10 +1,11 @@
+import type { PostprocessingFunction } from '../execution/plugins/script-execution-tools/javascript/JavascriptExecutionToolsOptions';
+import type { ExpectFormatCommand } from './Command';
+import type { ModelRequirements } from './ModelRequirements';
+import type { Expectations } from './PromptbookJson/PromptTemplateJson';
 import type { string_name } from './typeAliases';
 import type { string_prompt } from './typeAliases';
 import type { string_promptbook_url_with_hashtemplate } from './typeAliases';
 import type { string_title } from './typeAliases';
-import type { PostprocessingFunction } from '../execution/plugins/script-execution-tools/javascript/JavascriptExecutionToolsOptions';
-import type { ModelRequirements } from './ModelRequirements';
-import type { Expectations } from './PromptbookJson/PromptTemplateJson';
 
 /**
  * Prompt in a text along with model requirements, but without any execution or templating logic.
  *
@@ -39,6 +40,13 @@ export type Prompt = {
      * If not set, nothing is expected from the answer
      */
     readonly expectations?: Expectations;
+    /**
+     * Expect this format of the answer
+     *
+     * Note: Expectations are performed after all postprocessing steps
+     * @deprecated [💝]
+     */
+    readonly expectFormat?: ExpectFormatCommand['format'];
     /**
      * Unique identifier of the promptbook with specific template name as hash
      *
```
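The new optional `expectFormat` field mirrors an `EXPECT` format command and is already marked `@deprecated`, and its JSDoc pins down the ordering: the format is checked only after all postprocessing steps have run. A minimal sketch of that ordering; this is an illustration of the documented contract, not the library's executor code, and the `'JSON'` literal is an assumption about `ExpectFormatCommand['format']`:

```ts
// Local stand-in for the imported PostprocessingFunction type, to keep the sketch self-contained.
type PostprocessingFunction = (value: string) => string;

// Illustration of the documented order: postprocess first, then check the expected format.
function checkAnswer(
    rawAnswer: string,
    postprocessing: ReadonlyArray<PostprocessingFunction>,
    expectFormat?: 'JSON', // assumed format literal
): string {
    // 1) Run every postprocessing step in order
    const answer = postprocessing.reduce((value, fn) => fn(value), rawAnswer);

    // 2) Only then enforce the expected format
    if (expectFormat === 'JSON') {
        JSON.parse(answer); // throws if the postprocessed answer is not valid JSON
    }

    return answer;
}

// Example: trim whitespace before the JSON check
checkAnswer('  ["red", "green", "blue"]  ', [(value) => value.trim()], 'JSON');
```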
package/package.json CHANGED
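The hunks below bump the package to 0.57.0-0 and pin the `@promptbook/core` peer dependency to the same prerelease, so the two packages are meant to move in lockstep. A hedged runtime sanity check for that expectation; it assumes `@promptbook/core` also exports a `PROMPTBOOK_VERSION` constant, which this diff does not show:

```ts
import { PROMPTBOOK_VERSION as CLIENT_VERSION } from '@promptbook/remote-client';
// Assumption: @promptbook/core exposes the same named constant for its own build.
import { PROMPTBOOK_VERSION as CORE_VERSION } from '@promptbook/core';

if (CLIENT_VERSION !== CORE_VERSION) {
    console.warn(
        `@promptbook/remote-client (${CLIENT_VERSION}) and @promptbook/core (${CORE_VERSION}) ` +
            `are out of sync; the peerDependencies range pins them to the same version.`,
    );
}
```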
```diff
@@ -1,6 +1,6 @@
 {
     "name": "@promptbook/remote-client",
-    "version": "0.56.0",
+    "version": "0.57.0-0",
     "description": "Library to supercharge your use of large language models",
     "private": false,
     "sideEffects": false,
@@ -47,7 +47,7 @@
         }
     ],
     "peerDependencies": {
-        "@promptbook/core": "0.56.0"
+        "@promptbook/core": "0.57.0-0"
     },
     "main": "./umd/index.umd.js",
    "module": "./esm/index.es.js",
```
package/umd/index.umd.js CHANGED
```diff
@@ -160,7 +160,7 @@
     /**
      * The version of the Promptbook library
      */
-    var PROMPTBOOK_VERSION = '0.56.0-4';
+    var PROMPTBOOK_VERSION = '0.56.0';
 
     exports.PROMPTBOOK_VERSION = PROMPTBOOK_VERSION;
     exports.RemoteLlmExecutionTools = RemoteLlmExecutionTools;
```
```diff
@@ -22,7 +22,7 @@ export declare class OpenAiExecutionTools implements LlmExecutionTools {
     /**
      * Calls OpenAI API to use a chat model.
      */
-    gptChat(prompt: Pick<Prompt, 'content' | 'modelRequirements'>): Promise<PromptChatResult>;
+    gptChat(prompt: Pick<Prompt, 'content' | 'modelRequirements' | 'expectFormat'>): Promise<PromptChatResult>;
     /**
      * Calls OpenAI API to use a complete model.
      */
```

```diff
@@ -1,10 +1,11 @@
+import type { PostprocessingFunction } from '../execution/plugins/script-execution-tools/javascript/JavascriptExecutionToolsOptions';
+import type { ExpectFormatCommand } from './Command';
+import type { ModelRequirements } from './ModelRequirements';
+import type { Expectations } from './PromptbookJson/PromptTemplateJson';
 import type { string_name } from './typeAliases';
 import type { string_prompt } from './typeAliases';
 import type { string_promptbook_url_with_hashtemplate } from './typeAliases';
 import type { string_title } from './typeAliases';
-import type { PostprocessingFunction } from '../execution/plugins/script-execution-tools/javascript/JavascriptExecutionToolsOptions';
-import type { ModelRequirements } from './ModelRequirements';
-import type { Expectations } from './PromptbookJson/PromptTemplateJson';
 
 /**
  * Prompt in a text along with model requirements, but without any execution or templating logic.
  *
@@ -39,6 +40,13 @@ export type Prompt = {
      * If not set, nothing is expected from the answer
      */
     readonly expectations?: Expectations;
+    /**
+     * Expect this format of the answer
+     *
+     * Note: Expectations are performed after all postprocessing steps
+     * @deprecated [💝]
+     */
+    readonly expectFormat?: ExpectFormatCommand['format'];
     /**
      * Unique identifier of the promptbook with specific template name as hash
      *
```