@promptbook/openai 0.26.0 → 0.27.0-1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (25)
  1. package/esm/index.es.js +1 -1
  2. package/esm/typings/conversion/parseCommand.test.d.ts +1 -2
  3. package/esm/typings/conversion/promptTemplatePipelineStringToJson.d.ts +1 -3
  4. package/esm/typings/execution/ExpectError.d.ts +10 -0
  5. package/esm/typings/execution/PtpExecutor.d.ts +1 -1
  6. package/esm/typings/execution/ScriptExecutionTools.d.ts +1 -1
  7. package/esm/typings/execution/createPtpExecutor.d.ts +2 -2
  8. package/esm/typings/execution/plugins/natural-execution-tools/openai/OpenAiExecutionTools.d.ts +1 -1
  9. package/esm/typings/types/Command.d.ts +2 -2
  10. package/esm/typings/types/Parameters.d.ts +2 -2
  11. package/esm/typings/types/PromptTemplatePipelineJson/PromptTemplateJson.d.ts +22 -14
  12. package/esm/typings/types/PromptTemplatePipelineJson/PromptTemplateParameterJson.d.ts +1 -1
  13. package/package.json +2 -2
  14. package/umd/index.umd.js +1 -1
  15. package/umd/typings/conversion/parseCommand.test.d.ts +1 -2
  16. package/umd/typings/conversion/promptTemplatePipelineStringToJson.d.ts +1 -3
  17. package/umd/typings/execution/ExpectError.d.ts +10 -0
  18. package/umd/typings/execution/PtpExecutor.d.ts +1 -1
  19. package/umd/typings/execution/ScriptExecutionTools.d.ts +1 -1
  20. package/umd/typings/execution/createPtpExecutor.d.ts +2 -2
  21. package/umd/typings/execution/plugins/natural-execution-tools/openai/OpenAiExecutionTools.d.ts +1 -1
  22. package/umd/typings/types/Command.d.ts +2 -2
  23. package/umd/typings/types/Parameters.d.ts +2 -2
  24. package/umd/typings/types/PromptTemplatePipelineJson/PromptTemplateJson.d.ts +22 -14
  25. package/umd/typings/types/PromptTemplatePipelineJson/PromptTemplateParameterJson.d.ts +1 -1
package/esm/index.es.js CHANGED
@@ -191,7 +191,7 @@ var OpenAiExecutionTools = /** @class */ (function () {
191
191
  return OpenAiExecutionTools;
192
192
  }());
193
193
  /**
194
- * TODO: !!!! Allow to use other models - List all from openai
194
+
195
195
  * TODO: Maybe Create some common util for gptChat and gptComplete
196
196
  * TODO: Maybe make custom OpenaiError
197
197
  */
@@ -1,6 +1,5 @@
1
1
  export {};
2
2
  /**
3
3
  * TODO: [🧠] Probbably change syntax MODEL VARIANT -> MODEL
4
- * TODO: !!!! Allow to skip segments SKIP IF {foo} NOT DEFINED
5
- * TODO: !!! Allow to EXPECT 3 words
4
+ * TODO: !!! Allow to skip segments SKIP IF {foo} NOT DEFINED
6
5
  */
@@ -9,7 +9,5 @@ export declare function promptTemplatePipelineStringToJson(promptTemplatePipelin
9
9
  /**
10
10
  * TODO: Report here line/column of error
11
11
  * TODO: Use spaceTrim more effectively
12
- * TODO: !!!! Parameter flags - isInput, isOutput, isInternal, isBeforePostprocessing, isBeforeFinal, canonicalName
13
- * TODO: !!!! Allow to have non-immutable parameters - suffix them with fooPrevious3 -> fooPrevious2 -> fooPrevious1 -> foo
14
- * This must work with other technial parameters
12
+ * TODO: [🧠] Parameter flags - isInput, isOutput, isInternal
15
13
  */
@@ -0,0 +1,10 @@
1
+ /**
2
+ * This error occurs when some expectation is not met in the execution of the pipeline
3
+ */
4
+ export declare class ExpectError extends Error {
5
+ readonly name = "ExpectError";
6
+ constructor(message: string);
7
+ }
8
+ /**
9
+ * TODO: [🧠] Should be this exported from the library
10
+ */
@@ -3,7 +3,7 @@ import type { string_name } from '.././types/typeAliases';
3
3
  import type { TaskProgress } from '../types/TaskProgress';
4
4
  import type { ExecutionReportJson } from '../types/execution-report/ExecutionReportJson';
5
5
  /**
6
- * Executor is a simple async function that takes input parameters and returns result parameters _(along with all intermediate parameters and input parameters = it extends input object)_.
6
+ * Executor is a simple async function that takes INPUT PARAMETERs and returns result parameters _(along with all intermediate parameters and INPUT PARAMETERs = it extends input object)_.
7
7
  * Executor is made by combining execution tools and prompt template pipeline library.
8
8
  *
9
9
  * It can be done in two ways:
@@ -1,7 +1,7 @@
1
1
  import { string_name, string_script } from '.././types/typeAliases';
2
2
  import { ScriptLanguage } from '../types/ScriptLanguage';
3
3
  /**
4
- * Represents all the tools needed to execute scripts
4
+ * Represents all the tools needed to EXECUTE SCRIPTs
5
5
  *
6
6
  * @see https://github.com/webgptorg/promptbook#script-execution-tools
7
7
  */
@@ -1,4 +1,4 @@
1
- import { PromptTemplatePipeline } from '../classes/PromptTemplatePipeline';
1
+ import type { PromptTemplatePipeline } from '../classes/PromptTemplatePipeline';
2
2
  import { ExecutionTools } from './ExecutionTools';
3
3
  import { PtpExecutor } from './PtpExecutor';
4
4
  export interface CreatePtpExecutorSettings {
@@ -7,7 +7,7 @@ export interface CreatePtpExecutorSettings {
7
7
  *
8
8
  * @default 3
9
9
  */
10
- readonly maxNaturalExecutionAttempts: number;
10
+ readonly maxExecutionAttempts: number;
11
11
  }
12
12
  /**
13
13
  * Options for creating a PTP (Prompt Template Pipeline) executor
@@ -22,7 +22,7 @@ export declare class OpenAiExecutionTools implements NaturalExecutionTools {
22
22
  gptComplete(prompt: Prompt): Promise<PromptCompletionResult>;
23
23
  }
24
24
  /**
25
- * TODO: !!!! Allow to use other models - List all from openai
25
+
26
26
  * TODO: Maybe Create some common util for gptChat and gptComplete
27
27
  * TODO: Maybe make custom OpenaiError
28
28
  */
@@ -46,7 +46,7 @@ export interface ModelCommand {
46
46
  /**
47
47
  * Parameter command describes one parameter of the prompt template
48
48
  *
49
- * - It can tell if it is input or output parameter
49
+ * - It can tell if it is input or OUTPUT PARAMETER
50
50
  * - It can have description
51
51
  * - In description it can have simple formatting BUT not markdown structure or reference to other parameters
52
52
  */
@@ -58,7 +58,7 @@ export interface ParameterCommand {
58
58
  }
59
59
  /**
60
60
  * Postprocess command describes which function to use for postprocessing
61
- * This will be created as separate execute script block bellow
61
+ * This will be created as separate EXECUTE SCRIPT block bellow
62
62
  */
63
63
  export interface PostprocessCommand {
64
64
  readonly type: 'POSTPROCESS';
@@ -2,9 +2,9 @@
2
2
  * Parameters of the prompt template (pipeline)
3
3
  *
4
4
  * There are three types of parameters:
5
- * - **Input parameters** are required to execute the prompt template pipeline.
5
+ * - **INPUT PARAMETERs** are required to execute the prompt template pipeline.
6
6
  * - **Intermediate parameters** are used internally in the prompt template pipeline.
7
- * - **Output parameters** are not used internally in the prompt template pipeline, but are returned as the result of the prompt template pipeline execution.
7
+ * - **OUTPUT PARAMETERs** are not used internally in the prompt template pipeline, but are returned as the result of the prompt template pipeline execution.
8
8
  *
9
9
  * @see https://github.com/webgptorg/promptbook#parameters
10
10
  */
@@ -1,4 +1,4 @@
1
- import { number_integer, number_positive_or_zero, string_javascript, string_markdown, string_name, string_prompt, string_template } from '../.././types/typeAliases';
1
+ import { number_integer, number_positive_or_zero, string_javascript, string_javascript_name, string_markdown, string_name, string_prompt, string_template } from '../.././types/typeAliases';
2
2
  import { ExpectFormatCommand } from '../Command';
3
3
  import { ExecutionType } from '../ExecutionTypes';
4
4
  import { ModelRequirements } from '../ModelRequirements';
@@ -12,19 +12,6 @@ export type PromptTemplateJson = NaturalTemplateJson | SimpleTemplateJson | Scri
12
12
  */
13
13
  export interface NaturalTemplateJson extends PromptTemplateJsonCommon {
14
14
  readonly executionType: 'PROMPT_TEMPLATE';
15
- /**
16
- * Expect this amount of each unit in the answer
17
- *
18
- * For example 5 words, 3 sentences, 2 paragraphs, ...
19
- */
20
- readonly expectAmount?: Partial<Record<Lowercase<ExpectationUnit>, {
21
- min?: ExpectationAmount;
22
- max?: ExpectationAmount;
23
- }>>;
24
- /**
25
- * Expect this format of the answer
26
- */
27
- readonly expectFormat?: ExpectFormatCommand['format'];
28
15
  /**
29
16
  * Requirements for the model
30
17
  * - This is required only for executionType PROMPT_TEMPLATE
@@ -97,6 +84,27 @@ interface PromptTemplateJsonCommon {
97
84
  * Content of the template with {placeholders} for parameters
98
85
  */
99
86
  readonly content: (string_prompt | string_javascript | string_markdown) & string_template;
87
+ /**
88
+ * List of postprocessing steps that are executed after the prompt template
89
+ */
90
+ readonly postprocessing?: Array<string_javascript_name>;
91
+ /**
92
+ * Expect this amount of each unit in the answer
93
+ *
94
+ * For example 5 words, 3 sentences, 2 paragraphs, ...
95
+ *
96
+ * Note: Expectations are performed after all postprocessing steps
97
+ */
98
+ readonly expectAmount?: Partial<Record<Lowercase<ExpectationUnit>, {
99
+ min?: ExpectationAmount;
100
+ max?: ExpectationAmount;
101
+ }>>;
102
+ /**
103
+ * Expect this format of the answer
104
+ *
105
+ * Note: Expectations are performed after all postprocessing steps
106
+ */
107
+ readonly expectFormat?: ExpectFormatCommand['format'];
100
108
  /**
101
109
  * Name of the parameter that is the result of the prompt template
102
110
  */
@@ -12,7 +12,7 @@ export interface PromptTemplateParameterJson {
12
12
  /**
13
13
  * The parameter is input of the pipeline
14
14
  *
15
- * Note: Output parameter is every parameter including input one
15
+ * Note: OUTPUT PARAMETER is every parameter including input one
16
16
  */
17
17
  readonly isInput: boolean;
18
18
  /**
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@promptbook/openai",
3
- "version": "0.26.0",
3
+ "version": "0.27.0-1",
4
4
  "description": "Library to supercharge your use of large language models",
5
5
  "private": false,
6
6
  "sideEffects": false,
@@ -37,7 +37,7 @@
37
37
  "openai": "4.2.0"
38
38
  },
39
39
  "peerDependencies": {
40
- "@promptbook/core": "0.26.0"
40
+ "@promptbook/core": "0.27.0-1"
41
41
  },
42
42
  "main": "./umd/index.umd.js",
43
43
  "module": "./esm/index.es.js",
package/umd/index.umd.js CHANGED
@@ -199,7 +199,7 @@
199
199
  return OpenAiExecutionTools;
200
200
  }());
201
201
  /**
202
- * TODO: !!!! Allow to use other models - List all from openai
202
+
203
203
  * TODO: Maybe Create some common util for gptChat and gptComplete
204
204
  * TODO: Maybe make custom OpenaiError
205
205
  */
@@ -1,6 +1,5 @@
1
1
  export {};
2
2
  /**
3
3
  * TODO: [🧠] Probbably change syntax MODEL VARIANT -> MODEL
4
- * TODO: !!!! Allow to skip segments SKIP IF {foo} NOT DEFINED
5
- * TODO: !!! Allow to EXPECT 3 words
4
+ * TODO: !!! Allow to skip segments SKIP IF {foo} NOT DEFINED
6
5
  */
@@ -9,7 +9,5 @@ export declare function promptTemplatePipelineStringToJson(promptTemplatePipelin
9
9
  /**
10
10
  * TODO: Report here line/column of error
11
11
  * TODO: Use spaceTrim more effectively
12
- * TODO: !!!! Parameter flags - isInput, isOutput, isInternal, isBeforePostprocessing, isBeforeFinal, canonicalName
13
- * TODO: !!!! Allow to have non-immutable parameters - suffix them with fooPrevious3 -> fooPrevious2 -> fooPrevious1 -> foo
14
- * This must work with other technial parameters
12
+ * TODO: [🧠] Parameter flags - isInput, isOutput, isInternal
15
13
  */
@@ -0,0 +1,10 @@
1
+ /**
2
+ * This error occurs when some expectation is not met in the execution of the pipeline
3
+ */
4
+ export declare class ExpectError extends Error {
5
+ readonly name = "ExpectError";
6
+ constructor(message: string);
7
+ }
8
+ /**
9
+ * TODO: [🧠] Should be this exported from the library
10
+ */
@@ -3,7 +3,7 @@ import type { string_name } from '.././types/typeAliases';
3
3
  import type { TaskProgress } from '../types/TaskProgress';
4
4
  import type { ExecutionReportJson } from '../types/execution-report/ExecutionReportJson';
5
5
  /**
6
- * Executor is a simple async function that takes input parameters and returns result parameters _(along with all intermediate parameters and input parameters = it extends input object)_.
6
+ * Executor is a simple async function that takes INPUT PARAMETERs and returns result parameters _(along with all intermediate parameters and INPUT PARAMETERs = it extends input object)_.
7
7
  * Executor is made by combining execution tools and prompt template pipeline library.
8
8
  *
9
9
  * It can be done in two ways:
@@ -1,7 +1,7 @@
1
1
  import { string_name, string_script } from '.././types/typeAliases';
2
2
  import { ScriptLanguage } from '../types/ScriptLanguage';
3
3
  /**
4
- * Represents all the tools needed to execute scripts
4
+ * Represents all the tools needed to EXECUTE SCRIPTs
5
5
  *
6
6
  * @see https://github.com/webgptorg/promptbook#script-execution-tools
7
7
  */
@@ -1,4 +1,4 @@
1
- import { PromptTemplatePipeline } from '../classes/PromptTemplatePipeline';
1
+ import type { PromptTemplatePipeline } from '../classes/PromptTemplatePipeline';
2
2
  import { ExecutionTools } from './ExecutionTools';
3
3
  import { PtpExecutor } from './PtpExecutor';
4
4
  export interface CreatePtpExecutorSettings {
@@ -7,7 +7,7 @@ export interface CreatePtpExecutorSettings {
7
7
  *
8
8
  * @default 3
9
9
  */
10
- readonly maxNaturalExecutionAttempts: number;
10
+ readonly maxExecutionAttempts: number;
11
11
  }
12
12
  /**
13
13
  * Options for creating a PTP (Prompt Template Pipeline) executor
@@ -22,7 +22,7 @@ export declare class OpenAiExecutionTools implements NaturalExecutionTools {
22
22
  gptComplete(prompt: Prompt): Promise<PromptCompletionResult>;
23
23
  }
24
24
  /**
25
- * TODO: !!!! Allow to use other models - List all from openai
25
+
26
26
  * TODO: Maybe Create some common util for gptChat and gptComplete
27
27
  * TODO: Maybe make custom OpenaiError
28
28
  */
@@ -46,7 +46,7 @@ export interface ModelCommand {
46
46
  /**
47
47
  * Parameter command describes one parameter of the prompt template
48
48
  *
49
- * - It can tell if it is input or output parameter
49
+ * - It can tell if it is input or OUTPUT PARAMETER
50
50
  * - It can have description
51
51
  * - In description it can have simple formatting BUT not markdown structure or reference to other parameters
52
52
  */
@@ -58,7 +58,7 @@ export interface ParameterCommand {
58
58
  }
59
59
  /**
60
60
  * Postprocess command describes which function to use for postprocessing
61
- * This will be created as separate execute script block bellow
61
+ * This will be created as separate EXECUTE SCRIPT block bellow
62
62
  */
63
63
  export interface PostprocessCommand {
64
64
  readonly type: 'POSTPROCESS';
@@ -2,9 +2,9 @@
2
2
  * Parameters of the prompt template (pipeline)
3
3
  *
4
4
  * There are three types of parameters:
5
- * - **Input parameters** are required to execute the prompt template pipeline.
5
+ * - **INPUT PARAMETERs** are required to execute the prompt template pipeline.
6
6
  * - **Intermediate parameters** are used internally in the prompt template pipeline.
7
- * - **Output parameters** are not used internally in the prompt template pipeline, but are returned as the result of the prompt template pipeline execution.
7
+ * - **OUTPUT PARAMETERs** are not used internally in the prompt template pipeline, but are returned as the result of the prompt template pipeline execution.
8
8
  *
9
9
  * @see https://github.com/webgptorg/promptbook#parameters
10
10
  */
@@ -1,4 +1,4 @@
1
- import { number_integer, number_positive_or_zero, string_javascript, string_markdown, string_name, string_prompt, string_template } from '../.././types/typeAliases';
1
+ import { number_integer, number_positive_or_zero, string_javascript, string_javascript_name, string_markdown, string_name, string_prompt, string_template } from '../.././types/typeAliases';
2
2
  import { ExpectFormatCommand } from '../Command';
3
3
  import { ExecutionType } from '../ExecutionTypes';
4
4
  import { ModelRequirements } from '../ModelRequirements';
@@ -12,19 +12,6 @@ export type PromptTemplateJson = NaturalTemplateJson | SimpleTemplateJson | Scri
12
12
  */
13
13
  export interface NaturalTemplateJson extends PromptTemplateJsonCommon {
14
14
  readonly executionType: 'PROMPT_TEMPLATE';
15
- /**
16
- * Expect this amount of each unit in the answer
17
- *
18
- * For example 5 words, 3 sentences, 2 paragraphs, ...
19
- */
20
- readonly expectAmount?: Partial<Record<Lowercase<ExpectationUnit>, {
21
- min?: ExpectationAmount;
22
- max?: ExpectationAmount;
23
- }>>;
24
- /**
25
- * Expect this format of the answer
26
- */
27
- readonly expectFormat?: ExpectFormatCommand['format'];
28
15
  /**
29
16
  * Requirements for the model
30
17
  * - This is required only for executionType PROMPT_TEMPLATE
@@ -97,6 +84,27 @@ interface PromptTemplateJsonCommon {
97
84
  * Content of the template with {placeholders} for parameters
98
85
  */
99
86
  readonly content: (string_prompt | string_javascript | string_markdown) & string_template;
87
+ /**
88
+ * List of postprocessing steps that are executed after the prompt template
89
+ */
90
+ readonly postprocessing?: Array<string_javascript_name>;
91
+ /**
92
+ * Expect this amount of each unit in the answer
93
+ *
94
+ * For example 5 words, 3 sentences, 2 paragraphs, ...
95
+ *
96
+ * Note: Expectations are performed after all postprocessing steps
97
+ */
98
+ readonly expectAmount?: Partial<Record<Lowercase<ExpectationUnit>, {
99
+ min?: ExpectationAmount;
100
+ max?: ExpectationAmount;
101
+ }>>;
102
+ /**
103
+ * Expect this format of the answer
104
+ *
105
+ * Note: Expectations are performed after all postprocessing steps
106
+ */
107
+ readonly expectFormat?: ExpectFormatCommand['format'];
100
108
  /**
101
109
  * Name of the parameter that is the result of the prompt template
102
110
  */
@@ -12,7 +12,7 @@ export interface PromptTemplateParameterJson {
12
12
  /**
13
13
  * The parameter is input of the pipeline
14
14
  *
15
- * Note: Output parameter is every parameter including input one
15
+ * Note: OUTPUT PARAMETER is every parameter including input one
16
16
  */
17
17
  readonly isInput: boolean;
18
18
  /**