@promptbook/node 0.92.0-23 → 0.92.0-24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -30,7 +30,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- const PROMPTBOOK_ENGINE_VERSION = '0.92.0-23';
+ const PROMPTBOOK_ENGINE_VERSION = '0.92.0-24';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -102,11 +102,20 @@ const DEFAULT_BOOK_OUTPUT_PARAMETER_NAME = 'result';
  */
  const DEFAULT_MAX_FILE_SIZE = 100 * 1024 * 1024; // 100MB
  /**
- * @@@
+ * Threshold value that determines when a dataset is considered "big"
+ * and may require special handling or optimizations
+ *
+ * For example, when error occurs in one item of the big dataset, it will not fail the whole pipeline
  *
  * @public exported from `@promptbook/core`
  */
  const BIG_DATASET_TRESHOLD = 50;
+ /**
+ * Placeholder text used to represent a placeholder value of failed operation
+ *
+ * @public exported from `@promptbook/core`
+ */
+ const FAILED_VALUE_PLACEHOLDER = '!?';
  // <- TODO: [🧠] Better system for generator warnings - not always "code" and "by `@promptbook/cli`"
  /**
  * The maximum number of iterations for a loops
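The two new constants work together: as the later hunks in `executeFormatSubvalues` show, when a dataset is larger than `BIG_DATASET_TRESHOLD`, a failing item is logged and replaced with `FAILED_VALUE_PLACEHOLDER` instead of aborting the whole pipeline. A minimal sketch of that pattern, not the package's actual implementation (`items` and `processItem` are illustrative names, the constant values are taken from this diff):

    // Error-tolerance pattern enabled by the new constants (sketch only)
    const BIG_DATASET_TRESHOLD = 50;
    const FAILED_VALUE_PLACEHOLDER = '!?';

    async function mapBigDataset(items, processItem) {
        return Promise.all(
            items.map(async (item) => {
                try {
                    return await processItem(item);
                } catch (error) {
                    if (items.length > BIG_DATASET_TRESHOLD) {
                        // Big dataset: log the error and keep going with a placeholder value
                        console.error(error);
                        return FAILED_VALUE_PLACEHOLDER;
                    }
                    // Small dataset: fail the whole pipeline
                    throw error;
                }
            }),
        );
    }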
@@ -1536,7 +1545,7 @@ class SimplePipelineCollection {
  /**
  * Constructs a pipeline collection from pipelines
  *
- * @param pipelines @@@
+ * @param pipelines Array of pipeline JSON objects to include in the collection
  *
  * Note: During the construction logic of all pipelines are validated
  * Note: It is not recommended to use this constructor directly, use `createCollectionFromJson` *(or other variant)* instead
@@ -2626,6 +2635,15 @@ const CsvFormatParser = {
  mappedData.push(mappedRow);
  if (onProgress) {
  // Note: Report the CSV with all rows mapped so far
+ /*
+ !!!!
+ // Report progress with updated value
+ const progressData = mappedData.map((row, i) =>
+ i > index ? { ...row, [outputParameterName]: PENDING_VALUE_PLACEHOLDER } : row,
+ );
+
+
+ */
  await onProgress(unparse(mappedData, { ...settings, ...MANDATORY_CSV_SETTINGS }));
  }
  }
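The commented-out block added here hints at a planned refinement of the CSV progress reporting: rows that have not been mapped yet would carry a pending marker in the intermediate CSV. A sketch of what that mapping does, assuming a hypothetical `PENDING_VALUE_PLACEHOLDER` constant (it is referenced in the commented code but not defined anywhere in this diff):

    // Sketch of the pending-marker mapping from the commented-out block above;
    // PENDING_VALUE_PLACEHOLDER is an assumption, it is not defined in this diff
    const PENDING_VALUE_PLACEHOLDER = '…';

    function withPendingMarkers(mappedData, index, outputParameterName) {
        // Rows after the current index have not been processed yet,
        // so their output column is filled with the pending marker
        return mappedData.map((row, i) =>
            i > index ? { ...row, [outputParameterName]: PENDING_VALUE_PLACEHOLDER } : row,
        );
    }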
@@ -4123,7 +4141,7 @@ async function executeFormatSubvalues(options) {
  `));
  if (length > BIG_DATASET_TRESHOLD) {
  console.error(highLevelError);
- return '~';
+ return FAILED_VALUE_PLACEHOLDER;
  }
  throw highLevelError;
  }
@@ -4154,7 +4172,7 @@ async function executeFormatSubvalues(options) {
  ${block(pipelineIdentification)}
  Subparameter index: ${index}
  `));
- return '~';
+ return FAILED_VALUE_PLACEHOLDER;
  }
  throw error;
  }
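Both former `return '~'` sites now return the shared constant, so failed cells in a result can be recognized programmatically. A hedged usage sketch, assuming the constant is importable from `@promptbook/core` as its `@public` annotation in this diff states:

    // Detecting failed cells in a result (usage sketch, not from the package)
    import { FAILED_VALUE_PLACEHOLDER } from '@promptbook/core';

    function countFailedValues(values) {
        return values.filter((value) => value === FAILED_VALUE_PLACEHOLDER).length;
    }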
@@ -4993,7 +5011,8 @@ async function preparePersona(personaDescription, tools, options) {
  */

  /**
- * @@@
+ * Safely retrieves the global scope object (window in browser, global in Node.js)
+ * regardless of the JavaScript environment in which the code is running
  *
  * Note: `$` is used to indicate that this function is not a pure function - it access global scope
  *
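Only the JSDoc of the global-scope helper appears in this hunk, so the function name and body below are illustrative; a minimal sketch of what such an environment-agnostic getter typically looks like:

    // Sketch of a global-scope getter matching the JSDoc above
    // (the `$`-prefixed name is an assumption, it is not shown in this diff)
    function $getGlobalScope() {
        if (typeof globalThis !== 'undefined') {
            return globalThis; // modern browsers and Node.js
        }
        if (typeof window !== 'undefined') {
            return window; // older browsers
        }
        if (typeof global !== 'undefined') {
            return global; // older Node.js
        }
        throw new Error('Unable to locate the global scope');
    }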
@@ -7160,14 +7179,15 @@ const MatcherFormfactorDefinition = {
  };

  /**
- * Sheets is form of app that @@@
+ * Sheets is form of app that processes tabular data in CSV format, allowing transformation
+ * and analysis of structured data through AI-powered operations
  *
  * @public exported from `@promptbook/core`
  */
  const SheetsFormfactorDefinition = {
  name: 'SHEETS',
  aliasNames: ['SHEETS', 'SHEET'],
- description: `@@@`,
+ description: `A formfactor for processing spreadsheet-like data in CSV format, enabling AI transformations on tabular data`,
  documentationUrl: `https://github.com/webgptorg/promptbook/discussions/176`,
  pipelineInterface: {
  inputParameters: [
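For readability, the visible part of the SHEETS formfactor definition reassembles as follows; the hunk cuts off after `inputParameters: [`, so everything past that point is omitted here rather than guessed:

    // Shape of SheetsFormfactorDefinition as visible in this hunk only
    const SheetsFormfactorDefinition = {
        name: 'SHEETS',
        aliasNames: ['SHEETS', 'SHEET'],
        description: `A formfactor for processing spreadsheet-like data in CSV format, enabling AI transformations on tabular data`,
        documentationUrl: `https://github.com/webgptorg/promptbook/discussions/176`,
        pipelineInterface: {
            inputParameters: [
                // ...truncated in this diff
            ],
            // ...remaining fields not shown in this hunk
        },
    };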
@@ -7243,7 +7263,7 @@ const FORMFACTOR_DEFINITIONS = [
  /**
  * Parses the formfactor command
  *
- * Note: @@@ This command is used as formfactor for new commands - it should NOT be used in any `.book` file
+ * Note: This command is used as a formfactor for new commands and defines the app type format - it should NOT be used in any `.book` file
  *
  * @see `documentationUrl` for more details
  * @public exported from `@promptbook/editable`
@@ -7265,7 +7285,7 @@ const formfactorCommandParser = {
  /**
  * Description of the FORMFACTOR command
  */
- description: `@@`,
+ description: `Specifies the application type and interface requirements that this promptbook should conform to`,
  /**
  * Link to documentation
  */