@promptbook/cli 0.92.0-23 → 0.92.0-24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/esm/index.es.js CHANGED
@@ -47,7 +47,7 @@ const BOOK_LANGUAGE_VERSION = '1.0.0';
47
47
  * @generated
48
48
  * @see https://github.com/webgptorg/promptbook
49
49
  */
50
- const PROMPTBOOK_ENGINE_VERSION = '0.92.0-23';
50
+ const PROMPTBOOK_ENGINE_VERSION = '0.92.0-24';
51
51
  /**
52
52
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
53
53
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -164,11 +164,20 @@ const DEFAULT_BOOK_OUTPUT_PARAMETER_NAME = 'result';
164
164
  */
165
165
  const DEFAULT_MAX_FILE_SIZE = 100 * 1024 * 1024; // 100MB
166
166
  /**
167
- * @@@
167
+ * Threshold value that determines when a dataset is considered "big"
168
+ * and may require special handling or optimizations
169
+ *
170
+ * For example, when an error occurs in one item of a big dataset, it will not fail the whole pipeline
168
171
  *
169
172
  * @public exported from `@promptbook/core`
170
173
  */
171
174
  const BIG_DATASET_TRESHOLD = 50;
175
+ /**
176
+ * Placeholder text used to represent the value of a failed operation
177
+ *
178
+ * @public exported from `@promptbook/core`
179
+ */
180
+ const FAILED_VALUE_PLACEHOLDER = '!?';
172
181
  // <- TODO: !!!! Use
173
182
  /**
174
183
 * Warning message for the generated sections and files
@@ -703,7 +712,8 @@ class NotYetImplementedError extends Error {
703
712
  }
704
713
 
705
714
  /**
706
- * @@@
715
+ * Safely retrieves the global scope object (window in browser, global in Node.js)
716
+ * regardless of the JavaScript environment in which the code is running
707
717
  *
708
718
  * Note: `$` is used to indicate that this function is not a pure function - it access global scope
709
719
  *
@@ -2782,9 +2792,9 @@ function cacheLlmTools(llmTools, options = {}) {
2782
2792
  /**
2783
2793
  * TODO: [🧠][💸] Maybe make some common abstraction `interceptLlmTools` and use here (or use javascript Proxy?)
2784
2794
  * TODO: [🧠] Is there some meaningfull way how to test this util
2785
- * TODO: [👷‍♂️] @@@ Manual about construction of llmTools
2786
- * @@@ write discussion about this and storages
2787
- * @@@ write how to combine multiple interceptors
2795
+ * TODO: [👷‍♂️] Comprehensive manual about construction of llmTools
2796
+ * Detailed explanation about caching strategies and appropriate storage selection for different use cases
2797
+ * Examples of how to combine multiple interceptors for advanced caching, logging, and usage tracking
2788
2798
  */
2789
2799
 
2790
2800
  /**
@@ -4822,7 +4832,7 @@ class SimplePipelineCollection {
4822
4832
  /**
4823
4833
  * Constructs a pipeline collection from pipelines
4824
4834
  *
4825
- * @param pipelines @@@
4835
+ * @param pipelines Array of pipeline JSON objects to include in the collection
4826
4836
  *
4827
4837
  * Note: During the construction logic of all pipelines are validated
4828
4838
  * Note: It is not recommended to use this constructor directly, use `createCollectionFromJson` *(or other variant)* instead
@@ -5510,6 +5520,15 @@ const CsvFormatParser = {
5510
5520
  mappedData.push(mappedRow);
5511
5521
  if (onProgress) {
5512
5522
  // Note: Report the CSV with all rows mapped so far
5523
+ /*
5524
+ !!!!
5525
+ // Report progress with updated value
5526
+ const progressData = mappedData.map((row, i) =>
5527
+ i > index ? { ...row, [outputParameterName]: PENDING_VALUE_PLACEHOLDER } : row,
5528
+ );
5529
+
5530
+
5531
+ */
5513
5532
  await onProgress(unparse(mappedData, { ...settings, ...MANDATORY_CSV_SETTINGS }));
5514
5533
  }
5515
5534
  }
@@ -6537,7 +6556,7 @@ async function executeFormatSubvalues(options) {
6537
6556
  `));
6538
6557
  if (length > BIG_DATASET_TRESHOLD) {
6539
6558
  console.error(highLevelError);
6540
- return '~';
6559
+ return FAILED_VALUE_PLACEHOLDER;
6541
6560
  }
6542
6561
  throw highLevelError;
6543
6562
  }
@@ -6568,7 +6587,7 @@ async function executeFormatSubvalues(options) {
6568
6587
  ${block(pipelineIdentification)}
6569
6588
  Subparameter index: ${index}
6570
6589
  `));
6571
- return '~';
6590
+ return FAILED_VALUE_PLACEHOLDER;
6572
6591
  }
6573
6592
  throw error;
6574
6593
  }
@@ -9109,14 +9128,15 @@ const MatcherFormfactorDefinition = {
9109
9128
  };
9110
9129
 
9111
9130
  /**
9112
- * Sheets is form of app that @@@
9131
+ * Sheets is a form of app that processes tabular data in CSV format, allowing transformation
9132
+ * and analysis of structured data through AI-powered operations
9113
9133
  *
9114
9134
  * @public exported from `@promptbook/core`
9115
9135
  */
9116
9136
  const SheetsFormfactorDefinition = {
9117
9137
  name: 'SHEETS',
9118
9138
  aliasNames: ['SHEETS', 'SHEET'],
9119
- description: `@@@`,
9139
+ description: `A formfactor for processing spreadsheet-like data in CSV format, enabling AI transformations on tabular data`,
9120
9140
  documentationUrl: `https://github.com/webgptorg/promptbook/discussions/176`,
9121
9141
  pipelineInterface: {
9122
9142
  inputParameters: [
@@ -9192,7 +9212,7 @@ const FORMFACTOR_DEFINITIONS = [
9192
9212
  /**
9193
9213
  * Parses the formfactor command
9194
9214
  *
9195
- * Note: @@@ This command is used as formfactor for new commands - it should NOT be used in any `.book` file
9215
+ * Note: This command is used as a formfactor for new commands and defines the app type format - it should NOT be used in any `.book` file
9196
9216
  *
9197
9217
  * @see `documentationUrl` for more details
9198
9218
  * @public exported from `@promptbook/editable`
@@ -9214,7 +9234,7 @@ const formfactorCommandParser = {
9214
9234
  /**
9215
9235
  * Description of the FORMFACTOR command
9216
9236
  */
9217
- description: `@@`,
9237
+ description: `Specifies the application type and interface requirements that this promptbook should conform to`,
9218
9238
  /**
9219
9239
  * Link to documentation
9220
9240
  */
@@ -16946,14 +16966,23 @@ function computeOpenAiUsage(promptContent, // <- Note: Intentionally using [] to
16946
16966
  resultContent, rawResponse) {
16947
16967
  var _a, _b;
16948
16968
  if (rawResponse.usage === undefined) {
16969
+ console.log('!!! computeOpenAiUsage', 'The usage is not defined in the response from OpenAI');
16949
16970
  throw new PipelineExecutionError('The usage is not defined in the response from OpenAI');
16950
16971
  }
16951
16972
  if (((_a = rawResponse.usage) === null || _a === void 0 ? void 0 : _a.prompt_tokens) === undefined) {
16973
+ console.log('!!! computeOpenAiUsage', 'In OpenAI response `usage.prompt_tokens` not defined');
16952
16974
  throw new PipelineExecutionError('In OpenAI response `usage.prompt_tokens` not defined');
16953
16975
  }
16954
16976
  const inputTokens = rawResponse.usage.prompt_tokens;
16955
16977
  const outputTokens = ((_b = rawResponse.usage) === null || _b === void 0 ? void 0 : _b.completion_tokens) || 0;
16956
16978
  const modelInfo = OPENAI_MODELS.find((model) => model.modelName === rawResponse.model);
16979
+ console.log('!!! computeOpenAiUsage', {
16980
+ inputTokens,
16981
+ outputTokens,
16982
+ rawResponse,
16983
+ resultContent,
16984
+ modelInfo,
16985
+ });
16957
16986
  let price;
16958
16987
  if (modelInfo === undefined || modelInfo.pricing === undefined) {
16959
16988
  price = uncertainNumber();