@promptbook/node 0.75.3 → 0.75.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -24,6 +24,7 @@ import { DEFAULT_REMOTE_URL } from '../config';
 import { DEFAULT_REMOTE_URL_PATH } from '../config';
 import { DEFAULT_CSV_SETTINGS } from '../config';
 import { DEFAULT_IS_VERBOSE } from '../config';
+import { SET_IS_VERBOSE } from '../config';
 import { DEFAULT_IS_AUTO_INSTALLED } from '../config';
 import { pipelineJsonToString } from '../conversion/pipelineJsonToString';
 import { pipelineStringToJson } from '../conversion/pipelineStringToJson';
@@ -136,6 +137,7 @@ export { DEFAULT_REMOTE_URL };
 export { DEFAULT_REMOTE_URL_PATH };
 export { DEFAULT_CSV_SETTINGS };
 export { DEFAULT_IS_VERBOSE };
+export { SET_IS_VERBOSE };
 export { DEFAULT_IS_AUTO_INSTALLED };
 export { pipelineJsonToString };
 export { pipelineStringToJson };
@@ -204,7 +204,15 @@ export declare const DEFAULT_CSV_SETTINGS: CsvSettings;
  *
  * @public exported from `@promptbook/core`
  */
-export declare const DEFAULT_IS_VERBOSE = false;
+export declare let DEFAULT_IS_VERBOSE: boolean;
+/**
+ * @@@
+ *
+ * Note: This is experimental feature
+ *
+ * @public exported from `@promptbook/core`
+ */
+export declare function SET_IS_VERBOSE(isVerbose: boolean): void;
 /**
  * @@@
  *
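
Taken together, the declaration changes above turn verbosity into a runtime switch: `DEFAULT_IS_VERBOSE` is no longer a `const false` but a mutable `let`, and the new experimental `SET_IS_VERBOSE(isVerbose)` is its setter. A minimal usage sketch, assuming the setter mutates `DEFAULT_IS_VERBOSE` (the declarations imply this but do not state it):

import { DEFAULT_IS_VERBOSE, SET_IS_VERBOSE } from '@promptbook/core';

// Experimental: flip the library-wide verbosity default at runtime
SET_IS_VERBOSE(true);

// Code that falls back to DEFAULT_IS_VERBOSE (for example the compiled
// `isVerbose = ... ? DEFAULT_IS_VERBOSE : ...` defaults in the bundle below)
// should now pick up the new value
console.log('Verbose by default:', DEFAULT_IS_VERBOSE);
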
@@ -1,4 +1,5 @@
 import type { string_markdown } from '../../../types/typeAliases';
+import type { Scraper } from '../Scraper';
 /**
  * Creates a message with all registered scrapers
  *
@@ -6,7 +7,7 @@ import type { string_markdown } from '../../../types/typeAliases';
  *
  * @private internal function of `createScrapersFromConfiguration` and `createScrapersFromEnv`
  */
-export declare function $registeredScrapersMessage(): string_markdown;
+export declare function $registeredScrapersMessage(availableScrapers: ReadonlyArray<Scraper>): string_markdown;
 /**
  * TODO: [®] DRY Register logic
  */
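
`$registeredScrapersMessage` is tagged `@private`, so only internal call sites are affected, but the new parameter is the visible part of a broader change: the message now distinguishes scrapers that are merely registered or installed from scrapers that were actually passed into the tools (see the UMD changes below). A hedged sketch of the new call shape; the declarations restate signatures from this diff (with `string_markdown` simplified to `string`) and the `tools` object is a hypothetical stand-in:

import type { Scraper } from '../Scraper';

declare function $registeredScrapersMessage(availableScrapers: ReadonlyArray<Scraper>): string;
declare function arrayableToArray<TItem>(input?: TItem | ReadonlyArray<TItem>): ReadonlyArray<TItem>;
declare const tools: { scrapers?: Scraper | ReadonlyArray<Scraper> };

// Before 0.75.4: $registeredScrapersMessage()
// Since 0.75.4: pass the scrapers that are actually available in the tools,
// so the message can mark them as "Available in tools"
const availableScrapers = arrayableToArray(tools.scrapers);
const message = $registeredScrapersMessage(availableScrapers);
console.info(message);
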
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "@promptbook/node",
-    "version": "0.75.3",
+    "version": "0.75.4",
     "description": "It's time for a paradigm shift. The future of software in plain English, French or Latin",
     "--note-0": " <- [🐊]",
     "private": false,
@@ -54,7 +54,7 @@
     "module": "./esm/index.es.js",
     "typings": "./esm/typings/src/_packages/node.index.d.ts",
     "peerDependencies": {
-        "@promptbook/core": "0.75.3"
+        "@promptbook/core": "0.75.4"
     },
     "dependencies": {
         "colors": "1.4.0",
package/umd/index.umd.js CHANGED
@@ -43,7 +43,7 @@
  *
  * @see https://github.com/webgptorg/promptbook
  */
-var PROMPTBOOK_ENGINE_VERSION = '0.75.2';
+var PROMPTBOOK_ENGINE_VERSION = '0.75.3';
 /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -4769,8 +4769,8 @@
  *
  * @private internal function of `createScrapersFromConfiguration` and `createScrapersFromEnv`
  */
-function $registeredScrapersMessage() {
-    var e_1, _a, e_2, _b;
+function $registeredScrapersMessage(availableScrapers) {
+    var e_1, _a, e_2, _b, e_3, _c;
     /**
      * Mixes registered scrapers from $scrapersMetadataRegister and $scrapersRegister
      */
@@ -4782,15 +4782,15 @@
         all.push({ packageName: packageName, className: className, mimeTypes: mimeTypes, documentationUrl: documentationUrl, isAvilableInBrowser: isAvilableInBrowser });
     };
     try {
-        for (var _c = __values($scrapersMetadataRegister.list()), _d = _c.next(); !_d.done; _d = _c.next()) {
-            var _e = _d.value, packageName = _e.packageName, className = _e.className, mimeTypes = _e.mimeTypes, documentationUrl = _e.documentationUrl, isAvilableInBrowser = _e.isAvilableInBrowser;
+        for (var _d = __values($scrapersMetadataRegister.list()), _e = _d.next(); !_e.done; _e = _d.next()) {
+            var _f = _e.value, packageName = _f.packageName, className = _f.className, mimeTypes = _f.mimeTypes, documentationUrl = _f.documentationUrl, isAvilableInBrowser = _f.isAvilableInBrowser;
             _loop_1(packageName, className, mimeTypes, documentationUrl, isAvilableInBrowser);
         }
     }
     catch (e_1_1) { e_1 = { error: e_1_1 }; }
     finally {
         try {
-            if (_d && !_d.done && (_a = _c.return)) _a.call(_c);
+            if (_e && !_e.done && (_a = _d.return)) _a.call(_d);
         }
         finally { if (e_1) throw e_1.error; }
     }
@@ -4801,18 +4801,31 @@
         all.push({ packageName: packageName, className: className, mimeTypes: mimeTypes, documentationUrl: documentationUrl, isAvilableInBrowser: isAvilableInBrowser });
     };
     try {
-        for (var _f = __values($scrapersRegister.list()), _g = _f.next(); !_g.done; _g = _f.next()) {
-            var _h = _g.value, packageName = _h.packageName, className = _h.className, mimeTypes = _h.mimeTypes, documentationUrl = _h.documentationUrl, isAvilableInBrowser = _h.isAvilableInBrowser;
+        for (var _g = __values($scrapersRegister.list()), _h = _g.next(); !_h.done; _h = _g.next()) {
+            var _j = _h.value, packageName = _j.packageName, className = _j.className, mimeTypes = _j.mimeTypes, documentationUrl = _j.documentationUrl, isAvilableInBrowser = _j.isAvilableInBrowser;
             _loop_2(packageName, className, mimeTypes, documentationUrl, isAvilableInBrowser);
         }
     }
     catch (e_2_1) { e_2 = { error: e_2_1 }; }
     finally {
         try {
-            if (_g && !_g.done && (_b = _f.return)) _b.call(_f);
+            if (_h && !_h.done && (_b = _g.return)) _b.call(_g);
         }
         finally { if (e_2) throw e_2.error; }
     }
+    try {
+        for (var availableScrapers_1 = __values(availableScrapers), availableScrapers_1_1 = availableScrapers_1.next(); !availableScrapers_1_1.done; availableScrapers_1_1 = availableScrapers_1.next()) {
+            var metadata_1 = availableScrapers_1_1.value.metadata;
+            all.push(metadata_1);
+        }
+    }
+    catch (e_3_1) { e_3 = { error: e_3_1 }; }
+    finally {
+        try {
+            if (availableScrapers_1_1 && !availableScrapers_1_1.done && (_c = availableScrapers_1.return)) _c.call(availableScrapers_1);
+        }
+        finally { if (e_3) throw e_3.error; }
+    }
     var metadata = all.map(function (metadata) {
         var isMetadataAviailable = $scrapersMetadataRegister
             .list()
@@ -4826,42 +4839,44 @@
             var packageName = _a.packageName, className = _a.className;
             return metadata.packageName === packageName && metadata.className === className;
         });
-        return __assign(__assign({}, metadata), { isMetadataAviailable: isMetadataAviailable, isInstalled: isInstalled });
+        var isAvilableInTools = availableScrapers.some(function (_a) {
+            var _b = _a.metadata, packageName = _b.packageName, className = _b.className;
+            return metadata.packageName === packageName && metadata.className === className;
+        });
+        return __assign(__assign({}, metadata), { isMetadataAviailable: isMetadataAviailable, isInstalled: isInstalled, isAvilableInTools: isAvilableInTools });
     });
     if (metadata.length === 0) {
-        return "No scrapers are available";
+        return spaceTrim__default["default"]("\n **No scrapers are available**\n\n This is a unexpected behavior, you are probably using some broken version of Promptbook\n At least there should be available the metadata of the scrapers\n ");
     }
     return spaceTrim__default["default"](function (block) { return "\n Available scrapers are:\n ".concat(block(metadata
         .map(function (_a, i) {
-        var packageName = _a.packageName, className = _a.className, isMetadataAviailable = _a.isMetadataAviailable, isInstalled = _a.isInstalled, mimeTypes = _a.mimeTypes, isAvilableInBrowser = _a.isAvilableInBrowser;
-        var more;
-        // TODO: Use documentationUrl
-        if (just(false)) {
-            more = '';
-        }
-        else if (!isMetadataAviailable && !isInstalled) {
-            // TODO: [�][�] Maybe do allow to do auto-install if package not registered and not found
-            more = "*(not installed and no metadata, looks like a unexpected behavior)*";
-        }
-        else if (isMetadataAviailable && !isInstalled) {
-            // TODO: [�][�]
-            more = "*(not installed)*";
-        }
-        else if (!isMetadataAviailable && isInstalled) {
-            more = "*(no metadata, looks like a unexpected behavior)*";
-        }
-        else if (isMetadataAviailable && isInstalled) {
-            more = "(installed)";
-        }
-        else {
-            more = "*(unknown state, looks like a unexpected behavior)*";
-        }
+        var packageName = _a.packageName, className = _a.className, isMetadataAviailable = _a.isMetadataAviailable, isInstalled = _a.isInstalled, mimeTypes = _a.mimeTypes, isAvilableInBrowser = _a.isAvilableInBrowser, isAvilableInTools = _a.isAvilableInTools;
+        var more = [];
+        // TODO: [🧠] Maybe use `documentationUrl`
+        if (isMetadataAviailable) {
+            more.push("\u2B1C Metadata registered");
+        } // not else
+        if (isInstalled) {
+            more.push("\uD83D\uDFE9 Installed");
+        } // not else
+        if (isAvilableInTools) {
+            more.push("\uD83D\uDFE6 Available in tools");
+        } // not else
+        if (!isMetadataAviailable && isInstalled) {
+            more.push("When no metadata registered but scraper is installed, it is an unexpected behavior");
+        } // not else
+        if (!isInstalled && isAvilableInTools) {
+            more.push("When the scraper is not installed but available in tools, it is an unexpected compatibility behavior");
+        } // not else
         if (!isAvilableInBrowser) {
-            more += " *(not available in browser)*";
+            more.push("Not usable in browser");
        }
-        return "".concat(i + 1, ") `").concat(className, "` from `").concat(packageName, "` compatible to scrape ").concat(mimeTypes.join(', '), " ").concat(more);
+        var moreText = more.length === 0 ? '' : " *(".concat(more.join('; '), ")*");
+        return "".concat(i + 1, ") `").concat(className, "` from `").concat(packageName, "` compatible to scrape ").concat(mimeTypes
+            .map(function (mimeType) { return "\"".concat(mimeType, "\""); })
+            .join(', ')).concat(moreText);
     })
-        .join('\n')), "\n "); });
+        .join('\n')), "\n\n Legend:\n - \u2B1C **Metadata registered** means that Promptbook knows about the scraper, it is similar to registration in some registry\n - \uD83D\uDFE9 **Installed** means that you have imported package with particular scraper\n - \uD83D\uDFE6 **Available in tools** means that you have passed scraper as dependency into prepare or execution process\n\n "); });
 }
 /**
  * TODO: [®] DRY Register logic
@@ -5197,57 +5212,75 @@
 _a = options.maxParallelCount, maxParallelCount = _a === void 0 ? DEFAULT_MAX_PARALLEL_COUNT : _a, rootDirname = options.rootDirname, _b = options.isVerbose, isVerbose = _b === void 0 ? DEFAULT_IS_VERBOSE : _b;
 knowledgePreparedUnflatten = new Array(knowledgeSources.length);
 return [4 /*yield*/, forEachAsync(knowledgeSources, { maxParallelCount: maxParallelCount }, function (knowledgeSource, index) { return __awaiter(_this, void 0, void 0, function () {
-        var partialPieces, sourceHandler, _a, _b, scraper, partialPiecesUnchecked, e_1_1, pieces;
-        var e_1, _c;
-        return __generator(this, function (_d) {
-            switch (_d.label) {
+        var partialPieces, sourceHandler, scrapers, _loop_1, scrapers_1, scrapers_1_1, scraper, state_1, e_1_1, pieces;
+        var e_1, _a;
+        return __generator(this, function (_b) {
+            switch (_b.label) {
                 case 0:
                     partialPieces = null;
                     return [4 /*yield*/, makeKnowledgeSourceHandler(knowledgeSource, tools, { rootDirname: rootDirname, isVerbose: isVerbose })];
                 case 1:
-                    sourceHandler = _d.sent();
-                    _d.label = 2;
+                    sourceHandler = _b.sent();
+                    scrapers = arrayableToArray(tools.scrapers);
+                    _loop_1 = function (scraper) {
+                        var partialPiecesUnchecked;
+                        return __generator(this, function (_c) {
+                            switch (_c.label) {
+                                case 0:
+                                    if (!scraper.metadata.mimeTypes.includes(sourceHandler.mimeType)
+                                    // <- TODO: [🦔] Implement mime-type wildcards
+                                    ) {
+                                        return [2 /*return*/, "continue"];
+                                    }
+                                    return [4 /*yield*/, scraper.scrape(sourceHandler)];
+                                case 1:
+                                    partialPiecesUnchecked = _c.sent();
+                                    if (partialPiecesUnchecked !== null) {
+                                        partialPieces = __spreadArray([], __read(partialPiecesUnchecked), false);
+                                        return [2 /*return*/, "break"];
+                                    }
+                                    console.warn(spaceTrim__default["default"](function (block) { return "\n Cannot scrape knowledge from source despite the scraper `".concat(scraper.metadata.className, "` supports the mime type \"").concat(sourceHandler.mimeType, "\".\n \n The source:\n > ").concat(block(knowledgeSource.sourceContent
+                                        .split('\n')
+                                        .map(function (line) { return "> ".concat(line); })
+                                        .join('\n')), "\n\n ").concat(block($registeredScrapersMessage(scrapers)), "\n\n\n "); }));
+                                    return [2 /*return*/];
+                            }
+                        });
+                    };
+                    _b.label = 2;
                 case 2:
-                    _d.trys.push([2, 7, 8, 9]);
-                    _a = __values(arrayableToArray(tools.scrapers)), _b = _a.next();
-                    _d.label = 3;
+                    _b.trys.push([2, 7, 8, 9]);
+                    scrapers_1 = __values(scrapers), scrapers_1_1 = scrapers_1.next();
+                    _b.label = 3;
                 case 3:
-                    if (!!_b.done) return [3 /*break*/, 6];
-                    scraper = _b.value;
-                    if (!scraper.metadata.mimeTypes.includes(sourceHandler.mimeType)
-                    // <- TODO: [🦔] Implement mime-type wildcards
-                    ) {
-                        return [3 /*break*/, 5];
-                    }
-                    return [4 /*yield*/, scraper.scrape(sourceHandler)];
+                    if (!!scrapers_1_1.done) return [3 /*break*/, 6];
+                    scraper = scrapers_1_1.value;
+                    return [5 /*yield**/, _loop_1(scraper)];
                 case 4:
-                    partialPiecesUnchecked = _d.sent();
-                    if (partialPiecesUnchecked !== null) {
-                        partialPieces = __spreadArray([], __read(partialPiecesUnchecked), false);
-                        // <- TODO: [🪓] Here should be no need for spreading new array, just `partialPieces = partialPiecesUnchecked`
+                    state_1 = _b.sent();
+                    if (state_1 === "break")
                         return [3 /*break*/, 6];
-                    }
-                    _d.label = 5;
+                    _b.label = 5;
                 case 5:
-                    _b = _a.next();
+                    scrapers_1_1 = scrapers_1.next();
                    return [3 /*break*/, 3];
                 case 6: return [3 /*break*/, 9];
                 case 7:
-                    e_1_1 = _d.sent();
+                    e_1_1 = _b.sent();
                    e_1 = { error: e_1_1 };
                    return [3 /*break*/, 9];
                 case 8:
                    try {
-                        if (_b && !_b.done && (_c = _a.return)) _c.call(_a);
+                        if (scrapers_1_1 && !scrapers_1_1.done && (_a = scrapers_1.return)) _a.call(scrapers_1);
                    }
                    finally { if (e_1) throw e_1.error; }
                    return [7 /*endfinally*/];
                 case 9:
                    if (partialPieces === null) {
-                        throw new KnowledgeScrapeError(spaceTrim__default["default"](function (block) { return "\n Cannot scrape knowledge from source:\n \n > ".concat(block(knowledgeSource.sourceContent
+                        throw new KnowledgeScrapeError(spaceTrim__default["default"](function (block) { return "\n Cannot scrape knowledge\n \n The source:\n > ".concat(block(knowledgeSource.sourceContent
                            .split('\n')
                            .map(function (line) { return "> ".concat(line); })
-                            .join('\n')), "\n\n No scraper found for the mime type \"").concat(sourceHandler.mimeType, "\"\n\n ").concat(block($registeredScrapersMessage()), "\n\n\n "); }));
+                            .join('\n')), "\n\n No scraper found for the mime type \"").concat(sourceHandler.mimeType, "\"\n\n ").concat(block($registeredScrapersMessage(scrapers)), "\n\n\n "); }));
                    }
                    pieces = partialPieces.map(function (partialPiece) { return (__assign(__assign({}, partialPiece), { sources: [
                        {
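
The rewritten loop above is mostly tsc's down-leveled for..of plumbing; the behavior it implements is: collect the scrapers from `tools.scrapers`, try each one whose `metadata.mimeTypes` contains the source's mime type, stop at the first non-null result, warn and move on when a compatible scraper returns null, and throw `KnowledgeScrapeError` when nothing matched. A simplified TypeScript sketch of that selection rule, using a reduced scraper shape that only includes the members visible in this diff:

// Reduced shapes, for illustration only
type ScraperSketch = {
    metadata: { packageName: string; className: string; mimeTypes: ReadonlyArray<string> };
    scrape(source: { mimeType: string }): Promise<ReadonlyArray<unknown> | null>;
};

async function scrapeWithFirstCompatibleScraper(
    scrapers: ReadonlyArray<ScraperSketch>,
    sourceHandler: { mimeType: string },
): Promise<ReadonlyArray<unknown> | null> {
    for (const scraper of scrapers) {
        // Exact mime-type match, as in the bundle (wildcards are still a TODO there)
        if (!scraper.metadata.mimeTypes.includes(sourceHandler.mimeType)) {
            continue;
        }
        const partialPieces = await scraper.scrape(sourceHandler);
        if (partialPieces !== null) {
            return partialPieces; // first scraper that produces pieces wins
        }
        // A null result logs a warning in the bundle and falls through to the next compatible scraper
    }
    return null; // the caller then throws KnowledgeScrapeError with $registeredScrapersMessage(scrapers)
}
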