@promptbook/cli 0.86.6 → 0.86.8

package/esm/index.es.js CHANGED
@@ -44,7 +44,7 @@ var BOOK_LANGUAGE_VERSION = '1.0.0';
  * @generated
  * @see https://github.com/webgptorg/promptbook
  */
- var PROMPTBOOK_ENGINE_VERSION = '0.86.6';
+ var PROMPTBOOK_ENGINE_VERSION = '0.86.8';
  /**
  * TODO: string_promptbook_version should be constrained to the all versions of Promptbook engine
  * Note: [💞] Ignore a discrepancy between file name and entity name
@@ -1506,9 +1506,11 @@ var FileCacheStorage = /** @class */ (function () {
  FileCacheStorage.prototype.getFilenameForKey = function (key) {
  // TODO: [👬] DRY
  var name = titleToName(key);
+ var nameStart = name.split('-', 2)[0] || 'unnamed';
  var hash = sha256(hexEncoder.parse(name)).toString( /* hex */);
  // <- TODO: [🥬] Encapsulate sha256 to some private utility function
- return join.apply(void 0, __spreadArray(__spreadArray([this.options.rootFolderPath], __read(nameToSubfolderPath(hash /* <- TODO: [🎎] Maybe add some SHA256 prefix */)), false), ["".concat(name.substring(0, MAX_FILENAME_LENGTH), ".json")], false));
+ return join.apply(void 0, __spreadArray(__spreadArray([this.options.rootFolderPath,
+ nameStart], __read(nameToSubfolderPath(hash /* <- TODO: [🎎] Maybe add some SHA256 prefix */)), false), ["".concat(name.substring(0, MAX_FILENAME_LENGTH), ".json")], false));
  };
  /**
  * @@@ Returns the current value associated with the given key, or null if the given key does not exist in the list associated with the object.
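
In effect, getFilenameForKey now inserts an extra top-level subfolder, taken from the first dash-separated segment of the sanitized key, between the cache root and the hash-derived subfolders. A minimal sketch of the resulting layout, assuming titleToName produces a lowercase dash-separated slug and nameToSubfolderPath yields short hash-derived path segments (the helper below uses Node's crypto and is illustrative, not the package's internals):

// Sketch only: approximates the cache-file path layout after 0.86.8.
import { join } from 'path';
import { createHash } from 'crypto';

const MAX_FILENAME_LENGTH = 50; // assumption for the sketch

function sketchFilenameForKey(rootFolderPath, key) {
    // Stand-in for titleToName: lowercase, dash-separated slug.
    const name = key.toLowerCase().replace(/[^a-z0-9]+/g, '-');
    // New in 0.86.8: group files under the first segment of the slug.
    const nameStart = name.split('-', 2)[0] || 'unnamed';
    const hash = createHash('sha256').update(name).digest('hex');
    // Stand-in for nameToSubfolderPath: a couple of short hash-derived folders.
    const subfolders = [hash.substring(0, 2), hash.substring(2, 4)];
    // 0.86.6: <root>/<hash folders>/<name>.json
    // 0.86.8: <root>/<nameStart>/<hash folders>/<name>.json
    return join(rootFolderPath, nameStart, ...subfolders, `${name.substring(0, MAX_FILENAME_LENGTH)}.json`);
}

Grouping by nameStart keeps related cache files together under one folder per key prefix while leaving the file name itself unchanged.
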
@@ -1694,13 +1696,14 @@ function cacheLlmTools(llmTools, options) {
  return /* not await */ llmTools.listModels();
  } });
  var callCommonModel = function (prompt) { return __awaiter(_this, void 0, void 0, function () {
- var key, cacheItem, _a, promptResult, _b;
+ var parameters, content, modelRequirements, key, cacheItem, _a, promptResult, _b;
  return __generator(this, function (_c) {
  switch (_c.label) {
  case 0:
+ parameters = prompt.parameters, content = prompt.content, modelRequirements = prompt.modelRequirements;
  key = titleToName(prompt.title.substring(0, MAX_FILENAME_LENGTH - 10) +
  '-' +
- sha256(hexEncoder.parse(JSON.stringify(prompt.parameters))).toString( /* hex */));
+ sha256(hexEncoder.parse(JSON.stringify({ parameters: parameters, content: content, modelRequirements: modelRequirements }))).toString( /* hex */));
  if (!!isCacheReloaded) return [3 /*break*/, 2];
  return [4 /*yield*/, storage.getItem(key)];
  case 1:
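
The second change widens what feeds the LLM cache key: 0.86.6 hashed only prompt.parameters, so prompts sharing a title and parameters but differing in content or modelRequirements could resolve to the same cache entry; 0.86.8 hashes all three. A minimal sketch of the idea, assuming prompt carries title, parameters, content and modelRequirements fields (the helper name and the use of Node's crypto instead of the package's sha256/hexEncoder pair are illustrative):

// Sketch only: approximates the cache-key derivation after 0.86.8.
import { createHash } from 'crypto';

const MAX_FILENAME_LENGTH = 50; // assumption for the sketch

function sketchCacheKey(prompt) {
    const { title, parameters, content, modelRequirements } = prompt;
    // 0.86.6 hashed only `parameters`; 0.86.8 hashes the whole trio so that
    // different prompt content or model requirements yield different keys.
    const fingerprint = JSON.stringify({ parameters, content, modelRequirements });
    const hash = createHash('sha256').update(fingerprint).digest('hex');
    // Stand-in for titleToName: lowercase, dash-separated slug.
    return `${title.substring(0, MAX_FILENAME_LENGTH - 10)}-${hash}`
        .toLowerCase()
        .replace(/[^a-z0-9]+/g, '-');
}

// Two prompts with identical parameters but different content now get distinct keys:
sketchCacheKey({ title: 'Greeting', parameters: { name: 'Alice' }, content: 'Hello {name}', modelRequirements: { modelVariant: 'CHAT' } });
sketchCacheKey({ title: 'Greeting', parameters: { name: 'Alice' }, content: 'Hi {name}!', modelRequirements: { modelVariant: 'CHAT' } });
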