@fjell/cache 4.7.15 → 4.7.17

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,5 +1,5 @@
1
1
  // src/CacheContext.ts
2
- var createCacheContext = (api, cacheMap, pkType, options, eventEmitter, ttlManager, evictionManager, statsManager) => {
2
+ var createCacheContext = (api, cacheMap, pkType, options, eventEmitter, ttlManager, evictionManager, statsManager, registry) => {
3
3
  return {
4
4
  api,
5
5
  cacheMap,
@@ -8,7 +8,8 @@ var createCacheContext = (api, cacheMap, pkType, options, eventEmitter, ttlManag
8
8
  eventEmitter,
9
9
  ttlManager,
10
10
  evictionManager,
11
- statsManager
11
+ statsManager,
12
+ registry
12
13
  };
13
14
  };
14
15
 
@@ -964,18 +965,120 @@ import {
964
965
  isValidItemKey as isValidItemKey5,
965
966
  validatePK as validatePK7
966
967
  } from "@fjell/core";
967
- var logger8 = logger_default.get("action");
968
+
969
+ // src/utils/cacheInvalidation.ts
970
+ import { toKeyTypeArray } from "@fjell/core";
971
+ var logger8 = logger_default.get("cache", "utils", "cacheInvalidation");
972
+ var extractKeysAndKeyTypesFromActionResult = (affectedItems) => {
973
+ const keys = [];
974
+ const keyTypeArrays = [];
975
+ for (const item of affectedItems) {
976
+ if (Array.isArray(item)) {
977
+ const keyTypes = item.map((locKey) => locKey.kt);
978
+ keyTypeArrays.push(keyTypes);
979
+ } else if ("kt" in item && "pk" in item) {
980
+ keys.push(item);
981
+ const keyTypes = toKeyTypeArray(item);
982
+ keyTypeArrays.push(keyTypes);
983
+ }
984
+ }
985
+ return { keys, keyTypeArrays };
986
+ };
987
+ var invalidateCachesByKeysAndKeyTypes = async (registry, keys, keyTypeArrays) => {
988
+ logger8.debug("Invalidating caches by keys and key types", {
989
+ keysCount: keys.length,
990
+ keyTypeArrays
991
+ });
992
+ const keysByKeyTypes = /* @__PURE__ */ new Map();
993
+ for (const key of keys) {
994
+ const keyTypes = "loc" in key ? [key.kt, ...key.loc.map((locKey) => locKey.kt)] : [key.kt];
995
+ const keyTypesKey = keyTypes.join("|");
996
+ if (!keysByKeyTypes.has(keyTypesKey)) {
997
+ keysByKeyTypes.set(keyTypesKey, []);
998
+ }
999
+ keysByKeyTypes.get(keyTypesKey).push(key);
1000
+ }
1001
+ for (const [keyTypesKey, cacheKeys] of keysByKeyTypes) {
1002
+ const keyTypes = keyTypesKey.split("|");
1003
+ try {
1004
+ const cacheInstance = registry.get(keyTypes);
1005
+ if (cacheInstance && isCache(cacheInstance)) {
1006
+ logger8.debug("Found cache instance for targeted invalidation", {
1007
+ keyTypes,
1008
+ cacheType: cacheInstance.coordinate.kta,
1009
+ keysToInvalidate: cacheKeys.length
1010
+ });
1011
+ await cacheInstance.cacheMap.invalidateItemKeys(cacheKeys);
1012
+ await cacheInstance.cacheMap.clearQueryResults();
1013
+ logger8.debug("Successfully invalidated specific items in cache", {
1014
+ keyTypes,
1015
+ invalidatedCount: cacheKeys.length
1016
+ });
1017
+ } else {
1018
+ logger8.debug("No cache instance found for key types", { keyTypes });
1019
+ }
1020
+ } catch (error) {
1021
+ logger8.warning("Failed to invalidate cache for key types", {
1022
+ keyTypes,
1023
+ error: error instanceof Error ? error.message : String(error)
1024
+ });
1025
+ }
1026
+ }
1027
+ for (const keyTypes of keyTypeArrays) {
1028
+ try {
1029
+ const cacheInstance = registry.get(keyTypes);
1030
+ if (cacheInstance && isCache(cacheInstance)) {
1031
+ logger8.debug("Handling location-based invalidation", { keyTypes });
1032
+ await cacheInstance.cacheMap.clearQueryResults();
1033
+ logger8.debug("Successfully cleared query results for location", { keyTypes });
1034
+ }
1035
+ } catch (error) {
1036
+ logger8.warning("Failed to handle location-based invalidation", {
1037
+ keyTypes,
1038
+ error: error instanceof Error ? error.message : String(error)
1039
+ });
1040
+ }
1041
+ }
1042
+ };
1043
+ function isCache(instance) {
1044
+ return instance !== null && typeof instance === "object" && "operations" in instance && "cacheMap" in instance && typeof instance.cacheMap.invalidateItemKeys === "function";
1045
+ }
1046
+ var handleActionCacheInvalidation = async (registry, affectedItems) => {
1047
+ logger8.debug("Handling action cache invalidation", {
1048
+ affectedItemsCount: affectedItems.length
1049
+ });
1050
+ const { keys, keyTypeArrays } = extractKeysAndKeyTypesFromActionResult(affectedItems);
1051
+ await invalidateCachesByKeysAndKeyTypes(registry, keys, keyTypeArrays);
1052
+ };
1053
+
1054
+ // src/ops/action.ts
1055
+ var logger9 = logger_default.get("action");
968
1056
  var action = async (key, action2, body = {}, context) => {
969
- const { api, cacheMap, pkType } = context;
970
- logger8.default("action", { key, action: action2, body });
1057
+ const { api, cacheMap, pkType, registry } = context;
1058
+ logger9.default("action", { key, action: action2, body });
971
1059
  if (!isValidItemKey5(key)) {
972
- logger8.error("Key for Action is not a valid ItemKey: %j", key);
1060
+ logger9.error("Key for Action is not a valid ItemKey: %j", key);
973
1061
  throw new Error("Key for Action is not a valid ItemKey");
974
1062
  }
975
- logger8.debug("Invalidating item key before action", { key });
1063
+ logger9.debug("Invalidating item key before action", { key });
976
1064
  cacheMap.invalidateItemKeys([key]);
977
- const updated = await api.action(key, action2, body);
978
- logger8.debug("Caching action result", { updatedKey: updated.key });
1065
+ const result = await api.action(key, action2, body);
1066
+ const updated = result[0];
1067
+ const affectedItems = result[1];
1068
+ if (affectedItems && affectedItems.length > 0) {
1069
+ logger9.debug("Handling cache invalidation for affected items", {
1070
+ affectedItemsCount: affectedItems.length
1071
+ });
1072
+ try {
1073
+ await handleActionCacheInvalidation(registry, affectedItems);
1074
+ } catch (error) {
1075
+ logger9.warning("Failed to handle cache invalidation for affected items", {
1076
+ error: error instanceof Error ? error.message : String(error),
1077
+ affectedItems
1078
+ });
1079
+ }
1080
+ }
1081
+ logger9.debug("Caching action result", { updatedKey: updated.key });
979
1082
  cacheMap.set(updated.key, updated);
980
1083
  const keyStr = JSON.stringify(updated.key);
981
1084
  context.ttlManager.onItemAdded(keyStr, cacheMap);
@@ -985,19 +1088,19 @@ var action = async (key, action2, body = {}, context) => {
985
1088
  const parsedKey = JSON.parse(evictedKey);
986
1089
  await cacheMap.delete(parsedKey);
987
1090
  } catch (error) {
988
- logger8.error("Failed to parse evicted key during deletion", {
1091
+ logger9.error("Failed to parse evicted key during deletion", {
989
1092
  evictedKey,
990
1093
  error: error instanceof Error ? error.message : String(error)
991
1094
  });
992
1095
  }
993
1096
  }
994
- logger8.debug("Emitting itemUpdated event after action", {
1097
+ logger9.debug("Emitting itemUpdated event after action", {
995
1098
  key: updated.key,
996
1099
  action: action2
997
1100
  });
998
1101
  const itemEvent = CacheEventFactory.itemUpdated(updated.key, updated, null, "api");
999
1102
  context.eventEmitter.emit(itemEvent);
1000
- logger8.debug("Emitting queryInvalidatedEvent after action", {
1103
+ logger9.debug("Emitting queryInvalidatedEvent after action", {
1001
1104
  eventType: "query_invalidated",
1002
1105
  reason: "item_changed",
1003
1106
  action: action2
@@ -1009,7 +1112,7 @@ var action = async (key, action2, body = {}, context) => {
1009
1112
  { source: "operation", context: { operation: "action" } }
1010
1113
  );
1011
1114
  context.eventEmitter.emit(queryInvalidatedEvent);
1012
- return [context, validatePK7(updated, pkType)];
1115
+ return [context, validatePK7(updated, pkType), affectedItems];
1013
1116
  };
1014
1117
 
1015
1118
  // src/ops/allAction.ts
@@ -1017,10 +1120,10 @@ import {
1017
1120
  validatePK as validatePK8
1018
1121
  } from "@fjell/core";
1019
1122
  import { NotFoundError as NotFoundError3 } from "@fjell/http-api";
1020
- var logger9 = logger_default.get("allAction");
1123
+ var logger10 = logger_default.get("allAction");
1021
1124
  var allAction = async (action2, body = {}, locations = [], context) => {
1022
- const { api, cacheMap, pkType, eventEmitter } = context;
1023
- logger9.default("allAction", { action: action2, body, locations });
1125
+ const { api, cacheMap, pkType, eventEmitter, registry } = context;
1126
+ logger10.default("allAction", { action: action2, body, locations });
1024
1127
  const existingItems = [];
1025
1128
  if (locations && locations.length > 0) {
1026
1129
  try {
@@ -1029,15 +1132,41 @@ var allAction = async (action2, body = {}, locations = [], context) => {
1029
1132
  existingItems.push(...cachedItems);
1030
1133
  }
1031
1134
  } catch (error) {
1032
- logger9.debug("Could not retrieve existing items for comparison", { error });
1135
+ logger10.debug("Could not retrieve existing items for comparison", { error });
1033
1136
  }
1034
1137
  }
1035
- logger9.debug("Invalidating location before allAction", { locations });
1138
+ logger10.debug("Invalidating location before allAction", { locations });
1036
1139
  await cacheMap.invalidateLocation(locations);
1037
1140
  let ret = [];
1141
+ let affectedItems = [];
1038
1142
  try {
1039
- ret = await api.allAction(action2, body, locations);
1040
- logger9.debug("Caching allAction results", { resultCount: ret.length });
1143
+ const result = await api.allAction(action2, body, locations);
1144
+ if (Array.isArray(result) && result.length === 2) {
1145
+ ret = result[0];
1146
+ affectedItems = result[1];
1147
+ } else {
1148
+ logger10.warning("Unexpected result format from allAction", {
1149
+ resultType: typeof result,
1150
+ isArray: Array.isArray(result),
1151
+ resultLength: Array.isArray(result) ? result.length : "not array"
1152
+ });
1153
+ ret = [];
1154
+ affectedItems = [];
1155
+ }
1156
+ if (affectedItems && affectedItems.length > 0) {
1157
+ logger10.debug("Handling cache invalidation for affected items", {
1158
+ affectedItemsCount: affectedItems.length
1159
+ });
1160
+ try {
1161
+ await handleActionCacheInvalidation(registry, affectedItems);
1162
+ } catch (error) {
1163
+ logger10.warning("Failed to handle cache invalidation for affected items", {
1164
+ error: error instanceof Error ? error.message : String(error),
1165
+ affectedItems
1166
+ });
1167
+ }
1168
+ }
1169
+ logger10.debug("Caching allAction results", { resultCount: ret.length });
1041
1170
  const modifiedItems = [];
1042
1171
  const newItems = [];
1043
1172
  for (const v of ret) {
@@ -1059,7 +1188,7 @@ var allAction = async (action2, body = {}, locations = [], context) => {
1059
1188
  }
1060
1189
  }
1061
1190
  for (const item of modifiedItems) {
1062
- logger9.debug("Emitting item_updated event for modified item", { key: item.key });
1191
+ logger10.debug("Emitting item_updated event for modified item", { key: item.key });
1063
1192
  const itemEvent = CacheEventFactory.itemUpdated(
1064
1193
  item.key,
1065
1194
  item,
@@ -1070,7 +1199,7 @@ var allAction = async (action2, body = {}, locations = [], context) => {
1070
1199
  eventEmitter.emit(itemEvent);
1071
1200
  }
1072
1201
  for (const item of newItems) {
1073
- logger9.debug("Emitting item_created event for new item", { key: item.key });
1202
+ logger10.debug("Emitting item_created event for new item", { key: item.key });
1074
1203
  const itemEvent = CacheEventFactory.itemCreated(
1075
1204
  item.key,
1076
1205
  item,
@@ -1080,14 +1209,14 @@ var allAction = async (action2, body = {}, locations = [], context) => {
1080
1209
  }
1081
1210
  if (modifiedItems.length > 0) {
1082
1211
  const modifiedKeys = modifiedItems.map((item) => item.key);
1083
- logger9.debug("Invalidating individual item keys for modified items", {
1212
+ logger10.debug("Invalidating individual item keys for modified items", {
1084
1213
  keyCount: modifiedKeys.length,
1085
1214
  keys: modifiedKeys
1086
1215
  });
1087
1216
  await cacheMap.invalidateItemKeys(modifiedKeys);
1088
1217
  }
1089
1218
  await cacheMap.clearQueryResults();
1090
- logger9.debug("Emitting query_invalidated event after allAction", {
1219
+ logger10.debug("Emitting query_invalidated event after allAction", {
1091
1220
  eventType: "query_invalidated",
1092
1221
  reason: "item_changed",
1093
1222
  action: action2,
@@ -1113,23 +1242,23 @@ var allAction = async (action2, body = {}, locations = [], context) => {
1113
1242
  throw e;
1114
1243
  }
1115
1244
  }
1116
- return [context, validatePK8(ret, pkType)];
1245
+ return [context, validatePK8(ret, pkType), affectedItems];
1117
1246
  };
1118
1247
 
1119
1248
  // src/ops/facet.ts
1120
- var logger10 = logger_default.get("facet");
1249
+ var logger11 = logger_default.get("facet");
1121
1250
  var facet = async (key, facet2, params = {}, context) => {
1122
1251
  const { api } = context;
1123
- logger10.default("facet", { key, facet: facet2 });
1252
+ logger11.default("facet", { key, facet: facet2 });
1124
1253
  const ret = await api.facet(key, facet2, params);
1125
1254
  return ret;
1126
1255
  };
1127
1256
 
1128
1257
  // src/ops/allFacet.ts
1129
- var logger11 = logger_default.get("allFacet");
1258
+ var logger12 = logger_default.get("allFacet");
1130
1259
  var allFacet = async (facet2, params = {}, locations = [], context) => {
1131
1260
  const { api } = context;
1132
- logger11.default("allFacet", { facet: facet2, params, locations });
1261
+ logger12.default("allFacet", { facet: facet2, params, locations });
1133
1262
  const ret = await api.allFacet(facet2, params, locations);
1134
1263
  return ret;
1135
1264
  };
@@ -1138,26 +1267,26 @@ var allFacet = async (facet2, params = {}, locations = [], context) => {
1138
1267
  import {
1139
1268
  validatePK as validatePK9
1140
1269
  } from "@fjell/core";
1141
- var logger12 = logger_default.get("find");
1270
+ var logger13 = logger_default.get("find");
1142
1271
  var find = async (finder, params = {}, locations = [], context) => {
1143
1272
  const { api, cacheMap, pkType, ttlManager, eventEmitter } = context;
1144
- logger12.default("find", { finder, params, locations });
1273
+ logger13.default("find", { finder, params, locations });
1145
1274
  if (context.options?.bypassCache) {
1146
- logger12.debug("Cache bypass enabled, fetching directly from API", { finder, params, locations });
1275
+ logger13.debug("Cache bypass enabled, fetching directly from API", { finder, params, locations });
1147
1276
  try {
1148
1277
  const ret2 = await api.find(finder, params, locations);
1149
- logger12.debug("API response received (not cached due to bypass)", { finder, params, locations, itemCount: ret2.length });
1278
+ logger13.debug("API response received (not cached due to bypass)", { finder, params, locations, itemCount: ret2.length });
1150
1279
  return [context, validatePK9(ret2, pkType)];
1151
1280
  } catch (error) {
1152
- logger12.error("API request failed", { finder, params, locations, error });
1281
+ logger13.error("API request failed", { finder, params, locations, error });
1153
1282
  throw error;
1154
1283
  }
1155
1284
  }
1156
1285
  const queryHash = createFinderHash(finder, params, locations);
1157
- logger12.debug("Generated query hash for find", { queryHash, finder, params, locations });
1286
+ logger13.debug("Generated query hash for find", { queryHash, finder, params, locations });
1158
1287
  const cachedItemKeys = await cacheMap.getQueryResult(queryHash);
1159
1288
  if (cachedItemKeys) {
1160
- logger12.debug("Using cached query results", { cachedKeyCount: cachedItemKeys.length, queryHash });
1289
+ logger13.debug("Using cached query results", { cachedKeyCount: cachedItemKeys.length, queryHash });
1161
1290
  const cachedItems = [];
1162
1291
  let allItemsAvailable = true;
1163
1292
  for (const itemKey of cachedItemKeys) {
@@ -1172,7 +1301,7 @@ var find = async (finder, params = {}, locations = [], context) => {
1172
1301
  if (allItemsAvailable) {
1173
1302
  return [context, validatePK9(cachedItems, pkType)];
1174
1303
  } else {
1175
- logger12.debug("Some cached items missing, invalidating query cache");
1304
+ logger13.debug("Some cached items missing, invalidating query cache");
1176
1305
  cacheMap.deleteQueryResult(queryHash);
1177
1306
  }
1178
1307
  }
@@ -1189,7 +1318,7 @@ var find = async (finder, params = {}, locations = [], context) => {
1189
1318
  }
1190
1319
  const itemKeys = ret.map((item) => item.key);
1191
1320
  cacheMap.setQueryResult(queryHash, itemKeys);
1192
- logger12.debug("Cached query result", { queryHash, itemKeyCount: itemKeys.length });
1321
+ logger13.debug("Cached query result", { queryHash, itemKeyCount: itemKeys.length });
1193
1322
  const event = CacheEventFactory.createQueryEvent(params, locations, ret);
1194
1323
  eventEmitter.emit(event);
1195
1324
  return [context, validatePK9(ret, pkType)];
@@ -1199,31 +1328,31 @@ var find = async (finder, params = {}, locations = [], context) => {
1199
1328
  import {
1200
1329
  validatePK as validatePK10
1201
1330
  } from "@fjell/core";
1202
- var logger13 = logger_default.get("findOne");
1331
+ var logger14 = logger_default.get("findOne");
1203
1332
  var findOne = async (finder, finderParams = {}, locations = [], context) => {
1204
1333
  const { api, cacheMap, pkType, ttlManager, eventEmitter } = context;
1205
- logger13.default("findOne", { finder, finderParams, locations });
1334
+ logger14.default("findOne", { finder, finderParams, locations });
1206
1335
  if (context.options?.bypassCache) {
1207
- logger13.debug("Cache bypass enabled, fetching directly from API", { finder, finderParams, locations });
1336
+ logger14.debug("Cache bypass enabled, fetching directly from API", { finder, finderParams, locations });
1208
1337
  try {
1209
1338
  const ret2 = await api.findOne(finder, finderParams, locations);
1210
- logger13.debug("API response received (not cached due to bypass)", { finder, finderParams, locations });
1339
+ logger14.debug("API response received (not cached due to bypass)", { finder, finderParams, locations });
1211
1340
  return [context, validatePK10(ret2, pkType)];
1212
1341
  } catch (error) {
1213
- logger13.error("API request failed", { finder, finderParams, locations, error });
1342
+ logger14.error("API request failed", { finder, finderParams, locations, error });
1214
1343
  throw error;
1215
1344
  }
1216
1345
  }
1217
1346
  const queryHash = createFinderHash(finder, finderParams, locations);
1218
- logger13.debug("Generated query hash for findOne", { queryHash });
1347
+ logger14.debug("Generated query hash for findOne", { queryHash });
1219
1348
  const cachedItemKeys = await cacheMap.getQueryResult(queryHash);
1220
1349
  if (cachedItemKeys && cachedItemKeys.length > 0) {
1221
- logger13.debug("Using cached query results", { cachedKeyCount: cachedItemKeys.length });
1350
+ logger14.debug("Using cached query results", { cachedKeyCount: cachedItemKeys.length });
1222
1351
  const item = await cacheMap.get(cachedItemKeys[0]);
1223
1352
  if (item) {
1224
1353
  return [context, validatePK10(item, pkType)];
1225
1354
  } else {
1226
- logger13.debug("Cached item missing, invalidating query cache");
1355
+ logger14.debug("Cached item missing, invalidating query cache");
1227
1356
  cacheMap.deleteQueryResult(queryHash);
1228
1357
  }
1229
1358
  }
@@ -1237,7 +1366,7 @@ var findOne = async (finder, finderParams = {}, locations = [], context) => {
1237
1366
  await cacheMap.delete(parsedKey);
1238
1367
  }
1239
1368
  cacheMap.setQueryResult(queryHash, [ret.key]);
1240
- logger13.debug("Cached query result", { queryHash, itemKey: ret.key });
1369
+ logger14.debug("Cached query result", { queryHash, itemKey: ret.key });
1241
1370
  const event = CacheEventFactory.createQueryEvent(finderParams, locations, [ret]);
1242
1371
  eventEmitter.emit(event);
1243
1372
  return [context, validatePK10(ret, pkType)];
@@ -1249,7 +1378,7 @@ import {
1249
1378
  isValidItemKey as isValidItemKey6,
1250
1379
  validatePK as validatePK11
1251
1380
  } from "@fjell/core";
1252
- var logger14 = logger_default.get("set");
1381
+ var logger15 = logger_default.get("set");
1253
1382
  var normalizeKeyValue2 = (value) => {
1254
1383
  return String(value);
1255
1384
  };
@@ -1299,14 +1428,14 @@ var normalizeKey = (key) => {
1299
1428
  };
1300
1429
  var set = async (key, v, context) => {
1301
1430
  const { cacheMap, pkType, ttlManager, evictionManager, eventEmitter } = context;
1302
- logger14.default("set", { key, v });
1431
+ logger15.default("set", { key, v });
1303
1432
  if (!isValidItemKey6(key)) {
1304
- logger14.error("Key for Set is not a valid ItemKey: %j", key);
1433
+ logger15.error("Key for Set is not a valid ItemKey: %j", key);
1305
1434
  throw new Error("Key for Set is not a valid ItemKey");
1306
1435
  }
1307
1436
  validatePK11(v, pkType);
1308
1437
  if (!isItemKeyEqualNormalized(key, v.key)) {
1309
- logger14.error("Key does not match item key: %j != %j", key, v.key);
1438
+ logger15.error("Key does not match item key: %j != %j", key, v.key);
1310
1439
  throw new Error("Key does not match item key");
1311
1440
  }
1312
1441
  const previousItem = await cacheMap.get(key);
@@ -1350,7 +1479,7 @@ var CacheMap = class {
1350
1479
  };
1351
1480
 
1352
1481
  // src/memory/MemoryCacheMap.ts
1353
- var logger15 = logger_default.get("MemoryCacheMap");
1482
+ var logger16 = logger_default.get("MemoryCacheMap");
1354
1483
  var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1355
1484
  implementationType = "memory/memory";
1356
1485
  map = {};
@@ -1368,13 +1497,13 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1368
1497
  const key = JSON.parse(keyStr);
1369
1498
  this.set(key, value);
1370
1499
  } catch (error) {
1371
- logger15.error("Failed to parse initial data key", { keyStr, error });
1500
+ logger16.error("Failed to parse initial data key", { keyStr, error });
1372
1501
  }
1373
1502
  }
1374
1503
  }
1375
1504
  }
1376
1505
  async get(key) {
1377
- logger15.trace("get", { key });
1506
+ logger16.trace("get", { key });
1378
1507
  const hashedKey = this.normalizedHashFunction(key);
1379
1508
  const entry = this.map[hashedKey];
1380
1509
  if (entry && this.normalizedHashFunction(entry.originalKey) === hashedKey) {
@@ -1389,7 +1518,7 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1389
1518
  return null;
1390
1519
  }
1391
1520
  async set(key, value) {
1392
- logger15.trace("set", { key, value });
1521
+ logger16.trace("set", { key, value });
1393
1522
  const hashedKey = this.normalizedHashFunction(key);
1394
1523
  const keyStr = JSON.stringify(key);
1395
1524
  this.map[hashedKey] = { originalKey: key, value };
@@ -1416,7 +1545,7 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1416
1545
  return !!entry && this.normalizedHashFunction(entry.originalKey) === hashedKey;
1417
1546
  }
1418
1547
  async delete(key) {
1419
- logger15.trace("delete", { key });
1548
+ logger16.trace("delete", { key });
1420
1549
  const hashedKey = this.normalizedHashFunction(key);
1421
1550
  const entry = this.map[hashedKey];
1422
1551
  if (entry && this.normalizedHashFunction(entry.originalKey) === hashedKey) {
@@ -1445,10 +1574,10 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1445
1574
  async allIn(locations) {
1446
1575
  const allValues = await this.values();
1447
1576
  if (locations.length === 0) {
1448
- logger15.debug("Returning all items, LocKeys is empty");
1577
+ logger16.debug("Returning all items, LocKeys is empty");
1449
1578
  return allValues;
1450
1579
  } else {
1451
- logger15.debug("allIn", { locations, count: allValues.length });
1580
+ logger16.debug("allIn", { locations, count: allValues.length });
1452
1581
  return allValues.filter((item) => {
1453
1582
  const key = item.key;
1454
1583
  if (key && isComKey(key)) {
@@ -1460,12 +1589,12 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1460
1589
  }
1461
1590
  }
1462
1591
  async contains(query, locations) {
1463
- logger15.debug("contains", { query, locations });
1592
+ logger16.debug("contains", { query, locations });
1464
1593
  const items = await this.allIn(locations);
1465
1594
  return items.some((item) => isQueryMatch(item, query));
1466
1595
  }
1467
1596
  async queryIn(query, locations = []) {
1468
- logger15.debug("queryIn", { query, locations });
1597
+ logger16.debug("queryIn", { query, locations });
1469
1598
  const items = await this.allIn(locations);
1470
1599
  return items.filter((item) => isQueryMatch(item, query));
1471
1600
  }
@@ -1488,7 +1617,7 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1488
1617
  }
1489
1618
  // Query result caching methods implementation
1490
1619
  async setQueryResult(queryHash, itemKeys) {
1491
- logger15.trace("setQueryResult", { queryHash, itemKeys });
1620
+ logger16.trace("setQueryResult", { queryHash, itemKeys });
1492
1621
  const entry = {
1493
1622
  itemKeys: [...itemKeys]
1494
1623
  // Create a copy to avoid external mutations
@@ -1496,7 +1625,7 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1496
1625
  this.queryResultCache[queryHash] = entry;
1497
1626
  }
1498
1627
  async getQueryResult(queryHash) {
1499
- logger15.trace("getQueryResult", { queryHash });
1628
+ logger16.trace("getQueryResult", { queryHash });
1500
1629
  const entry = this.queryResultCache[queryHash];
1501
1630
  if (!entry) {
1502
1631
  return null;
@@ -1508,11 +1637,11 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1508
1637
  return !!entry;
1509
1638
  }
1510
1639
  async deleteQueryResult(queryHash) {
1511
- logger15.trace("deleteQueryResult", { queryHash });
1640
+ logger16.trace("deleteQueryResult", { queryHash });
1512
1641
  delete this.queryResultCache[queryHash];
1513
1642
  }
1514
1643
  async invalidateItemKeys(keys) {
1515
- logger15.debug("invalidateItemKeys", { keys });
1644
+ logger16.debug("invalidateItemKeys", { keys });
1516
1645
  if (keys.length === 0) {
1517
1646
  return;
1518
1647
  }
@@ -1543,14 +1672,14 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1543
1672
  queriesToRemove.forEach((queryHash) => {
1544
1673
  this.deleteQueryResult(queryHash);
1545
1674
  });
1546
- logger15.debug("Selectively invalidated queries referencing affected keys", {
1675
+ logger16.debug("Selectively invalidated queries referencing affected keys", {
1547
1676
  affectedKeys: keys.length,
1548
1677
  queriesRemoved: queriesToRemove.length,
1549
1678
  totalQueries: Object.keys(this.queryResultCache).length
1550
1679
  });
1551
1680
  }
1552
1681
  async invalidateLocation(locations) {
1553
- logger15.debug("invalidateLocation", { locations });
1682
+ logger16.debug("invalidateLocation", { locations });
1554
1683
  let keysToInvalidate = [];
1555
1684
  if (locations.length === 0) {
1556
1685
  const allKeys = await this.keys();
@@ -1563,7 +1692,7 @@ var MemoryCacheMap = class _MemoryCacheMap extends CacheMap {
1563
1692
  await this.invalidateItemKeys(keysToInvalidate);
1564
1693
  }
1565
1694
  async clearQueryResults() {
1566
- logger15.trace("clearQueryResults");
1695
+ logger16.trace("clearQueryResults");
1567
1696
  this.queryResultCache = {};
1568
1697
  }
1569
1698
  // CacheMapMetadataProvider implementation
@@ -1605,7 +1734,7 @@ import {
1605
1734
  isComKey as isComKey2,
1606
1735
  isQueryMatch as isQueryMatch2
1607
1736
  } from "@fjell/core";
1608
- var logger16 = logger_default.get("EnhancedMemoryCacheMap");
1737
+ var logger17 = logger_default.get("EnhancedMemoryCacheMap");
1609
1738
  var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1610
1739
  implementationType = "memory/enhanced";
1611
1740
  map = {};
@@ -1624,11 +1753,11 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1624
1753
  this.normalizedHashFunction = createNormalizedHashFunction();
1625
1754
  if (sizeConfig?.maxSizeBytes) {
1626
1755
  this.maxSizeBytes = parseSizeString(sizeConfig.maxSizeBytes);
1627
- logger16.debug("Cache size limit set", { maxSizeBytes: this.maxSizeBytes });
1756
+ logger17.debug("Cache size limit set", { maxSizeBytes: this.maxSizeBytes });
1628
1757
  }
1629
1758
  if (sizeConfig?.maxItems) {
1630
1759
  this.maxItems = sizeConfig.maxItems;
1631
- logger16.debug("Cache item limit set", { maxItems: this.maxItems });
1760
+ logger17.debug("Cache item limit set", { maxItems: this.maxItems });
1632
1761
  }
1633
1762
  if (initialData) {
1634
1763
  for (const [keyStr, value] of Object.entries(initialData)) {
@@ -1636,13 +1765,13 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1636
1765
  const key = JSON.parse(keyStr);
1637
1766
  this.set(key, value);
1638
1767
  } catch (error) {
1639
- logger16.error("Failed to parse initial data key", { keyStr, error });
1768
+ logger17.error("Failed to parse initial data key", { keyStr, error });
1640
1769
  }
1641
1770
  }
1642
1771
  }
1643
1772
  }
1644
1773
  async get(key) {
1645
- logger16.trace("get", { key });
1774
+ logger17.trace("get", { key });
1646
1775
  const hashedKey = this.normalizedHashFunction(key);
1647
1776
  const entry = this.map[hashedKey];
1648
1777
  if (entry && this.normalizedHashFunction(entry.originalKey) === hashedKey && entry.value !== null) {
@@ -1651,7 +1780,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1651
1780
  return null;
1652
1781
  }
1653
1782
  async set(key, value) {
1654
- logger16.trace("set", { key, value });
1783
+ logger17.trace("set", { key, value });
1655
1784
  const hashedKey = this.normalizedHashFunction(key);
1656
1785
  const estimatedSize = estimateValueSize(value);
1657
1786
  const existingEntry = this.map[hashedKey];
@@ -1662,7 +1791,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1662
1791
  const oldValue = existingEntry.value;
1663
1792
  existingEntry.value = value;
1664
1793
  existingEntry.metadata.estimatedSize = estimatedSize;
1665
- logger16.trace("Updated existing cache entry", {
1794
+ logger17.trace("Updated existing cache entry", {
1666
1795
  key: hashedKey,
1667
1796
  sizeDiff,
1668
1797
  currentSize: this.currentSizeBytes,
@@ -1683,7 +1812,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1683
1812
  };
1684
1813
  this.currentSizeBytes += estimatedSize;
1685
1814
  this.currentItemCount++;
1686
- logger16.trace("Added new cache entry", {
1815
+ logger17.trace("Added new cache entry", {
1687
1816
  key: hashedKey,
1688
1817
  size: estimatedSize,
1689
1818
  currentSize: this.currentSizeBytes,
@@ -1700,14 +1829,14 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1700
1829
  this.deleteInternal(key, true, "filter");
1701
1830
  }
1702
1831
  deleteInternal(key, invalidateQueries = false, invalidationMode = "remove") {
1703
- logger16.trace("delete", { key });
1832
+ logger17.trace("delete", { key });
1704
1833
  const hashedKey = this.normalizedHashFunction(key);
1705
1834
  const entry = this.map[hashedKey];
1706
1835
  if (entry && this.normalizedHashFunction(entry.originalKey) === hashedKey) {
1707
1836
  this.currentSizeBytes -= entry.metadata.estimatedSize;
1708
1837
  this.currentItemCount--;
1709
1838
  delete this.map[hashedKey];
1710
- logger16.trace("Deleted cache entry", {
1839
+ logger17.trace("Deleted cache entry", {
1711
1840
  key: hashedKey,
1712
1841
  freedSize: entry.metadata.estimatedSize,
1713
1842
  currentSize: this.currentSizeBytes,
@@ -1729,7 +1858,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1729
1858
  return Object.values(this.map).filter((entry) => entry.value !== null).map((entry) => entry.value);
1730
1859
  }
1731
1860
  async clear() {
1732
- logger16.debug("Clearing cache", {
1861
+ logger17.debug("Clearing cache", {
1733
1862
  itemsCleared: this.currentItemCount,
1734
1863
  bytesFreed: this.currentSizeBytes
1735
1864
  });
@@ -1740,10 +1869,10 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1740
1869
  async allIn(locations) {
1741
1870
  const allValues = await this.values();
1742
1871
  if (locations.length === 0) {
1743
- logger16.debug("Returning all items, LocKeys is empty");
1872
+ logger17.debug("Returning all items, LocKeys is empty");
1744
1873
  return allValues;
1745
1874
  } else {
1746
- logger16.debug("allIn", { locations, count: allValues.length });
1875
+ logger17.debug("allIn", { locations, count: allValues.length });
1747
1876
  return allValues.filter((item) => {
1748
1877
  const key = item.key;
1749
1878
  if (key && isComKey2(key)) {
@@ -1754,12 +1883,12 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1754
1883
  }
1755
1884
  }
1756
1885
  async contains(query, locations) {
1757
- logger16.debug("contains", { query, locations });
1886
+ logger17.debug("contains", { query, locations });
1758
1887
  const items = await this.allIn(locations);
1759
1888
  return items.some((item) => isQueryMatch2(item, query));
1760
1889
  }
1761
1890
  async queryIn(query, locations = []) {
1762
- logger16.debug("queryIn", { query, locations });
1891
+ logger17.debug("queryIn", { query, locations });
1763
1892
  const items = await this.allIn(locations);
1764
1893
  return items.filter((item) => isQueryMatch2(item, query));
1765
1894
  }
@@ -1805,7 +1934,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1805
1934
  }
1806
1935
  // Query result caching methods
1807
1936
  async setQueryResult(queryHash, itemKeys) {
1808
- logger16.trace("setQueryResult", { queryHash, itemKeys });
1937
+ logger17.trace("setQueryResult", { queryHash, itemKeys });
1809
1938
  if (queryHash in this.queryResultCache) {
1810
1939
  this.removeQueryResultFromSizeTracking(queryHash);
1811
1940
  }
@@ -1817,7 +1946,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1817
1946
  this.addQueryResultToSizeTracking(queryHash, entry);
1818
1947
  }
1819
1948
  async getQueryResult(queryHash) {
1820
- logger16.trace("getQueryResult", { queryHash });
1949
+ logger17.trace("getQueryResult", { queryHash });
1821
1950
  const entry = this.queryResultCache[queryHash];
1822
1951
  if (!entry) {
1823
1952
  return null;
@@ -1839,7 +1968,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1839
1968
  this.queryResultsCacheSize = 0;
1840
1969
  }
1841
1970
  async invalidateItemKeys(keys) {
1842
- logger16.debug("invalidateItemKeys", { keys });
1971
+ logger17.debug("invalidateItemKeys", { keys });
1843
1972
  if (keys.length === 0) {
1844
1973
  return;
1845
1974
  }
@@ -1889,7 +2018,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1889
2018
  });
1890
2019
  }
1891
2020
  async invalidateLocation(locations) {
1892
- logger16.debug("invalidateLocation", { locations });
2021
+ logger17.debug("invalidateLocation", { locations });
1893
2022
  let keysToInvalidate = [];
1894
2023
  if (locations.length === 0) {
1895
2024
  const allKeys = await this.keys();
@@ -1909,7 +2038,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1909
2038
  const itemKeysSize = estimateValueSize(entry.itemKeys);
1910
2039
  const totalSize = hashSize + itemKeysSize;
1911
2040
  this.queryResultsCacheSize += totalSize;
1912
- logger16.trace("Added query result to size tracking", {
2041
+ logger17.trace("Added query result to size tracking", {
1913
2042
  queryHash,
1914
2043
  estimatedSize: totalSize,
1915
2044
  totalQueryCacheSize: this.queryResultsCacheSize
@@ -1925,7 +2054,7 @@ var EnhancedMemoryCacheMap = class _EnhancedMemoryCacheMap extends CacheMap {
1925
2054
  const itemKeysSize = estimateValueSize(entry.itemKeys);
1926
2055
  const totalSize = hashSize + itemKeysSize;
1927
2056
  this.queryResultsCacheSize = Math.max(0, this.queryResultsCacheSize - totalSize);
1928
- logger16.trace("Removed query result from size tracking", {
2057
+ logger17.trace("Removed query result from size tracking", {
1929
2058
  queryHash,
1930
2059
  estimatedSize: totalSize,
1931
2060
  totalQueryCacheSize: this.queryResultsCacheSize
@@ -2010,7 +2139,7 @@ import {
2010
2139
  isComKey as isComKey3,
2011
2140
  isQueryMatch as isQueryMatch3
2012
2141
  } from "@fjell/core";
2013
- var logger17 = logger_default.get("LocalStorageCacheMap");
2142
+ var logger18 = logger_default.get("LocalStorageCacheMap");
2014
2143
  var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2015
2144
  implementationType = "browser/localStorage";
2016
2145
  keyPrefix;
@@ -2041,7 +2170,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2041
2170
  }
2042
2171
  return keys;
2043
2172
  } catch (error) {
2044
- logger17.error("Error getting keys by prefix from localStorage", { prefix, error });
2173
+ logger18.error("Error getting keys by prefix from localStorage", { prefix, error });
2045
2174
  throw error;
2046
2175
  }
2047
2176
  }
@@ -2049,12 +2178,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2049
2178
  try {
2050
2179
  const allEntries = this.collectCacheEntries();
2051
2180
  if (allEntries.length === 0) {
2052
- logger17.debug("No entries to clean up");
2181
+ logger18.debug("No entries to clean up");
2053
2182
  return false;
2054
2183
  }
2055
2184
  return this.removeOldestEntries(allEntries, aggressive);
2056
2185
  } catch (error) {
2057
- logger17.error("Failed to cleanup old localStorage entries", { error });
2186
+ logger18.error("Failed to cleanup old localStorage entries", { error });
2058
2187
  return false;
2059
2188
  }
2060
2189
  }
@@ -2080,7 +2209,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2080
2209
  }
2081
2210
  }
2082
2211
  } catch (error) {
2083
- logger17.debug("Found corrupted entry during cleanup", { key, error });
2212
+ logger18.debug("Found corrupted entry during cleanup", { key, error });
2084
2213
  allEntries.push({ key, timestamp: 0, size: 0 });
2085
2214
  }
2086
2215
  }
@@ -2099,12 +2228,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2099
2228
  removedCount++;
2100
2229
  removedSize += allEntries[i].size;
2101
2230
  } catch (error) {
2102
- logger17.error("Failed to remove entry during cleanup", { key: allEntries[i].key, error });
2231
+ logger18.error("Failed to remove entry during cleanup", { key: allEntries[i].key, error });
2103
2232
  }
2104
2233
  }
2105
2234
  if (removedCount > 0) {
2106
2235
  const cleanupType = aggressive ? "aggressive" : "normal";
2107
- logger17.info(`Cleaned up ${removedCount} old localStorage entries (${removedSize} bytes) using ${cleanupType} cleanup to free space`);
2236
+ logger18.info(`Cleaned up ${removedCount} old localStorage entries (${removedSize} bytes) using ${cleanupType} cleanup to free space`);
2108
2237
  }
2109
2238
  return removedCount > 0;
2110
2239
  }
@@ -2112,7 +2241,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2112
2241
  return this.getAllKeysStartingWith(`${this.keyPrefix}:`);
2113
2242
  }
2114
2243
  async get(key) {
2115
- logger17.trace("get", { key });
2244
+ logger18.trace("get", { key });
2116
2245
  try {
2117
2246
  const storageKey = this.getStorageKey(key);
2118
2247
  let stored = localStorage.getItem(storageKey);
@@ -2127,18 +2256,18 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2127
2256
  return parsed.value;
2128
2257
  }
2129
2258
  } catch (parseError) {
2130
- logger17.debug("Failed to parse stored value", { key, error: parseError });
2259
+ logger18.debug("Failed to parse stored value", { key, error: parseError });
2131
2260
  return null;
2132
2261
  }
2133
2262
  }
2134
2263
  return null;
2135
2264
  } catch (error) {
2136
- logger17.error("Error retrieving from localStorage", { key, error });
2265
+ logger18.error("Error retrieving from localStorage", { key, error });
2137
2266
  return null;
2138
2267
  }
2139
2268
  }
2140
2269
  async set(key, value) {
2141
- logger17.trace("set", { key, value });
2270
+ logger18.trace("set", { key, value });
2142
2271
  for (let attempt = 0; attempt < this.MAX_RETRY_ATTEMPTS; attempt++) {
2143
2272
  try {
2144
2273
  const storageKey = this.getStorageKey(key);
@@ -2149,12 +2278,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2149
2278
  };
2150
2279
  localStorage.setItem(storageKey, JSON.stringify(toStore));
2151
2280
  if (attempt > 0) {
2152
- logger17.info(`Successfully stored item after ${attempt} retries`);
2281
+ logger18.info(`Successfully stored item after ${attempt} retries`);
2153
2282
  }
2154
2283
  return;
2155
2284
  } catch (error) {
2156
2285
  const isLastAttempt = attempt === this.MAX_RETRY_ATTEMPTS - 1;
2157
- logger17.error(`Error storing to localStorage (attempt ${attempt + 1}/${this.MAX_RETRY_ATTEMPTS})`, {
2286
+ logger18.error(`Error storing to localStorage (attempt ${attempt + 1}/${this.MAX_RETRY_ATTEMPTS})`, {
2158
2287
  key,
2159
2288
  value,
2160
2289
  error,
@@ -2181,30 +2310,30 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2181
2310
  const parsed = JSON.parse(stored);
2182
2311
  return this.normalizedHashFunction(parsed.originalKey) === this.normalizedHashFunction(key);
2183
2312
  } catch (parseError) {
2184
- logger17.debug("Failed to parse stored value in includesKey", { key, error: parseError });
2313
+ logger18.debug("Failed to parse stored value in includesKey", { key, error: parseError });
2185
2314
  return false;
2186
2315
  }
2187
2316
  }
2188
2317
  return false;
2189
2318
  } catch (error) {
2190
- logger17.error("Error checking key in localStorage", { key, error });
2319
+ logger18.error("Error checking key in localStorage", { key, error });
2191
2320
  return false;
2192
2321
  }
2193
2322
  }
2194
2323
  async delete(key) {
2195
- logger17.trace("delete", { key });
2324
+ logger18.trace("delete", { key });
2196
2325
  try {
2197
2326
  const storageKey = this.getStorageKey(key);
2198
2327
  localStorage.removeItem(storageKey);
2199
2328
  } catch (error) {
2200
- logger17.error("Error deleting from localStorage", { key, error });
2329
+ logger18.error("Error deleting from localStorage", { key, error });
2201
2330
  throw error;
2202
2331
  }
2203
2332
  }
2204
2333
  async allIn(locations) {
2205
2334
  const allKeys = this.keys();
2206
2335
  if (locations.length === 0) {
2207
- logger17.debug("Returning all items, LocKeys is empty");
2336
+ logger18.debug("Returning all items, LocKeys is empty");
2208
2337
  const items = [];
2209
2338
  for (const key of await allKeys) {
2210
2339
  const item = await this.get(key);
@@ -2216,14 +2345,14 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2216
2345
  } else {
2217
2346
  const locKeys = locations;
2218
2347
  const resolvedKeys = await allKeys;
2219
- logger17.debug("allIn", { locKeys, keys: resolvedKeys.length });
2348
+ logger18.debug("allIn", { locKeys, keys: resolvedKeys.length });
2220
2349
  const filteredKeys = resolvedKeys.filter((key) => key && isComKey3(key)).filter((key) => {
2221
- const ComKey13 = key;
2222
- logger17.debug("Comparing Location Keys", {
2350
+ const ComKey15 = key;
2351
+ logger18.debug("Comparing Location Keys", {
2223
2352
  locKeys,
2224
- ComKey: ComKey13
2353
+ ComKey: ComKey15
2225
2354
  });
2226
- return isLocKeyArrayEqual(locKeys, ComKey13.loc);
2355
+ return isLocKeyArrayEqual(locKeys, ComKey15.loc);
2227
2356
  });
2228
2357
  const items = [];
2229
2358
  for (const key of filteredKeys) {
@@ -2236,12 +2365,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2236
2365
  }
2237
2366
  }
2238
2367
  async contains(query, locations) {
2239
- logger17.debug("contains", { query, locations });
2368
+ logger18.debug("contains", { query, locations });
2240
2369
  const items = await this.allIn(locations);
2241
2370
  return items.some((item) => isQueryMatch3(item, query));
2242
2371
  }
2243
2372
  async queryIn(query, locations = []) {
2244
- logger17.debug("queryIn", { query, locations });
2373
+ logger18.debug("queryIn", { query, locations });
2245
2374
  const items = await this.allIn(locations);
2246
2375
  return items.filter((item) => isQueryMatch3(item, query));
2247
2376
  }
@@ -2255,7 +2384,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2255
2384
  return JSON.parse(stored);
2256
2385
  }
2257
2386
  } catch (parseError) {
2258
- logger17.debug("Skipping corrupted localStorage entry", { storageKey, error: parseError });
2387
+ logger18.debug("Skipping corrupted localStorage entry", { storageKey, error: parseError });
2259
2388
  }
2260
2389
  return null;
2261
2390
  }
@@ -2270,7 +2399,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2270
2399
  }
2271
2400
  }
2272
2401
  } catch (error) {
2273
- logger17.error("Error getting keys from localStorage", { error });
2402
+ logger18.error("Error getting keys from localStorage", { error });
2274
2403
  }
2275
2404
  return keys;
2276
2405
  }
@@ -2285,25 +2414,25 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2285
2414
  }
2286
2415
  }
2287
2416
  } catch (error) {
2288
- logger17.error("Error getting values from localStorage", { error });
2417
+ logger18.error("Error getting values from localStorage", { error });
2289
2418
  }
2290
2419
  return values;
2291
2420
  }
2292
2421
  async clear() {
2293
- logger17.debug("Clearing localStorage cache");
2422
+ logger18.debug("Clearing localStorage cache");
2294
2423
  try {
2295
2424
  const storageKeys = this.getAllStorageKeys();
2296
2425
  for (const storageKey of storageKeys) {
2297
2426
  localStorage.removeItem(storageKey);
2298
2427
  }
2299
2428
  } catch (error) {
2300
- logger17.error("Error clearing localStorage cache", { error });
2429
+ logger18.error("Error clearing localStorage cache", { error });
2301
2430
  throw error;
2302
2431
  }
2303
2432
  }
2304
2433
  // Query result caching methods implementation
2305
2434
  async setQueryResult(queryHash, itemKeys) {
2306
- logger17.trace("setQueryResult", { queryHash, itemKeys });
2435
+ logger18.trace("setQueryResult", { queryHash, itemKeys });
2307
2436
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
2308
2437
  const entry = {
2309
2438
  itemKeys
@@ -2311,11 +2440,11 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2311
2440
  try {
2312
2441
  localStorage.setItem(queryKey, JSON.stringify(entry));
2313
2442
  } catch (error) {
2314
- logger17.error("Failed to store query result in localStorage", { queryHash, error });
2443
+ logger18.error("Failed to store query result in localStorage", { queryHash, error });
2315
2444
  }
2316
2445
  }
2317
2446
  async getQueryResult(queryHash) {
2318
- logger17.trace("getQueryResult", { queryHash });
2447
+ logger18.trace("getQueryResult", { queryHash });
2319
2448
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
2320
2449
  try {
2321
2450
  const data = localStorage.getItem(queryKey);
@@ -2328,7 +2457,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2328
2457
  }
2329
2458
  return entry.itemKeys || null;
2330
2459
  } catch (error) {
2331
- logger17.error("Failed to retrieve query result from localStorage", { queryHash, error });
2460
+ logger18.error("Failed to retrieve query result from localStorage", { queryHash, error });
2332
2461
  return null;
2333
2462
  }
2334
2463
  }
@@ -2337,21 +2466,21 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2337
2466
  try {
2338
2467
  return localStorage.getItem(queryKey) !== null;
2339
2468
  } catch (error) {
2340
- logger17.error("Failed to check query result in localStorage", { queryHash, error });
2469
+ logger18.error("Failed to check query result in localStorage", { queryHash, error });
2341
2470
  return false;
2342
2471
  }
2343
2472
  }
2344
2473
  async deleteQueryResult(queryHash) {
2345
- logger17.trace("deleteQueryResult", { queryHash });
2474
+ logger18.trace("deleteQueryResult", { queryHash });
2346
2475
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
2347
2476
  try {
2348
2477
  localStorage.removeItem(queryKey);
2349
2478
  } catch (error) {
2350
- logger17.error("Failed to delete query result from localStorage", { queryHash, error });
2479
+ logger18.error("Failed to delete query result from localStorage", { queryHash, error });
2351
2480
  }
2352
2481
  }
2353
2482
  async invalidateItemKeys(keys) {
2354
- logger17.debug("invalidateItemKeys", { keys });
2483
+ logger18.debug("invalidateItemKeys", { keys });
2355
2484
  if (keys.length === 0) {
2356
2485
  return;
2357
2486
  }
@@ -2389,24 +2518,24 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2389
2518
  }
2390
2519
  }
2391
2520
  } catch (error) {
2392
- logger17.debug("Failed to parse query result", { queryKey, error });
2521
+ logger18.debug("Failed to parse query result", { queryKey, error });
2393
2522
  }
2394
2523
  }
2395
2524
  queriesToRemove.forEach((queryKey) => {
2396
2525
  localStorage.removeItem(queryKey);
2397
2526
  });
2398
- logger17.debug("Selectively invalidated queries referencing affected keys", {
2527
+ logger18.debug("Selectively invalidated queries referencing affected keys", {
2399
2528
  affectedKeys: keys.length,
2400
2529
  queriesRemoved: queriesToRemove.length,
2401
2530
  totalQueries: queryKeys.length
2402
2531
  });
2403
2532
  } catch (error) {
2404
- logger17.error("Error during selective query invalidation, falling back to clearing all queries", { error });
2533
+ logger18.error("Error during selective query invalidation, falling back to clearing all queries", { error });
2405
2534
  await this.clearQueryResults();
2406
2535
  }
2407
2536
  }
2408
2537
  async invalidateLocation(locations) {
2409
- logger17.debug("invalidateLocation", { locations });
2538
+ logger18.debug("invalidateLocation", { locations });
2410
2539
  let keysToInvalidate = [];
2411
2540
  if (locations.length === 0) {
2412
2541
  const allKeys = await this.keys();
@@ -2421,7 +2550,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2421
2550
  }
2422
2551
  }
2423
2552
  async clearQueryResults() {
2424
- logger17.trace("clearQueryResults");
2553
+ logger18.trace("clearQueryResults");
2425
2554
  const queryPrefix = `${this.keyPrefix}:query:`;
2426
2555
  try {
2427
2556
  const keysToRemove = this.getAllKeysStartingWith(queryPrefix);
@@ -2429,11 +2558,11 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2429
2558
  try {
2430
2559
  localStorage.removeItem(key);
2431
2560
  } catch (error) {
2432
- logger17.error("Failed to remove query result from localStorage", { key, error });
2561
+ logger18.error("Failed to remove query result from localStorage", { key, error });
2433
2562
  }
2434
2563
  }
2435
2564
  } catch (error) {
2436
- logger17.error("Failed to clear query results from localStorage", { error });
2565
+ logger18.error("Failed to clear query results from localStorage", { error });
2437
2566
  }
2438
2567
  }
2439
2568
  // CacheMapMetadataProvider implementation
@@ -2445,13 +2574,13 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2445
2574
  try {
2446
2575
  return JSON.parse(stored);
2447
2576
  } catch (e) {
2448
- logger17.debug("Invalid metadata JSON, treating as null", { key, error: e });
2577
+ logger18.debug("Invalid metadata JSON, treating as null", { key, error: e });
2449
2578
  return null;
2450
2579
  }
2451
2580
  }
2452
2581
  return null;
2453
2582
  } catch (error) {
2454
- logger17.error("Error getting metadata from localStorage", { key, error });
2583
+ logger18.error("Error getting metadata from localStorage", { key, error });
2455
2584
  throw error;
2456
2585
  }
2457
2586
  }
@@ -2461,12 +2590,12 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2461
2590
  const metadataKey = `${this.keyPrefix}:metadata:${key}`;
2462
2591
  localStorage.setItem(metadataKey, JSON.stringify(metadata));
2463
2592
  if (attempt > 0) {
2464
- logger17.info(`Successfully stored metadata after ${attempt} retries`);
2593
+ logger18.info(`Successfully stored metadata after ${attempt} retries`);
2465
2594
  }
2466
2595
  return;
2467
2596
  } catch (error) {
2468
2597
  const isLastAttempt = attempt === this.MAX_RETRY_ATTEMPTS - 1;
2469
- logger17.error(`Error storing metadata to localStorage (attempt ${attempt + 1}/${this.MAX_RETRY_ATTEMPTS})`, {
2598
+ logger18.error(`Error storing metadata to localStorage (attempt ${attempt + 1}/${this.MAX_RETRY_ATTEMPTS})`, {
2470
2599
  key,
2471
2600
  error,
2472
2601
  isLastAttempt
@@ -2488,7 +2617,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2488
2617
  const metadataKey = `${this.keyPrefix}:metadata:${key}`;
2489
2618
  localStorage.removeItem(metadataKey);
2490
2619
  } catch (error) {
2491
- logger17.error("Error deleting metadata from localStorage", { key, error });
2620
+ logger18.error("Error deleting metadata from localStorage", { key, error });
2492
2621
  throw error;
2493
2622
  }
2494
2623
  }
@@ -2507,11 +2636,11 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2507
2636
  metadata.set(metadataKey, parsed);
2508
2637
  }
2509
2638
  } catch (error) {
2510
- logger17.debug("Skipping invalid metadata entry", { key, error });
2639
+ logger18.debug("Skipping invalid metadata entry", { key, error });
2511
2640
  }
2512
2641
  }
2513
2642
  } catch (error) {
2514
- logger17.error("Error getting metadata from localStorage", { error });
2643
+ logger18.error("Error getting metadata from localStorage", { error });
2515
2644
  throw error;
2516
2645
  }
2517
2646
  return metadata;
@@ -2522,7 +2651,7 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2522
2651
  const keysToDelete = this.getAllKeysStartingWith(metadataPrefix);
2523
2652
  keysToDelete.forEach((key) => localStorage.removeItem(key));
2524
2653
  } catch (error) {
2525
- logger17.error("Error clearing metadata from localStorage", { error });
2654
+ logger18.error("Error clearing metadata from localStorage", { error });
2526
2655
  throw error;
2527
2656
  }
2528
2657
  }
@@ -2549,16 +2678,16 @@ var LocalStorageCacheMap = class _LocalStorageCacheMap extends CacheMap {
2549
2678
  itemCount++;
2550
2679
  }
2551
2680
  } catch (error) {
2552
- logger17.debug("Invalid entry in getCurrentSize", { key, error });
2681
+ logger18.debug("Invalid entry in getCurrentSize", { key, error });
2553
2682
  }
2554
2683
  }
2555
2684
  } catch (error) {
2556
- logger17.debug("Size calculation failed, using string length", { key, error });
2685
+ logger18.debug("Size calculation failed, using string length", { key, error });
2557
2686
  sizeBytes += value.length;
2558
2687
  }
2559
2688
  }
2560
2689
  } catch (error) {
2561
- logger17.error("Error calculating size from localStorage", { error });
2690
+ logger18.error("Error calculating size from localStorage", { error });
2562
2691
  throw error;
2563
2692
  }
2564
2693
  return { itemCount, sizeBytes };
@@ -2579,7 +2708,7 @@ import {
2579
2708
  isQueryMatch as isQueryMatch4
2580
2709
  } from "@fjell/core";
2581
2710
  import safeStringify2 from "fast-safe-stringify";
2582
- var logger18 = logger_default.get("SessionStorageCacheMap");
2711
+ var logger19 = logger_default.get("SessionStorageCacheMap");
2583
2712
  var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2584
2713
  implementationType = "browser/sessionStorage";
2585
2714
  keyPrefix;
@@ -2607,7 +2736,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2607
2736
  }
2608
2737
  }
2609
2738
  } catch (error) {
2610
- logger18.error("Error getting keys from sessionStorage", { error });
2739
+ logger19.error("Error getting keys from sessionStorage", { error });
2611
2740
  }
2612
2741
  return keys;
2613
2742
  }
@@ -2630,7 +2759,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2630
2759
  }
2631
2760
  }
2632
2761
  async get(key) {
2633
- logger18.trace("get", { key });
2762
+ logger19.trace("get", { key });
2634
2763
  try {
2635
2764
  const currentHash = this.normalizedHashFunction(key);
2636
2765
  if (this.hasCollisionForHash(currentHash)) {
@@ -2650,14 +2779,14 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2650
2779
  }
2651
2780
  return null;
2652
2781
  } catch (error) {
2653
- logger18.error("Error retrieving from sessionStorage", { key, error });
2782
+ logger19.error("Error retrieving from sessionStorage", { key, error });
2654
2783
  return null;
2655
2784
  }
2656
2785
  }
2657
2786
  async set(key, value) {
2658
2787
  try {
2659
2788
  const storageKey = this.getStorageKey(key);
2660
- logger18.trace("set", { storageKey });
2789
+ logger19.trace("set", { storageKey });
2661
2790
  const toStore = {
2662
2791
  originalKey: key,
2663
2792
  value,
@@ -2667,7 +2796,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2667
2796
  const jsonString = safeStringify2(toStore);
2668
2797
  sessionStorage.setItem(storageKey, jsonString);
2669
2798
  } catch (error) {
2670
- logger18.error("Error storing to sessionStorage", { errorMessage: error?.message });
2799
+ logger19.error("Error storing to sessionStorage", { errorMessage: error?.message });
2671
2800
  throw new Error(`Failed to store item in sessionStorage: ${error}`);
2672
2801
  }
2673
2802
  }
@@ -2688,23 +2817,23 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2688
2817
  }
2689
2818
  return false;
2690
2819
  } catch (error) {
2691
- logger18.error("Error checking key in sessionStorage", { key, error });
2820
+ logger19.error("Error checking key in sessionStorage", { key, error });
2692
2821
  return false;
2693
2822
  }
2694
2823
  }
2695
2824
  async delete(key) {
2696
- logger18.trace("delete", { key });
2825
+ logger19.trace("delete", { key });
2697
2826
  try {
2698
2827
  const storageKey = this.getStorageKey(key);
2699
2828
  sessionStorage.removeItem(storageKey);
2700
2829
  } catch (error) {
2701
- logger18.error("Error deleting from sessionStorage", { key, error });
2830
+ logger19.error("Error deleting from sessionStorage", { key, error });
2702
2831
  }
2703
2832
  }
2704
2833
  async allIn(locations) {
2705
2834
  const allKeys = this.keys();
2706
2835
  if (locations.length === 0) {
2707
- logger18.debug("Returning all items, LocKeys is empty");
2836
+ logger19.debug("Returning all items, LocKeys is empty");
2708
2837
  const items = [];
2709
2838
  for (const key of await allKeys) {
2710
2839
  const item = await this.get(key);
@@ -2716,14 +2845,14 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2716
2845
  } else {
2717
2846
  const locKeys = locations;
2718
2847
  const resolvedKeys = await allKeys;
2719
- logger18.debug("allIn", { locKeys, keys: resolvedKeys.length });
2848
+ logger19.debug("allIn", { locKeys, keys: resolvedKeys.length });
2720
2849
  const filteredKeys = resolvedKeys.filter((key) => key && isComKey4(key)).filter((key) => {
2721
- const ComKey13 = key;
2722
- logger18.debug("Comparing Location Keys", {
2850
+ const ComKey15 = key;
2851
+ logger19.debug("Comparing Location Keys", {
2723
2852
  locKeys,
2724
- ComKey: ComKey13
2853
+ ComKey: ComKey15
2725
2854
  });
2726
- return isLocKeyArrayEqual(locKeys, ComKey13.loc);
2855
+ return isLocKeyArrayEqual(locKeys, ComKey15.loc);
2727
2856
  });
2728
2857
  const items = [];
2729
2858
  for (const key of filteredKeys) {
@@ -2736,12 +2865,12 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2736
2865
  }
2737
2866
  }
2738
2867
  async contains(query, locations) {
2739
- logger18.debug("contains", { query, locations });
2868
+ logger19.debug("contains", { query, locations });
2740
2869
  const items = await this.allIn(locations);
2741
2870
  return items.some((item) => isQueryMatch4(item, query));
2742
2871
  }
2743
2872
  async queryIn(query, locations = []) {
2744
- logger18.debug("queryIn", { query, locations });
2873
+ logger19.debug("queryIn", { query, locations });
2745
2874
  const items = await this.allIn(locations);
2746
2875
  return items.filter((item) => isQueryMatch4(item, query));
2747
2876
  }
@@ -2761,11 +2890,11 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2761
2890
  keys.push(parsed.originalKey);
2762
2891
  }
2763
2892
  } catch (itemError) {
2764
- logger18.trace("Skipping invalid storage item", { storageKey, error: itemError });
2893
+ logger19.trace("Skipping invalid storage item", { storageKey, error: itemError });
2765
2894
  }
2766
2895
  }
2767
2896
  } catch (error) {
2768
- logger18.error("Error getting keys from sessionStorage", { error });
2897
+ logger19.error("Error getting keys from sessionStorage", { error });
2769
2898
  }
2770
2899
  return keys;
2771
2900
  }
@@ -2782,28 +2911,28 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2782
2911
  values.push(parsed.value);
2783
2912
  }
2784
2913
  } catch (itemError) {
2785
- logger18.trace("Skipping invalid storage item for values", { storageKey, error: itemError });
2914
+ logger19.trace("Skipping invalid storage item for values", { storageKey, error: itemError });
2786
2915
  }
2787
2916
  }
2788
2917
  } catch (error) {
2789
- logger18.error("Error getting values from sessionStorage", { error });
2918
+ logger19.error("Error getting values from sessionStorage", { error });
2790
2919
  }
2791
2920
  return values;
2792
2921
  }
2793
2922
  async clear() {
2794
- logger18.debug("Clearing sessionStorage cache");
2923
+ logger19.debug("Clearing sessionStorage cache");
2795
2924
  try {
2796
2925
  const storageKeys = this.getAllStorageKeys();
2797
2926
  for (const storageKey of storageKeys) {
2798
2927
  sessionStorage.removeItem(storageKey);
2799
2928
  }
2800
2929
  } catch (error) {
2801
- logger18.error("Error clearing sessionStorage cache", { error });
2930
+ logger19.error("Error clearing sessionStorage cache", { error });
2802
2931
  }
2803
2932
  }
2804
2933
  // Query result caching methods implementation
2805
2934
  async setQueryResult(queryHash, itemKeys) {
2806
- logger18.trace("setQueryResult", { queryHash, itemKeys });
2935
+ logger19.trace("setQueryResult", { queryHash, itemKeys });
2807
2936
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
2808
2937
  const entry = {
2809
2938
  itemKeys
@@ -2812,11 +2941,11 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2812
2941
  const jsonString = safeStringify2(entry);
2813
2942
  sessionStorage.setItem(queryKey, jsonString);
2814
2943
  } catch (error) {
2815
- logger18.error("Failed to store query result in sessionStorage", { queryHash, error });
2944
+ logger19.error("Failed to store query result in sessionStorage", { queryHash, error });
2816
2945
  }
2817
2946
  }
2818
2947
  async getQueryResult(queryHash) {
2819
- logger18.trace("getQueryResult", { queryHash });
2948
+ logger19.trace("getQueryResult", { queryHash });
2820
2949
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
2821
2950
  try {
2822
2951
  const data = sessionStorage.getItem(queryKey);
@@ -2829,7 +2958,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2829
2958
  }
2830
2959
  return entry.itemKeys || null;
2831
2960
  } catch (error) {
2832
- logger18.error("Failed to retrieve query result from sessionStorage", { queryHash, error });
2961
+ logger19.error("Failed to retrieve query result from sessionStorage", { queryHash, error });
2833
2962
  return null;
2834
2963
  }
2835
2964
  }
@@ -2838,21 +2967,21 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2838
2967
  try {
2839
2968
  return sessionStorage.getItem(queryKey) !== null;
2840
2969
  } catch (error) {
2841
- logger18.error("Failed to check query result in sessionStorage", { queryHash, error });
2970
+ logger19.error("Failed to check query result in sessionStorage", { queryHash, error });
2842
2971
  return false;
2843
2972
  }
2844
2973
  }
2845
2974
  async deleteQueryResult(queryHash) {
2846
- logger18.trace("deleteQueryResult", { queryHash });
2975
+ logger19.trace("deleteQueryResult", { queryHash });
2847
2976
  const queryKey = `${this.keyPrefix}:query:${queryHash}`;
2848
2977
  try {
2849
2978
  sessionStorage.removeItem(queryKey);
2850
2979
  } catch (error) {
2851
- logger18.error("Failed to delete query result from sessionStorage", { queryHash, error });
2980
+ logger19.error("Failed to delete query result from sessionStorage", { queryHash, error });
2852
2981
  }
2853
2982
  }
2854
2983
  async clearQueryResults() {
2855
- logger18.trace("clearQueryResults");
2984
+ logger19.trace("clearQueryResults");
2856
2985
  const queryPrefix = `${this.keyPrefix}:query:`;
2857
2986
  try {
2858
2987
  const keysToRemove = [];
@@ -2864,7 +2993,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2864
2993
  }
2865
2994
  keysToRemove.forEach((key) => sessionStorage.removeItem(key));
2866
2995
  } catch (error) {
2867
- logger18.error("Failed to clear query results from sessionStorage", { error });
2996
+ logger19.error("Failed to clear query results from sessionStorage", { error });
2868
2997
  }
2869
2998
  }
2870
2999
  // CacheMapMetadataProvider implementation
@@ -2911,7 +3040,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2911
3040
  }
2912
3041
  return metadata;
2913
3042
  } catch (error) {
2914
- logger18.error("Error getting all metadata from sessionStorage", { error });
3043
+ logger19.error("Error getting all metadata from sessionStorage", { error });
2915
3044
  return metadata;
2916
3045
  }
2917
3046
  }
@@ -2931,7 +3060,7 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2931
3060
  }
2932
3061
  // Invalidation methods
2933
3062
  async invalidateItemKeys(keys) {
2934
- logger18.debug("invalidateItemKeys", { keys });
3063
+ logger19.debug("invalidateItemKeys", { keys });
2935
3064
  if (keys.length === 0) {
2936
3065
  return;
2937
3066
  }
@@ -2975,24 +3104,24 @@ var SessionStorageCacheMap = class _SessionStorageCacheMap extends CacheMap {
2975
3104
  }
2976
3105
  }
2977
3106
  } catch (error) {
2978
- logger18.debug("Failed to parse query result", { queryKey, error });
3107
+ logger19.debug("Failed to parse query result", { queryKey, error });
2979
3108
  }
2980
3109
  }
2981
3110
  queriesToRemove.forEach((queryKey) => {
2982
3111
  sessionStorage.removeItem(queryKey);
2983
3112
  });
2984
- logger18.debug("Selectively invalidated queries referencing affected keys", {
3113
+ logger19.debug("Selectively invalidated queries referencing affected keys", {
2985
3114
  affectedKeys: keys.length,
2986
3115
  queriesRemoved: queriesToRemove.length,
2987
3116
  totalQueries: queryKeys.length
2988
3117
  });
2989
3118
  } catch (error) {
2990
- logger18.error("Error during selective query invalidation, falling back to clearing all queries", { error });
3119
+ logger19.error("Error during selective query invalidation, falling back to clearing all queries", { error });
2991
3120
  await this.clearQueryResults();
2992
3121
  }
2993
3122
  }
2994
3123
  async invalidateLocation(locations) {
2995
- logger18.debug("invalidateLocation", { locations });
3124
+ logger19.debug("invalidateLocation", { locations });
2996
3125
  let keysToInvalidate = [];
2997
3126
  if (locations.length === 0) {
2998
3127
  const allKeys = await this.keys();
@@ -3058,7 +3187,7 @@ import {
3058
3187
  isQueryMatch as isQueryMatch5
3059
3188
  } from "@fjell/core";
3060
3189
  import safeStringify3 from "fast-safe-stringify";
3061
- var logger19 = logger_default.get("AsyncIndexDBCacheMap");
3190
+ var logger20 = logger_default.get("AsyncIndexDBCacheMap");
3062
3191
  var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3063
3192
  types;
3064
3193
  dbName;
@@ -3084,19 +3213,19 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3084
3213
  }
3085
3214
  const request = indexedDB.open(this.dbName, this.version);
3086
3215
  request.onerror = () => {
3087
- logger19.error("Error opening IndexedDB", { error: request.error });
3216
+ logger20.error("Error opening IndexedDB", { error: request.error });
3088
3217
  reject(request.error);
3089
3218
  };
3090
3219
  request.onsuccess = () => {
3091
- logger19.debug("IndexedDB opened successfully");
3220
+ logger20.debug("IndexedDB opened successfully");
3092
3221
  resolve(request.result);
3093
3222
  };
3094
3223
  request.onupgradeneeded = (event) => {
3095
- logger19.debug("IndexedDB upgrade needed");
3224
+ logger20.debug("IndexedDB upgrade needed");
3096
3225
  const db = event.target.result;
3097
3226
  if (!db.objectStoreNames.contains(this.storeName)) {
3098
3227
  db.createObjectStore(this.storeName);
3099
- logger19.debug("Created object store", { storeName: this.storeName });
3228
+ logger20.debug("Created object store", { storeName: this.storeName });
3100
3229
  }
3101
3230
  };
3102
3231
  });
@@ -3107,7 +3236,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3107
3236
  return this.normalizedHashFunction(key);
3108
3237
  }
3109
3238
  async get(key) {
3110
- logger19.trace("get", { key });
3239
+ logger20.trace("get", { key });
3111
3240
  try {
3112
3241
  const db = await this.getDB();
3113
3242
  const transaction = db.transaction([this.storeName], "readonly");
@@ -3116,7 +3245,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3116
3245
  return new Promise((resolve, reject) => {
3117
3246
  const request = store.get(storageKey);
3118
3247
  request.onerror = () => {
3119
- logger19.error("Error getting from IndexedDB", { key, error: request.error });
3248
+ logger20.error("Error getting from IndexedDB", { key, error: request.error });
3120
3249
  reject(request.error);
3121
3250
  };
3122
3251
  request.onsuccess = () => {
@@ -3129,7 +3258,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3129
3258
  };
3130
3259
  });
3131
3260
  } catch (error) {
3132
- logger19.error("Error in IndexedDB get operation", { key, error });
3261
+ logger20.error("Error in IndexedDB get operation", { key, error });
3133
3262
  return null;
3134
3263
  }
3135
3264
  }
@@ -3137,7 +3266,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3137
3266
  * Get both the value and metadata for an item
3138
3267
  */
3139
3268
  async getWithMetadata(key) {
3140
- logger19.trace("getWithMetadata", { key });
3269
+ logger20.trace("getWithMetadata", { key });
3141
3270
  try {
3142
3271
  const db = await this.getDB();
3143
3272
  const transaction = db.transaction([this.storeName], "readonly");
@@ -3146,7 +3275,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3146
3275
  return new Promise((resolve, reject) => {
3147
3276
  const request = store.get(storageKey);
3148
3277
  request.onerror = () => {
3149
- logger19.error("Error getting from IndexedDB", { key, error: request.error });
3278
+ logger20.error("Error getting from IndexedDB", { key, error: request.error });
3150
3279
  reject(request.error);
3151
3280
  };
3152
3281
  request.onsuccess = () => {
@@ -3162,12 +3291,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3162
3291
  };
3163
3292
  });
3164
3293
  } catch (error) {
3165
- logger19.error("Error in IndexedDB getWithMetadata operation", { key, error });
3294
+ logger20.error("Error in IndexedDB getWithMetadata operation", { key, error });
3166
3295
  return null;
3167
3296
  }
3168
3297
  }
3169
3298
  async set(key, value, metadata) {
3170
- logger19.trace("set", { key, value, hasMetadata: !!metadata });
3299
+ logger20.trace("set", { key, value, hasMetadata: !!metadata });
3171
3300
  try {
3172
3301
  const db = await this.getDB();
3173
3302
  const transaction = db.transaction([this.storeName], "readwrite");
@@ -3182,7 +3311,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3182
3311
  return new Promise((resolve, reject) => {
3183
3312
  const request = store.put(storedItem, storageKey);
3184
3313
  request.onerror = () => {
3185
- logger19.error("Error setting in IndexedDB", { key, value, error: request.error });
3314
+ logger20.error("Error setting in IndexedDB", { key, value, error: request.error });
3186
3315
  reject(request.error);
3187
3316
  };
3188
3317
  request.onsuccess = () => {
@@ -3190,7 +3319,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3190
3319
  };
3191
3320
  });
3192
3321
  } catch (error) {
3193
- logger19.error("Error in IndexedDB set operation", { key, value, error });
3322
+ logger20.error("Error in IndexedDB set operation", { key, value, error });
3194
3323
  throw new Error(`Failed to store item in IndexedDB: ${error}`);
3195
3324
  }
3196
3325
  }
@@ -3198,16 +3327,16 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3198
3327
  * Update only the metadata for an existing item
3199
3328
  */
3200
3329
  async setMetadata(key, metadata) {
3201
- logger19.trace("setMetadata", { key, metadata });
3330
+ logger20.trace("setMetadata", { key, metadata });
3202
3331
  try {
3203
3332
  const existing = await this.getWithMetadata(key);
3204
3333
  if (existing) {
3205
3334
  await this.set(key, existing.value, metadata);
3206
3335
  } else {
3207
- logger19.warning("Attempted to set metadata for non-existent item", { key });
3336
+ logger20.warning("Attempted to set metadata for non-existent item", { key });
3208
3337
  }
3209
3338
  } catch (error) {
3210
- logger19.error("Error in IndexedDB setMetadata operation", { key, error });
3339
+ logger20.error("Error in IndexedDB setMetadata operation", { key, error });
3211
3340
  throw new Error(`Failed to update metadata in IndexedDB: ${error}`);
3212
3341
  }
3213
3342
  }
@@ -3220,7 +3349,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3220
3349
  return new Promise((resolve, reject) => {
3221
3350
  const request = store.get(storageKey);
3222
3351
  request.onerror = () => {
3223
- logger19.error("Error checking key in IndexedDB", { key, error: request.error });
3352
+ logger20.error("Error checking key in IndexedDB", { key, error: request.error });
3224
3353
  reject(request.error);
3225
3354
  };
3226
3355
  request.onsuccess = () => {
@@ -3234,12 +3363,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3234
3363
  };
3235
3364
  });
3236
3365
  } catch (error) {
3237
- logger19.error("Error in IndexedDB includesKey operation", { key, error });
3366
+ logger20.error("Error in IndexedDB includesKey operation", { key, error });
3238
3367
  return false;
3239
3368
  }
3240
3369
  }
3241
3370
  async delete(key) {
3242
- logger19.trace("delete", { key });
3371
+ logger20.trace("delete", { key });
3243
3372
  try {
3244
3373
  const db = await this.getDB();
3245
3374
  const transaction = db.transaction([this.storeName], "readwrite");
@@ -3248,7 +3377,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3248
3377
  return new Promise((resolve, reject) => {
3249
3378
  const request = store.delete(storageKey);
3250
3379
  request.onerror = () => {
3251
- logger19.error("Error deleting from IndexedDB", { key, error: request.error });
3380
+ logger20.error("Error deleting from IndexedDB", { key, error: request.error });
3252
3381
  reject(request.error);
3253
3382
  };
3254
3383
  request.onsuccess = () => {
@@ -3256,26 +3385,26 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3256
3385
  };
3257
3386
  });
3258
3387
  } catch (error) {
3259
- logger19.error("Error in IndexedDB delete operation", { key, error });
3388
+ logger20.error("Error in IndexedDB delete operation", { key, error });
3260
3389
  }
3261
3390
  }
3262
3391
  async allIn(locations) {
3263
3392
  const allKeys = await this.keys();
3264
3393
  if (locations.length === 0) {
3265
- logger19.debug("Returning all items, LocKeys is empty");
3394
+ logger20.debug("Returning all items, LocKeys is empty");
3266
3395
  const promises = allKeys.map((key) => this.get(key));
3267
3396
  const results = await Promise.all(promises);
3268
3397
  return results.filter((item) => item !== null);
3269
3398
  } else {
3270
3399
  const locKeys = locations;
3271
- logger19.debug("allIn", { locKeys, keys: allKeys.length });
3400
+ logger20.debug("allIn", { locKeys, keys: allKeys.length });
3272
3401
  const filteredKeys = allKeys.filter((key) => key && isComKey5(key)).filter((key) => {
3273
- const ComKey13 = key;
3274
- logger19.debug("Comparing Location Keys", {
3402
+ const ComKey15 = key;
3403
+ logger20.debug("Comparing Location Keys", {
3275
3404
  locKeys,
3276
- ComKey: ComKey13
3405
+ ComKey: ComKey15
3277
3406
  });
3278
- return isLocKeyArrayEqual(locKeys, ComKey13.loc);
3407
+ return isLocKeyArrayEqual(locKeys, ComKey15.loc);
3279
3408
  });
3280
3409
  const promises = filteredKeys.map((key) => this.get(key));
3281
3410
  const results = await Promise.all(promises);
@@ -3283,12 +3412,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3283
3412
  }
3284
3413
  }
3285
3414
  async contains(query, locations) {
3286
- logger19.debug("contains", { query, locations });
3415
+ logger20.debug("contains", { query, locations });
3287
3416
  const items = await this.allIn(locations);
3288
3417
  return items.some((item) => isQueryMatch5(item, query));
3289
3418
  }
3290
3419
  async queryIn(query, locations = []) {
3291
- logger19.debug("queryIn", { query, locations });
3420
+ logger20.debug("queryIn", { query, locations });
3292
3421
  const items = await this.allIn(locations);
3293
3422
  return items.filter((item) => isQueryMatch5(item, query));
3294
3423
  }
@@ -3304,7 +3433,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3304
3433
  return new Promise((resolve, reject) => {
3305
3434
  const request = store.openCursor();
3306
3435
  request.onerror = () => {
3307
- logger19.error("Error getting keys from IndexedDB", { error: request.error });
3436
+ logger20.error("Error getting keys from IndexedDB", { error: request.error });
3308
3437
  reject(request.error);
3309
3438
  };
3310
3439
  request.onsuccess = (event) => {
@@ -3319,7 +3448,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3319
3448
  };
3320
3449
  });
3321
3450
  } catch (error) {
3322
- logger19.error("Error in IndexedDB keys operation", { error });
3451
+ logger20.error("Error in IndexedDB keys operation", { error });
3323
3452
  return [];
3324
3453
  }
3325
3454
  }
@@ -3335,7 +3464,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3335
3464
  return new Promise((resolve, reject) => {
3336
3465
  const request = store.openCursor();
3337
3466
  request.onerror = () => {
3338
- logger19.error("Error getting metadata from IndexedDB", { error: request.error });
3467
+ logger20.error("Error getting metadata from IndexedDB", { error: request.error });
3339
3468
  reject(request.error);
3340
3469
  };
3341
3470
  request.onsuccess = (event) => {
@@ -3353,7 +3482,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3353
3482
  };
3354
3483
  });
3355
3484
  } catch (error) {
3356
- logger19.error("Error in IndexedDB getAllMetadata operation", { error });
3485
+ logger20.error("Error in IndexedDB getAllMetadata operation", { error });
3357
3486
  return metadataMap;
3358
3487
  }
3359
3488
  }
@@ -3366,7 +3495,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3366
3495
  return new Promise((resolve, reject) => {
3367
3496
  const request = store.openCursor();
3368
3497
  request.onerror = () => {
3369
- logger19.error("Error getting values from IndexedDB", { error: request.error });
3498
+ logger20.error("Error getting values from IndexedDB", { error: request.error });
3370
3499
  reject(request.error);
3371
3500
  };
3372
3501
  request.onsuccess = (event) => {
@@ -3381,12 +3510,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3381
3510
  };
3382
3511
  });
3383
3512
  } catch (error) {
3384
- logger19.error("Error in IndexedDB values operation", { error });
3513
+ logger20.error("Error in IndexedDB values operation", { error });
3385
3514
  return [];
3386
3515
  }
3387
3516
  }
3388
3517
  async clear() {
3389
- logger19.debug("Clearing IndexedDB cache");
3518
+ logger20.debug("Clearing IndexedDB cache");
3390
3519
  try {
3391
3520
  const db = await this.getDB();
3392
3521
  const transaction = db.transaction([this.storeName], "readwrite");
@@ -3394,7 +3523,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3394
3523
  return new Promise((resolve, reject) => {
3395
3524
  const request = store.clear();
3396
3525
  request.onerror = () => {
3397
- logger19.error("Error clearing IndexedDB cache", { error: request.error });
3526
+ logger20.error("Error clearing IndexedDB cache", { error: request.error });
3398
3527
  reject(request.error);
3399
3528
  };
3400
3529
  request.onsuccess = () => {
@@ -3402,17 +3531,17 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3402
3531
  };
3403
3532
  });
3404
3533
  } catch (error) {
3405
- logger19.error("Error in IndexedDB clear operation", { error });
3534
+ logger20.error("Error in IndexedDB clear operation", { error });
3406
3535
  }
3407
3536
  }
3408
3537
  // Async Query result caching methods
3409
3538
  async setQueryResult(queryHash, itemKeys) {
3410
- logger19.trace("setQueryResult", { queryHash, itemKeys });
3539
+ logger20.trace("setQueryResult", { queryHash, itemKeys });
3411
3540
  try {
3412
3541
  return new Promise((resolve, reject) => {
3413
3542
  const request = indexedDB.open(this.dbName, this.version);
3414
3543
  request.onerror = () => {
3415
- logger19.error("Failed to open database for setQueryResult", { error: request.error });
3544
+ logger20.error("Failed to open database for setQueryResult", { error: request.error });
3416
3545
  reject(request.error);
3417
3546
  };
3418
3547
  request.onsuccess = () => {
@@ -3425,7 +3554,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3425
3554
  const queryKey = `query:${queryHash}`;
3426
3555
  const putRequest = store.put(safeStringify3(entry), queryKey);
3427
3556
  putRequest.onerror = () => {
3428
- logger19.error("Failed to store query result", { queryHash, error: putRequest.error });
3557
+ logger20.error("Failed to store query result", { queryHash, error: putRequest.error });
3429
3558
  reject(putRequest.error);
3430
3559
  };
3431
3560
  putRequest.onsuccess = () => {
@@ -3434,17 +3563,17 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3434
3563
  };
3435
3564
  });
3436
3565
  } catch (error) {
3437
- logger19.error("Error in setQueryResult", { queryHash, error });
3566
+ logger20.error("Error in setQueryResult", { queryHash, error });
3438
3567
  throw error;
3439
3568
  }
3440
3569
  }
3441
3570
  async getQueryResult(queryHash) {
3442
- logger19.trace("getQueryResult", { queryHash });
3571
+ logger20.trace("getQueryResult", { queryHash });
3443
3572
  try {
3444
3573
  return new Promise((resolve, reject) => {
3445
3574
  const request = indexedDB.open(this.dbName, this.version);
3446
3575
  request.onerror = () => {
3447
- logger19.error("Failed to open database for getQueryResult", { error: request.error });
3576
+ logger20.error("Failed to open database for getQueryResult", { error: request.error });
3448
3577
  reject(request.error);
3449
3578
  };
3450
3579
  request.onsuccess = () => {
@@ -3454,7 +3583,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3454
3583
  const queryKey = `query:${queryHash}`;
3455
3584
  const getRequest = store.get(queryKey);
3456
3585
  getRequest.onerror = () => {
3457
- logger19.error("Failed to retrieve query result", { queryHash, error: getRequest.error });
3586
+ logger20.error("Failed to retrieve query result", { queryHash, error: getRequest.error });
3458
3587
  reject(getRequest.error);
3459
3588
  };
3460
3589
  getRequest.onsuccess = () => {
@@ -3471,34 +3600,34 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3471
3600
  }
3472
3601
  resolve(entry.itemKeys || null);
3473
3602
  } catch (parseError) {
3474
- logger19.error("Failed to parse query result", { queryHash, error: parseError });
3603
+ logger20.error("Failed to parse query result", { queryHash, error: parseError });
3475
3604
  resolve(null);
3476
3605
  }
3477
3606
  };
3478
3607
  };
3479
3608
  });
3480
3609
  } catch (error) {
3481
- logger19.error("Error in getQueryResult", { queryHash, error });
3610
+ logger20.error("Error in getQueryResult", { queryHash, error });
3482
3611
  return null;
3483
3612
  }
3484
3613
  }
3485
3614
  async hasQueryResult(queryHash) {
3486
- logger19.trace("hasQueryResult", { queryHash });
3615
+ logger20.trace("hasQueryResult", { queryHash });
3487
3616
  try {
3488
3617
  const result = await this.getQueryResult(queryHash);
3489
3618
  return result !== null;
3490
3619
  } catch (error) {
3491
- logger19.error("Error in hasQueryResult", { queryHash, error });
3620
+ logger20.error("Error in hasQueryResult", { queryHash, error });
3492
3621
  return false;
3493
3622
  }
3494
3623
  }
3495
3624
  async deleteQueryResult(queryHash) {
3496
- logger19.trace("deleteQueryResult", { queryHash });
3625
+ logger20.trace("deleteQueryResult", { queryHash });
3497
3626
  try {
3498
3627
  return new Promise((resolve, reject) => {
3499
3628
  const request = indexedDB.open(this.dbName, this.version);
3500
3629
  request.onerror = () => {
3501
- logger19.error("Failed to open database for deleteQueryResult", { error: request.error });
3630
+ logger20.error("Failed to open database for deleteQueryResult", { error: request.error });
3502
3631
  reject(request.error);
3503
3632
  };
3504
3633
  request.onsuccess = () => {
@@ -3508,7 +3637,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3508
3637
  const queryKey = `query:${queryHash}`;
3509
3638
  const deleteRequest = store.delete(queryKey);
3510
3639
  deleteRequest.onerror = () => {
3511
- logger19.error("Failed to delete query result", { queryHash, error: deleteRequest.error });
3640
+ logger20.error("Failed to delete query result", { queryHash, error: deleteRequest.error });
3512
3641
  reject(deleteRequest.error);
3513
3642
  };
3514
3643
  deleteRequest.onsuccess = () => {
@@ -3517,12 +3646,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3517
3646
  };
3518
3647
  });
3519
3648
  } catch (error) {
3520
- logger19.error("Error in deleteQueryResult", { queryHash, error });
3649
+ logger20.error("Error in deleteQueryResult", { queryHash, error });
3521
3650
  throw error;
3522
3651
  }
3523
3652
  }
3524
3653
  async invalidateItemKeys(keys) {
3525
- logger19.debug("invalidateItemKeys", { keys });
3654
+ logger20.debug("invalidateItemKeys", { keys });
3526
3655
  if (keys.length === 0) {
3527
3656
  return;
3528
3657
  }
@@ -3559,7 +3688,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3559
3688
  queryResults2[queryHash] = itemKeys;
3560
3689
  }
3561
3690
  } catch (error) {
3562
- logger19.debug("Failed to parse query result", { key: item.key, error });
3691
+ logger20.debug("Failed to parse query result", { key: item.key, error });
3563
3692
  }
3564
3693
  }
3565
3694
  }
@@ -3587,18 +3716,18 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3587
3716
  });
3588
3717
  }
3589
3718
  }
3590
- logger19.debug("Selectively invalidated queries referencing affected keys", {
3719
+ logger20.debug("Selectively invalidated queries referencing affected keys", {
3591
3720
  affectedKeys: keys.length,
3592
3721
  queriesRemoved: queriesToRemove.length,
3593
3722
  totalQueries: Object.keys(queryResults).length
3594
3723
  });
3595
3724
  } catch (error) {
3596
- logger19.error("Error during selective query invalidation, falling back to clearing all queries", { error });
3725
+ logger20.error("Error during selective query invalidation, falling back to clearing all queries", { error });
3597
3726
  await this.clearQueryResults();
3598
3727
  }
3599
3728
  }
3600
3729
  async invalidateLocation(locations) {
3601
- logger19.debug("invalidateLocation", { locations });
3730
+ logger20.debug("invalidateLocation", { locations });
3602
3731
  let keysToInvalidate = [];
3603
3732
  if (locations.length === 0) {
3604
3733
  await this.clearQueryResults();
@@ -3611,12 +3740,12 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3611
3740
  }
3612
3741
  }
3613
3742
  async clearQueryResults() {
3614
- logger19.trace("clearQueryResults");
3743
+ logger20.trace("clearQueryResults");
3615
3744
  try {
3616
3745
  return new Promise((resolve, reject) => {
3617
3746
  const request = indexedDB.open(this.dbName, this.version);
3618
3747
  request.onerror = () => {
3619
- logger19.error("Failed to open database for clearQueryResults", { error: request.error });
3748
+ logger20.error("Failed to open database for clearQueryResults", { error: request.error });
3620
3749
  reject(request.error);
3621
3750
  };
3622
3751
  request.onsuccess = () => {
@@ -3626,7 +3755,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3626
3755
  const cursorRequest = store.openCursor();
3627
3756
  const keysToDelete = [];
3628
3757
  cursorRequest.onerror = () => {
3629
- logger19.error("Failed to open cursor for clearQueryResults", { error: cursorRequest.error });
3758
+ logger20.error("Failed to open cursor for clearQueryResults", { error: cursorRequest.error });
3630
3759
  reject(cursorRequest.error);
3631
3760
  };
3632
3761
  cursorRequest.onsuccess = () => {
@@ -3647,7 +3776,7 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3647
3776
  keysToDelete.forEach((queryKey) => {
3648
3777
  const deleteRequest = store.delete(queryKey);
3649
3778
  deleteRequest.onerror = () => {
3650
- logger19.error("Failed to delete query key", { queryKey, error: deleteRequest.error });
3779
+ logger20.error("Failed to delete query key", { queryKey, error: deleteRequest.error });
3651
3780
  deletedCount++;
3652
3781
  if (deletedCount === totalToDelete) {
3653
3782
  resolve();
@@ -3665,14 +3794,14 @@ var AsyncIndexDBCacheMap = class _AsyncIndexDBCacheMap {
3665
3794
  };
3666
3795
  });
3667
3796
  } catch (error) {
3668
- logger19.error("Error in clearQueryResults", { error });
3797
+ logger20.error("Error in clearQueryResults", { error });
3669
3798
  throw error;
3670
3799
  }
3671
3800
  }
3672
3801
  };
3673
3802
 
3674
3803
  // src/browser/IndexDBCacheMap.ts
3675
- var logger20 = logger_default.get("IndexDBCacheMap");
3804
+ var logger21 = logger_default.get("IndexDBCacheMap");
3676
3805
  var IndexDBCacheMap = class _IndexDBCacheMap extends CacheMap {
3677
3806
  implementationType = "browser/indexedDB";
3678
3807
  // Memory storage
@@ -3856,7 +3985,7 @@ var IndexDBCacheMap = class _IndexDBCacheMap extends CacheMap {
3856
3985
  }
3857
3986
  // Invalidation methods
3858
3987
  async invalidateItemKeys(keys) {
3859
- logger20.debug("invalidateItemKeys", { keys });
3988
+ logger21.debug("invalidateItemKeys", { keys });
3860
3989
  if (keys.length === 0) {
3861
3990
  return;
3862
3991
  }
@@ -3886,7 +4015,7 @@ var IndexDBCacheMap = class _IndexDBCacheMap extends CacheMap {
3886
4015
  queriesToRemove.forEach((queryHash) => {
3887
4016
  delete this.queryResultCache[queryHash];
3888
4017
  });
3889
- logger20.debug("Selectively invalidated queries referencing affected keys", {
4018
+ logger21.debug("Selectively invalidated queries referencing affected keys", {
3890
4019
  affectedKeys: keys.length,
3891
4020
  queriesRemoved: queriesToRemove.length,
3892
4021
  totalQueries: Object.keys(this.queryResultCache).length
@@ -4268,8 +4397,8 @@ var reset = async (coordinate, options) => {
4268
4397
  };
4269
4398
 
4270
4399
  // src/Operations.ts
4271
- var createOperations = (api, coordinate, cacheMap, pkType, options, eventEmitter, ttlManager, evictionManager, statsManager) => {
4272
- const context = createCacheContext(api, cacheMap, pkType, options, eventEmitter, ttlManager, evictionManager, statsManager);
4400
+ var createOperations = (api, coordinate, cacheMap, pkType, options, eventEmitter, ttlManager, evictionManager, statsManager, registry) => {
4401
+ const context = createCacheContext(api, cacheMap, pkType, options, eventEmitter, ttlManager, evictionManager, statsManager, registry);
4273
4402
  return {
4274
4403
  all: (query, locations) => all(query, locations, context).then(([ctx, result]) => result),
4275
4404
  one: (query, locations) => one(query, locations, context).then(([ctx, result]) => result),
@@ -4278,8 +4407,8 @@ var createOperations = (api, coordinate, cacheMap, pkType, options, eventEmitter
4278
4407
  retrieve: (key) => retrieve(key, context).then(([ctx, result]) => result),
4279
4408
  remove: (key) => remove(key, context).then((ctx) => void 0),
4280
4409
  update: (key, item) => update(key, item, context).then(([ctx, result]) => result),
4281
- action: (key, actionName, body) => action(key, actionName, body, context).then(([ctx, result]) => result),
4282
- allAction: (actionName, body, locations) => allAction(actionName, body, locations, context).then(([ctx, result]) => result),
4410
+ action: (key, actionName, body) => action(key, actionName, body, context).then(([ctx, result, affectedItems]) => [result, affectedItems]),
4411
+ allAction: (actionName, body, locations) => allAction(actionName, body, locations, context).then(([ctx, result, affectedItems]) => [result, affectedItems]),
4283
4412
  facet: (key, facetName, params) => facet(key, facetName, params, context).then((result) => result),
4284
4413
  allFacet: (facetName, params, locations) => allFacet(facetName, params, locations, context).then((result) => result),
4285
4414
  find: (finder, params, locations) => find(finder, params, locations, context).then(([ctx, result]) => result),
@@ -4290,7 +4419,7 @@ var createOperations = (api, coordinate, cacheMap, pkType, options, eventEmitter
4290
4419
  };
4291
4420
 
4292
4421
  // src/eviction/EvictionManager.ts
4293
- var logger21 = logger_default.get("EvictionManager");
4422
+ var logger22 = logger_default.get("EvictionManager");
4294
4423
  var EvictionManager = class {
4295
4424
  evictionStrategy;
4296
4425
  constructor(evictionStrategy) {
@@ -4302,7 +4431,7 @@ var EvictionManager = class {
4302
4431
  */
4303
4432
  setEvictionStrategy(strategy) {
4304
4433
  this.evictionStrategy = strategy;
4305
- logger21.debug("Eviction strategy updated", {
4434
+ logger22.debug("Eviction strategy updated", {
4306
4435
  strategy: strategy?.getStrategyName() || "none"
4307
4436
  });
4308
4437
  }
@@ -4325,7 +4454,7 @@ var EvictionManager = class {
4325
4454
  try {
4326
4455
  await this.evictionStrategy.onItemAccessed(key, metadataProvider);
4327
4456
  } catch (error) {
4328
- logger21.error("Error in eviction strategy onItemAccessed", { key, error });
4457
+ logger22.error("Error in eviction strategy onItemAccessed", { key, error });
4329
4458
  }
4330
4459
  }
4331
4460
  /**
@@ -4350,14 +4479,14 @@ var EvictionManager = class {
4350
4479
  }
4351
4480
  await this.evictionStrategy.onItemAdded(key, estimatedSize, metadataProvider);
4352
4481
  if (evictedKeys.length > 0) {
4353
- logger21.debug("Items evicted during addition", {
4482
+ logger22.debug("Items evicted during addition", {
4354
4483
  newKey: key,
4355
4484
  evictedKeys,
4356
4485
  strategy: this.evictionStrategy.getStrategyName()
4357
4486
  });
4358
4487
  }
4359
4488
  } catch (error) {
4360
- logger21.error("Error in eviction strategy onItemAdded", { key, error });
4489
+ logger22.error("Error in eviction strategy onItemAdded", { key, error });
4361
4490
  }
4362
4491
  return evictedKeys;
4363
4492
  }
@@ -4373,7 +4502,7 @@ var EvictionManager = class {
4373
4502
  try {
4374
4503
  this.evictionStrategy.onItemRemoved(key, metadataProvider);
4375
4504
  } catch (error) {
4376
- logger21.error("Error in eviction strategy onItemRemoved", { key, error });
4505
+ logger22.error("Error in eviction strategy onItemRemoved", { key, error });
4377
4506
  }
4378
4507
  }
4379
4508
  /**
@@ -4394,13 +4523,13 @@ var EvictionManager = class {
4394
4523
  evictedKeys.push(evictKey);
4395
4524
  }
4396
4525
  if (evictedKeys.length > 0) {
4397
- logger21.debug("Manual eviction performed", {
4526
+ logger22.debug("Manual eviction performed", {
4398
4527
  evictedKeys,
4399
4528
  strategy: this.evictionStrategy.getStrategyName()
4400
4529
  });
4401
4530
  }
4402
4531
  } catch (error) {
4403
- logger21.error("Error in manual eviction", { error });
4532
+ logger22.error("Error in manual eviction", { error });
4404
4533
  }
4405
4534
  return evictedKeys;
4406
4535
  }
@@ -5747,7 +5876,7 @@ function createEvictionStrategy(policy, maxCacheSize, config) {
5747
5876
  }
5748
5877
 
5749
5878
  // src/ttl/TTLManager.ts
5750
- var logger22 = logger_default.get("TTLManager");
5879
+ var logger23 = logger_default.get("TTLManager");
5751
5880
  var TTLManager = class {
5752
5881
  config;
5753
5882
  cleanupTimer;
@@ -5759,7 +5888,7 @@ var TTLManager = class {
5759
5888
  validateOnAccess: true,
5760
5889
  ...config
5761
5890
  };
5762
- logger22.debug("TTL_DEBUG: TTLManager created", {
5891
+ logger23.debug("TTL_DEBUG: TTLManager created", {
5763
5892
  config: this.config,
5764
5893
  isTTLEnabled: this.isTTLEnabled(),
5765
5894
  defaultTTL: this.config.defaultTTL
@@ -5792,13 +5921,13 @@ var TTLManager = class {
5792
5921
  this.startAutoCleanup();
5793
5922
  }
5794
5923
  }
5795
- logger22.debug("TTL configuration updated", { config: this.config });
5924
+ logger23.debug("TTL configuration updated", { config: this.config });
5796
5925
  }
5797
5926
  /**
5798
5927
  * Set TTL metadata for an item when it's added
5799
5928
  */
5800
5929
  async onItemAdded(key, metadataProvider, itemTTL) {
5801
- logger22.debug("TTL_DEBUG: onItemAdded called", {
5930
+ logger23.debug("TTL_DEBUG: onItemAdded called", {
5802
5931
  key,
5803
5932
  itemTTL,
5804
5933
  isTTLEnabled: this.isTTLEnabled(),
@@ -5806,19 +5935,19 @@ var TTLManager = class {
5806
5935
  metadataProviderType: metadataProvider?.constructor?.name
5807
5936
  });
5808
5937
  if (!this.isTTLEnabled() && !itemTTL) {
5809
- logger22.debug("TTL_DEBUG: No TTL configured for item - returning early", { key });
5938
+ logger23.debug("TTL_DEBUG: No TTL configured for item - returning early", { key });
5810
5939
  return;
5811
5940
  }
5812
- logger22.debug("TTL_DEBUG: Getting metadata for key", { key });
5941
+ logger23.debug("TTL_DEBUG: Getting metadata for key", { key });
5813
5942
  const metadata = await metadataProvider.getMetadata(key);
5814
- logger22.debug("TTL_DEBUG: Retrieved metadata", {
5943
+ logger23.debug("TTL_DEBUG: Retrieved metadata", {
5815
5944
  key,
5816
5945
  hasMetadata: !!metadata,
5817
5946
  metadataKeys: metadata ? Object.keys(metadata) : null,
5818
5947
  metadata
5819
5948
  });
5820
5949
  if (!metadata) {
5821
- logger22.warning("TTL_DEBUG: No metadata found for item when setting TTL", {
5950
+ logger23.warning("TTL_DEBUG: No metadata found for item when setting TTL", {
5822
5951
  key,
5823
5952
  metadataProviderType: metadataProvider?.constructor?.name,
5824
5953
  metadataProviderMethods: metadataProvider ? Object.getOwnPropertyNames(Object.getPrototypeOf(metadataProvider)) : null
@@ -5826,7 +5955,7 @@ var TTLManager = class {
5826
5955
  return;
5827
5956
  }
5828
5957
  const ttl = itemTTL || this.config.defaultTTL;
5829
- logger22.debug("TTL_DEBUG: Calculated TTL value", {
5958
+ logger23.debug("TTL_DEBUG: Calculated TTL value", {
5830
5959
  key,
5831
5960
  itemTTL,
5832
5961
  defaultTTL: this.config.defaultTTL,
@@ -5839,7 +5968,7 @@ var TTLManager = class {
5839
5968
  expiresAt: metadata.addedAt + ttl,
5840
5969
  ttl
5841
5970
  };
5842
- logger22.debug("TTL_DEBUG: Setting TTL metadata", {
5971
+ logger23.debug("TTL_DEBUG: Setting TTL metadata", {
5843
5972
  key,
5844
5973
  ttl,
5845
5974
  addedAt: metadata.addedAt,
@@ -5847,9 +5976,9 @@ var TTLManager = class {
5847
5976
  ttlMetadata
5848
5977
  });
5849
5978
  await metadataProvider.setMetadata(key, ttlMetadata);
5850
- logger22.trace("TTL_DEBUG: TTL set for item", { key, ttl, expiresAt: ttlMetadata.expiresAt });
5979
+ logger23.trace("TTL_DEBUG: TTL set for item", { key, ttl, expiresAt: ttlMetadata.expiresAt });
5851
5980
  } else {
5852
- logger22.debug("TTL_DEBUG: No TTL set - invalid TTL value", { key, ttl });
5981
+ logger23.debug("TTL_DEBUG: No TTL set - invalid TTL value", { key, ttl });
5853
5982
  }
5854
5983
  }
5855
5984
  /**
@@ -5863,7 +5992,7 @@ var TTLManager = class {
5863
5992
  const now = Date.now();
5864
5993
  const expired = now >= metadata.expiresAt;
5865
5994
  if (expired) {
5866
- logger22.trace("Item expired", { key, expiresAt: metadata.expiresAt, now });
5995
+ logger23.trace("Item expired", { key, expiresAt: metadata.expiresAt, now });
5867
5996
  }
5868
5997
  return expired;
5869
5998
  }
@@ -5910,7 +6039,7 @@ var TTLManager = class {
5910
6039
  }
5911
6040
  }
5912
6041
  if (expiredKeys.length > 0) {
5913
- logger22.debug("Found expired items", { count: expiredKeys.length, keys: expiredKeys });
6042
+ logger23.debug("Found expired items", { count: expiredKeys.length, keys: expiredKeys });
5914
6043
  }
5915
6044
  return expiredKeys;
5916
6045
  }
@@ -5938,7 +6067,7 @@ var TTLManager = class {
5938
6067
  }
5939
6068
  metadata.expiresAt += additionalTTL;
5940
6069
  await metadataProvider.setMetadata(key, metadata);
5941
- logger22.trace("TTL extended for item", { key, additionalTTL, newExpiresAt: metadata.expiresAt });
6070
+ logger23.trace("TTL extended for item", { key, additionalTTL, newExpiresAt: metadata.expiresAt });
5942
6071
  return true;
5943
6072
  }
5944
6073
  /**
@@ -5960,7 +6089,7 @@ var TTLManager = class {
5960
6089
  ttl
5961
6090
  };
5962
6091
  await metadataProvider.setMetadata(key, ttlMetadata);
5963
- logger22.trace("TTL refreshed for item", { key, ttl, expiresAt: ttlMetadata.expiresAt });
6092
+ logger23.trace("TTL refreshed for item", { key, ttl, expiresAt: ttlMetadata.expiresAt });
5964
6093
  return true;
5965
6094
  }
5966
6095
  /**
@@ -5972,9 +6101,9 @@ var TTLManager = class {
5972
6101
  }
5973
6102
  if (this.config.cleanupInterval) {
5974
6103
  this.cleanupTimer = setInterval(() => {
5975
- logger22.trace("Auto cleanup timer triggered");
6104
+ logger23.trace("Auto cleanup timer triggered");
5976
6105
  }, this.config.cleanupInterval);
5977
- logger22.debug("Auto cleanup started", { interval: this.config.cleanupInterval });
6106
+ logger23.debug("Auto cleanup started", { interval: this.config.cleanupInterval });
5978
6107
  }
5979
6108
  }
5980
6109
  /**
@@ -5984,7 +6113,7 @@ var TTLManager = class {
5984
6113
  if (this.cleanupTimer) {
5985
6114
  clearInterval(this.cleanupTimer);
5986
6115
  this.cleanupTimer = null;
5987
- logger22.debug("Auto cleanup stopped");
6116
+ logger23.debug("Auto cleanup stopped");
5988
6117
  }
5989
6118
  }
5990
6119
  /**
@@ -5992,7 +6121,7 @@ var TTLManager = class {
5992
6121
  */
5993
6122
  destroy() {
5994
6123
  this.stopAutoCleanup();
5995
- logger22.debug("TTL manager destroyed");
6124
+ logger23.debug("TTL manager destroyed");
5996
6125
  }
5997
6126
  };
5998
6127
 
@@ -6392,9 +6521,9 @@ var CacheStatsManager = class {
6392
6521
  };
6393
6522
 
6394
6523
  // src/Cache.ts
6395
- var logger23 = logger_default.get("Cache");
6524
+ var logger24 = logger_default.get("Cache");
6396
6525
  var createCache = (api, coordinate, registry, options) => {
6397
- logger23.debug("createCache", { coordinate, registry, options });
6526
+ logger24.debug("createCache", { coordinate, registry, options });
6398
6527
  const completeOptions = createOptions(options);
6399
6528
  const cacheMap = createCacheMap(coordinate.kta, completeOptions);
6400
6529
  const pkType = coordinate.kta[0];
@@ -6417,7 +6546,7 @@ var createCache = (api, coordinate, registry, options) => {
6417
6546
  validateOnAccess: true
6418
6547
  });
6419
6548
  const statsManager = new CacheStatsManager();
6420
- const operations = createOperations(api, coordinate, cacheMap, pkType, completeOptions, eventEmitter, ttlManager, evictionManager, statsManager);
6549
+ const operations = createOperations(api, coordinate, cacheMap, pkType, completeOptions, eventEmitter, ttlManager, evictionManager, statsManager, registry);
6421
6550
  const cache = {
6422
6551
  coordinate,
6423
6552
  registry,
@@ -6469,18 +6598,18 @@ var createCache = (api, coordinate, registry, options) => {
6469
6598
  };
6470
6599
  return cache;
6471
6600
  };
6472
- var isCache = (cache) => {
6601
+ var isCache2 = (cache) => {
6473
6602
  return cache !== null && typeof cache === "object" && "coordinate" in cache && "registry" in cache && "api" in cache && "cacheMap" in cache && "operations" in cache;
6474
6603
  };
6475
6604
 
6476
6605
  // src/InstanceFactory.ts
6477
- var logger24 = logger_default.get("InstanceFactory");
6606
+ var logger25 = logger_default.get("InstanceFactory");
6478
6607
  var createInstanceFactory = (api, options) => {
6479
6608
  const templateOptions = createOptions(options);
6480
6609
  validateOptions(templateOptions);
6481
6610
  return (coordinate, context) => {
6482
6611
  const instanceOptions = createOptions(options);
6483
- logger24.debug("Creating cache instance", {
6612
+ logger25.debug("Creating cache instance", {
6484
6613
  coordinate,
6485
6614
  registry: context.registry,
6486
6615
  api,
@@ -6506,7 +6635,8 @@ var createInstanceFactory = (api, options) => {
6506
6635
  eventEmitter,
6507
6636
  ttlManager,
6508
6637
  evictionManager,
6509
- statsManager
6638
+ statsManager,
6639
+ context.registry
6510
6640
  );
6511
6641
  return {
6512
6642
  coordinate,
@@ -6546,9 +6676,9 @@ var createInstanceFactory = (api, options) => {
6546
6676
  };
6547
6677
 
6548
6678
  // src/Instance.ts
6549
- var logger25 = logger_default.get("Instance");
6679
+ var logger26 = logger_default.get("Instance");
6550
6680
  var createInstance = (registry, coordinate, api, options) => {
6551
- logger25.debug("createInstance", { coordinate, api, registry, options });
6681
+ logger26.debug("createInstance", { coordinate, api, registry, options });
6552
6682
  return createCache(api, coordinate, registry, options);
6553
6683
  };
6554
6684
  var isInstance = (instance) => {
@@ -6556,7 +6686,7 @@ var isInstance = (instance) => {
6556
6686
  };
6557
6687
 
6558
6688
  // src/Aggregator.ts
6559
- var logger26 = logger_default.get("ItemAggregator");
6689
+ var logger27 = logger_default.get("ItemAggregator");
6560
6690
  var toCacheConfig = (config) => {
6561
6691
  let cacheConfig;
6562
6692
  if (config.optional === void 0) {
@@ -6568,22 +6698,22 @@ var toCacheConfig = (config) => {
6568
6698
  };
6569
6699
  var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
6570
6700
  const populate = async (item) => {
6571
- logger26.default("populate", { item });
6701
+ logger27.default("populate", { item });
6572
6702
  for (const key in aggregates) {
6573
6703
  await populateAggregate(key, item);
6574
6704
  }
6575
6705
  for (const key in events) {
6576
6706
  await populateEvent(key, item);
6577
6707
  }
6578
- logger26.default("populate done", { item });
6708
+ logger27.default("populate done", { item });
6579
6709
  return item;
6580
6710
  };
6581
6711
  const populateAggregate = async (key, item) => {
6582
- logger26.default("populate aggregate key", { key });
6712
+ logger27.default("populate aggregate key", { key });
6583
6713
  const cacheConfig = toCacheConfig(aggregates[key]);
6584
6714
  if (item.refs === void 0) {
6585
6715
  if (cacheConfig.optional === false) {
6586
- logger26.error("Item does not have refs an is not optional ", { item });
6716
+ logger27.error("Item does not have refs an is not optional ", { item });
6587
6717
  throw new Error("Item does not have refs an is not optional " + JSON.stringify(item));
6588
6718
  } else {
6589
6719
  if (item.events && Object.prototype.hasOwnProperty.call(item.events, key)) {
@@ -6592,7 +6722,7 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
6592
6722
  }
6593
6723
  } else if (item.refs[key] === void 0) {
6594
6724
  if (cacheConfig.optional === false) {
6595
- logger26.error("Item does not have mandatory ref with key, not optional ", { key, item });
6725
+ logger27.error("Item does not have mandatory ref with key, not optional ", { key, item });
6596
6726
  throw new Error("Item does not have mandatory ref with key, not optional " + key + " " + JSON.stringify(item));
6597
6727
  } else {
6598
6728
  if (item.events && Object.prototype.hasOwnProperty.call(item.events, key)) {
@@ -6601,7 +6731,7 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
6601
6731
  }
6602
6732
  } else {
6603
6733
  const ref = item.refs[key];
6604
- logger26.default("AGG Retrieving Item in Populate", { key: ref });
6734
+ logger27.default("AGG Retrieving Item in Populate", { key: ref });
6605
6735
  const newItem = await cacheConfig.cache.operations.retrieve(ref);
6606
6736
  if (newItem) {
6607
6737
  if (item.aggs === void 0) {
@@ -6615,25 +6745,25 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
6615
6745
  }
6616
6746
  };
6617
6747
  const populateEvent = async (key, item) => {
6618
- logger26.default("populate event key", { key });
6748
+ logger27.default("populate event key", { key });
6619
6749
  const cacheConfig = toCacheConfig(events[key]);
6620
6750
  if (item.events === void 0) {
6621
6751
  throw new Error("Item does not have events " + JSON.stringify(item));
6622
6752
  } else if (item.events[key] === void 0) {
6623
6753
  if (cacheConfig.optional === false) {
6624
- logger26.error("Item does not have mandatory event with key", { key, item });
6754
+ logger27.error("Item does not have mandatory event with key", { key, item });
6625
6755
  throw new Error("Item does not have mandatory event with key " + key + " " + JSON.stringify(item));
6626
6756
  }
6627
6757
  } else {
6628
6758
  const event = item.events[key];
6629
6759
  if (event.by === void 0) {
6630
- logger26.error(
6760
+ logger27.error(
6631
6761
  "populateEvent with an Event that does not have by",
6632
6762
  { event, ik: item.key, eventKey: key }
6633
6763
  );
6634
6764
  throw new Error("populateEvent with an Event that does not have by: " + JSON.stringify({ key }));
6635
6765
  }
6636
- logger26.default("EVENT Retrieving Item in Populate", { key: event.by });
6766
+ logger27.default("EVENT Retrieving Item in Populate", { key: event.by });
6637
6767
  const newItem = await cacheConfig.cache.operations.retrieve(event.by);
6638
6768
  if (newItem) {
6639
6769
  event.agg = newItem;
@@ -6641,13 +6771,13 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
6641
6771
  }
6642
6772
  };
6643
6773
  const all2 = async (query = {}, locations = []) => {
6644
- logger26.default("all", { query, locations });
6774
+ logger27.default("all", { query, locations });
6645
6775
  const items = await cache.operations.all(query, locations);
6646
6776
  const populatedItems = await Promise.all(items.map(async (item) => populate(item)));
6647
6777
  return populatedItems;
6648
6778
  };
6649
6779
  const one2 = async (query = {}, locations = []) => {
6650
- logger26.default("one", { query, locations });
6780
+ logger27.default("one", { query, locations });
6651
6781
  const item = await cache.operations.one(query, locations);
6652
6782
  let populatedItem = null;
6653
6783
  if (item) {
@@ -6656,30 +6786,30 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  return populatedItem;
  };
  const action2 = async (key, action3, body = {}) => {
- logger26.default("action", { key, action: action3, body });
- const item = await cache.operations.action(key, action3, body);
+ logger27.default("action", { key, action: action3, body });
+ const [item, affectedItems] = await cache.operations.action(key, action3, body);
  const populatedItem = await populate(item);
- return populatedItem;
+ return [populatedItem, affectedItems];
  };
  const allAction2 = async (action3, body = {}, locations = []) => {
- logger26.default("action", { action: action3, body, locations });
- const items = await cache.operations.allAction(action3, body, locations);
+ logger27.default("action", { action: action3, body, locations });
+ const [items, affectedItems] = await cache.operations.allAction(action3, body, locations);
  const populatedItems = await Promise.all(items.map(async (item) => populate(item)));
- return populatedItems;
+ return [populatedItems, affectedItems];
  };
  const allFacet2 = async (facet3, params = {}, locations = []) => {
- logger26.default("allFacet", { facet: facet3, params, locations });
+ logger27.default("allFacet", { facet: facet3, params, locations });
  const response = await cache.operations.allFacet(facet3, params, locations);
  return response;
  };
  const create2 = async (v, locations = []) => {
- logger26.default("create", { v, locations });
+ logger27.default("create", { v, locations });
  const item = await cache.operations.create(v, locations);
  const populatedItem = await populate(item);
  return populatedItem;
  };
  const get2 = async (key) => {
- logger26.default("get", { key });
+ logger27.default("get", { key });
  const item = await cache.operations.get(key);
  let populatedItem = null;
  if (item) {
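The hunk above changes the aggregator's action and allAction wrappers to pass through the [result, affectedItems] tuples now returned by the underlying cache operations. A minimal consumer-side sketch; the aggregator instance and the orderKey, "approve", and "reprice" names are hypothetical:

// Sketch only: aggregator, orderKey, "approve", and "reprice" are made-up names.
// As of this version, action/allAction resolve to a tuple instead of a bare item;
// affectedItems carries the keys/locations touched by the action (see the
// cacheInvalidation helpers, which iterate it as an array).
const [approvedOrder, affectedByApprove] = await aggregator.action(orderKey, "approve", { note: "ok" });
console.log(approvedOrder, affectedByApprove);

const [repricedOrders, affectedByReprice] = await aggregator.allAction("reprice", { factor: 1.1 });
console.log(repricedOrders.length, affectedByReprice);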
@@ -6688,7 +6818,7 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  return populatedItem;
  };
  const retrieve2 = async (key) => {
- logger26.default("retrieve", { key });
+ logger27.default("retrieve", { key });
  const item = await cache.operations.retrieve(key);
  let populatedItem = null;
  if (item) {
@@ -6697,34 +6827,34 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  return populatedItem;
  };
  const remove2 = async (key) => {
- logger26.default("remove", { key });
+ logger27.default("remove", { key });
  await cache.operations.remove(key);
  };
  const update2 = async (key, v) => {
- logger26.default("update", { key, v });
+ logger27.default("update", { key, v });
  const item = await cache.operations.update(key, v);
  const populatedItem = await populate(item);
  return populatedItem;
  };
  const facet2 = async (key, facet3) => {
- logger26.default("facet", { key, facet: facet3 });
+ logger27.default("facet", { key, facet: facet3 });
  const response = await cache.operations.facet(key, facet3);
  return response;
  };
  const find2 = async (finder, finderParams = {}, locations = []) => {
- logger26.default("find", { finder, finderParams, locations });
+ logger27.default("find", { finder, finderParams, locations });
  const items = await cache.operations.find(finder, finderParams, locations);
  const populatedItems = await Promise.all(items.map(async (item) => populate(item)));
  return populatedItems;
  };
  const findOne2 = async (finder, finderParams = {}, locations = []) => {
- logger26.default("find", { finder, finderParams, locations });
+ logger27.default("find", { finder, finderParams, locations });
  const item = await cache.operations.findOne(finder, finderParams, locations);
  const populatedItem = await populate(item);
  return populatedItem;
  };
  const set2 = async (key, v) => {
- logger26.default("set", { key, v });
+ logger27.default("set", { key, v });
  const item = await cache.operations.set(key, v);
  const populatedItem = await populate(item);
  return populatedItem;
@@ -6776,13 +6906,13 @@ var createAggregator = async (cache, { aggregates = {}, events = {} }) => {
  import {
  createRegistry as createBaseRegistry
  } from "@fjell/registry";
- var logger27 = logger_default.get("Registry");
+ var logger28 = logger_default.get("Registry");
  var createRegistryFactory = () => {
  return (type, registryHub) => {
  if (type !== "cache") {
  throw new Error(`Cache registry factory can only create 'cache' type registries, got: ${type}`);
  }
- logger27.debug("Creating cache registry", { type, registryHub });
+ logger28.debug("Creating cache registry", { type, registryHub });
  const baseRegistry = createBaseRegistry(type, registryHub);
  return baseRegistry;
  };
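A small sketch of calling the factory shown in this hunk, assuming createRegistryFactory is imported from this package and registryHub stands in for an existing @fjell/registry hub instance:

// Sketch only: registryHub is a hypothetical @fjell/registry hub.
const factory = createRegistryFactory();
const cacheRegistry = factory("cache", registryHub); // delegates to createRegistry("cache", registryHub)
// factory("model", registryHub) would throw: this factory only builds "cache" registries.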
@@ -6820,7 +6950,7 @@ export {
  createValidatedConfig,
  estimateValueSize,
  formatBytes,
- isCache,
+ isCache2 as isCache,
  isInstance,
  isLocKeyArrayEqual,
  normalizeKeyValue,